From f5b68b91834ccd4bcab33416cd2d66227eae68fb Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 11 Mar 2024 11:13:31 -0700 Subject: [PATCH 001/155] Initial separation of pipeline for onboarding to github actions for build process. --- .../workflows/fhir-oss-build-variables.yml | 36 +++ .github/workflows/fhir-oss-ci-pipeline.yml | 285 ++++++++++++++++++ .../fhir-oss-ci-test-enviroment-variables.yml | 10 + .github/workflows/jobs/analyze.yml | 201 ++++++++++++ .github/workflows/jobs/build.yml | 88 ++++++ .../workflows/jobs/clean-storage-accounts.yml | 31 ++ .github/workflows/jobs/docker-add-tag.yml | 28 ++ .github/workflows/jobs/docker-build-all.yml | 27 ++ .github/workflows/jobs/docker-build-push.yml | 40 +++ .github/workflows/jobs/e2e-setup.yml | 23 ++ .github/workflows/jobs/e2e-tests-extract.yml | 10 + .github/workflows/jobs/e2e-tests.yml | 139 +++++++++ .../jobs/package-integration-tests.yml | 17 ++ .github/workflows/jobs/package-web.yml | 14 + .github/workflows/jobs/package.yml | 93 ++++++ .../workflows/jobs/provision-healthcheck.yml | 26 ++ .../workflows/jobs/provision-sqlServer.yml | 44 +++ .github/workflows/jobs/redeploy-webapp.yml | 29 ++ .github/workflows/jobs/run-tests.yml | 79 +++++ .github/workflows/jobs/update-semver.yml | 31 ++ .../jobs/update-sqlAdminPassword.yml | 18 ++ 21 files changed, 1269 insertions(+) create mode 100644 .github/workflows/fhir-oss-build-variables.yml create mode 100644 .github/workflows/fhir-oss-ci-pipeline.yml create mode 100644 .github/workflows/fhir-oss-ci-test-enviroment-variables.yml create mode 100644 .github/workflows/jobs/analyze.yml create mode 100644 .github/workflows/jobs/build.yml create mode 100644 .github/workflows/jobs/clean-storage-accounts.yml create mode 100644 .github/workflows/jobs/docker-add-tag.yml create mode 100644 .github/workflows/jobs/docker-build-all.yml create mode 100644 .github/workflows/jobs/docker-build-push.yml create mode 100644 .github/workflows/jobs/e2e-setup.yml create mode 100644 
.github/workflows/jobs/e2e-tests-extract.yml create mode 100644 .github/workflows/jobs/e2e-tests.yml create mode 100644 .github/workflows/jobs/package-integration-tests.yml create mode 100644 .github/workflows/jobs/package-web.yml create mode 100644 .github/workflows/jobs/package.yml create mode 100644 .github/workflows/jobs/provision-healthcheck.yml create mode 100644 .github/workflows/jobs/provision-sqlServer.yml create mode 100644 .github/workflows/jobs/redeploy-webapp.yml create mode 100644 .github/workflows/jobs/run-tests.yml create mode 100644 .github/workflows/jobs/update-semver.yml create mode 100644 .github/workflows/jobs/update-sqlAdminPassword.yml diff --git a/.github/workflows/fhir-oss-build-variables.yml b/.github/workflows/fhir-oss-build-variables.yml new file mode 100644 index 0000000000..7f28b89720 --- /dev/null +++ b/.github/workflows/fhir-oss-build-variables.yml @@ -0,0 +1,36 @@ +# DESCRIPTION: +# Variables used during builds. + +variables: + buildConfiguration: 'Release' + defaultBuildFramework: 'net8.0' + azureSubscriptionEndpoint: 'docker-build' + azureContainerRegistryName: 'healthplatformregistry' + azureContainerRegistry: '$(azureContainerRegistryName).azurecr.io' + composeLocation: 'build/docker/docker-compose.yaml' + DeploymentEnvironmentNameSql: '$(DeploymentEnvironmentName)-sql' + DeploymentEnvironmentNameR4: '$(DeploymentEnvironmentName)-r4' + DeploymentEnvironmentNameR4Sql: '$(DeploymentEnvironmentNameR4)-sql' + DeploymentEnvironmentNameR4B: '$(DeploymentEnvironmentName)-r4b' + DeploymentEnvironmentNameR4BSql: '$(DeploymentEnvironmentNameR4B)-sql' + DeploymentEnvironmentNameR5: '$(DeploymentEnvironmentName)-r5' + DeploymentEnvironmentNameR5Sql: '$(DeploymentEnvironmentNameR5)-sql' + TestEnvironmentUrl: 'https://$(DeploymentEnvironmentName).azurewebsites.net' + # These variables are not used in the deployment scripts, but are used in the E2E tests files. 
+ TestEnvironmentUrl_Sql: 'https://$(DeploymentEnvironmentName)-sql.azurewebsites.net' + TestEnvironmentUrl_R4: 'https://$(DeploymentEnvironmentName)-r4.azurewebsites.net' + TestEnvironmentUrl_R4_Sql: 'https://$(DeploymentEnvironmentName)-r4-sql.azurewebsites.net' + TestEnvironmentUrl_R4B: 'https://$(DeploymentEnvironmentName)-r4b.azurewebsites.net' + TestEnvironmentUrl_R4B_Sql: 'https://$(DeploymentEnvironmentName)-r4b-sql.azurewebsites.net' + TestEnvironmentUrl_R5: 'https://$(DeploymentEnvironmentName)-r5.azurewebsites.net' + TestEnvironmentUrl_R5_Sql: 'https://$(DeploymentEnvironmentName)-r5-sql.azurewebsites.net' + #----------------------------------------------------------------------------------------- + TestClientUrl: 'https://$(DeploymentEnvironmentName)-client/' + ConnectedServiceName: 'Microsoft Health Open Source Subscription' + WindowsVmImage: 'windows-latest' + LinuxVmImage: 'ubuntu-latest' + TestApplicationResource: 'https://$(DeploymentEnvironmentName).$(tenantDomain)' + # The following is set by a build Pipeline variable: + # DefaultLinuxPool: 'Azure Pipelines' + # SharedLinuxPool: 'Azure Pipelines' + #----------------------------------------------------------------------------------------- diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml new file mode 100644 index 0000000000..809e00b28f --- /dev/null +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -0,0 +1,285 @@ +# DESCRIPTION: +# Builds, tests, and packages the solution for the main branch. 
+ +name: $(SourceBranchName)-$(Date:yyyyMMdd)$(Rev:-r) +trigger: none + +variables: +- template: fhir-oss-ci-test-environment-variables.yml +- template: fhir-oss-build-variables.yml + +stages: +# *********************** Setup *********************** +- stage: UpdateVersion + displayName: 'Determine Semver' + dependsOn: [] + jobs: + - job: Semver + pool: + name: '$(DefaultLinuxPool)' + vmImage: '$(LinuxVmImage)' + steps: + - template: ./jobs/update-semver.yml + +- stage: cleanStorageAccounts + displayName: 'Clean Storage Accounts' + dependsOn: [] + jobs: + - template: ./jobs/clean-storage-accounts.yml + parameters: + environmentName: $(DeploymentEnvironmentName) + +- stage: cleanupIntegrationTestDatabases + displayName: 'Cleanup Integration Test DBs' + dependsOn: [] + jobs: + - job: cleanup + pool: + name: '$(SharedLinuxPool)' + vmImage: '$(LinuxVmImage)' + steps: + - task: AzurePowerShell@5 + displayName: 'Azure PowerShell script: InlineScript' + inputs: + azureSubscription: $(ConnectedServiceName) + azurePowerShellVersion: latestVersion + ScriptType: inlineScript + Inline: | + $testNamePatterns = @("SNAPSHOT*","FHIRCOMPATIBILITYTEST*","FHIRINTEGRATIONTEST*","FHIRRESOURCECHANGEDISABLEDTEST*","BASE*","SNAPSHOT*") + foreach ($pattern in $testNamePatterns) { + $resources = Get-AzResource -ResourceGroupName $(ResourceGroupName) -ResourceType 'Microsoft.Sql/servers/databases' -Name $pattern + foreach ($resource in $resources) { + Write-Host "Cleaning up $($resource.ResourceName)" + Remove-AzResource -ResourceId $resource.ResourceId -Force + } + } + +- stage: BuildUnitTests + displayName: 'Build and run unit tests' + dependsOn: + - UpdateVersion + variables: + assemblySemVer: $[stageDependencies.UpdateVersion.Semver.outputs['SetVariablesFromGitVersion.assemblySemVer']] + assemblySemFileVer: $[stageDependencies.UpdateVersion.Semver.outputs['SetVariablesFromGitVersion.assemblySemFileVer']] + informationalVersion: 
$[stageDependencies.UpdateVersion.Semver.outputs['SetVariablesFromGitVersion.informationalVersion']] + majorMinorPatch: $[stageDependencies.UpdateVersion.Semver.outputs['SetVariablesFromGitVersion.majorMinorPatch']] + nuGetVersion: $[stageDependencies.UpdateVersion.Semver.outputs['SetVariablesFromGitVersion.nuGetVersion']] + jobs: + - job: Windows_dotnet8 + pool: + name: '$(DefaultWindowsPool)' + steps: + - template: ./jobs/build.yml + parameters: + targetBuildFramework: $(defaultBuildFramework) + unitTest: false + codeCoverage: true + - job: Linux_dotnet6 + pool: + name: '$(DefaultLinuxPool)' + vmImage: '$(LinuxVmImage)' + steps: + - template: ./jobs/build.yml + parameters: + targetBuildFramework: 'net6.0' + - job: Linux_BuildAndPackage + pool: + name: '$(DefaultLinuxPool)' + vmImage: '$(LinuxVmImage)' + steps: + - template: ./jobs/build.yml + parameters: + codeCoverage: false + unitTest: false + componentGovernance: true + packageArtifacts: true + packageIntegrationTests: true + +- stage: AnalyzeSecurity + displayName: 'Run Security Analysis and Validate' + dependsOn: + - BuildUnitTests + jobs: + - job: Guardian + pool: + name: '$(DefaultWindowsPool)' + + steps: + - template: ./jobs/analyze.yml + +- stage: DockerBuild + displayName: 'Build images' + dependsOn: + - UpdateVersion + variables: + assemblySemFileVer: $[stageDependencies.UpdateVersion.Semver.outputs['SetVariablesFromGitVersion.assemblySemFileVer']] + jobs: + - template: ./jobs/docker-build-all.yml + parameters: + tag: $(ImageTag) + +# *********************** Stu3 *********************** +- stage: redeployStu3 + displayName: 'Redeploy STU3 CosmosDB Site' + dependsOn: + - DockerBuild + jobs: + - template: ./jobs/redeploy-webapp.yml + parameters: + version: Stu3 + webAppName: $(DeploymentEnvironmentName) + subscription: $(ConnectedServiceName) + imageTag: $(ImageTag) + +- stage: redeployStu3Sql + displayName: 'Redeploy STU3 SQL Site' + dependsOn: + - DockerBuild + jobs: + - template: 
./jobs/redeploy-webapp.yml + parameters: + version: Stu3 + webAppName: $(DeploymentEnvironmentNameSql) + subscription: $(ConnectedServiceName) + imageTag: $(ImageTag) + +- stage: testStu3 + displayName: 'Run Stu3 Tests' + dependsOn: + - BuildUnitTests + - redeployStu3 + - redeployStu3Sql + jobs: + - template: ./jobs/run-tests.yml + parameters: + version: Stu3 + keyVaultName: $(DeploymentEnvironmentName) + appServiceName: $(DeploymentEnvironmentName) + +# *********************** R4 *********************** +- stage: redeployR4 + displayName: 'Redeploy R4 CosmosDB Site' + dependsOn: + - DockerBuild + jobs: + - template: ./jobs/redeploy-webapp.yml + parameters: + version: R4 + webAppName: $(DeploymentEnvironmentNameR4) + subscription: $(ConnectedServiceName) + imageTag: $(ImageTag) + +- stage: redeployR4Sql + displayName: 'Redeploy R4 SQL Site' + dependsOn: + - DockerBuild + jobs: + - template: ./jobs/redeploy-webapp.yml + parameters: + version: R4 + webAppName: $(DeploymentEnvironmentNameR4Sql) + subscription: $(ConnectedServiceName) + imageTag: $(ImageTag) + +- stage: testR4 + displayName: 'Run R4 Tests' + dependsOn: + - BuildUnitTests + - redeployR4 + - redeployR4Sql + jobs: + - template: ./jobs/run-tests.yml + parameters: + version: R4 + keyVaultName: $(DeploymentEnvironmentNameR4) + appServiceName: $(DeploymentEnvironmentNameR4) + +# *********************** R4B *********************** +- stage: redeployR4B + displayName: 'Redeploy R4B CosmosDB Site' + dependsOn: + - DockerBuild + jobs: + - template: ./jobs/redeploy-webapp.yml + parameters: + version: R4B + webAppName: $(DeploymentEnvironmentNameR4B) + subscription: $(ConnectedServiceName) + imageTag: $(ImageTag) + +- stage: redeployR4BSql + displayName: 'Redeploy R4B SQL Site' + dependsOn: + - DockerBuild + jobs: + - template: ./jobs/redeploy-webapp.yml + parameters: + version: R4B + webAppName: $(DeploymentEnvironmentNameR4BSql) + subscription: $(ConnectedServiceName) + imageTag: $(ImageTag) + +- stage: testR4B + 
displayName: 'Run R4B Tests' + dependsOn: + - BuildUnitTests + - redeployR4B + - redeployR4BSql + jobs: + - template: ./jobs/run-tests.yml + parameters: + version: R4B + keyVaultName: $(DeploymentEnvironmentNameR4B) + appServiceName: $(DeploymentEnvironmentNameR4B) + +# *********************** R5 *********************** +- stage: redeployR5 + displayName: 'Redeploy R5 CosmosDB Site' + dependsOn: + - DockerBuild + jobs: + - template: ./jobs/redeploy-webapp.yml + parameters: + version: R5 + webAppName: $(DeploymentEnvironmentNameR5) + subscription: $(ConnectedServiceName) + imageTag: $(ImageTag) + +- stage: redeployR5Sql + displayName: 'Redeploy R5 SQL Site' + dependsOn: + - DockerBuild + jobs: + - template: ./jobs/redeploy-webapp.yml + parameters: + version: R5 + webAppName: $(DeploymentEnvironmentNameR5Sql) + subscription: $(ConnectedServiceName) + imageTag: $(ImageTag) + +- stage: testR5 + displayName: 'Run R5 Tests' + dependsOn: + - BuildUnitTests + - redeployR5 + - redeployR5Sql + jobs: + - template: ./jobs/run-tests.yml + parameters: + version: R5 + keyVaultName: $(DeploymentEnvironmentNameR5) + appServiceName: $(DeploymentEnvironmentNameR5) + +# *********************** Finalize *********************** +- stage: DockerAddTag + displayName: 'Docker add main tag' + dependsOn: + - testStu3 + - testR4 + - testR4B + - testR5 + jobs: + - template: ./jobs/docker-add-tag.yml + parameters: + sourceTag: $(ImageTag) + targetTag: 'master' diff --git a/.github/workflows/fhir-oss-ci-test-environment-variables.yml b/.github/workflows/fhir-oss-ci-test-environment-variables.yml new file mode 100644 index 0000000000..6afcea9248 --- /dev/null +++ b/.github/workflows/fhir-oss-ci-test-environment-variables.yml @@ -0,0 +1,10 @@ +variables: + ResourceGroupRegion: 'southcentralus' + # Due to deleting a keyvault with purge protection we must use a name other than msh-fhir-ci for 90 days after 5/20/2021.
+ resourceGroupRoot: 'msh-fhir-ci4' + appServicePlanName: '$(resourceGroupRoot)-linux' + DeploymentEnvironmentName: '$(resourceGroupRoot)' + ResourceGroupName: '$(resourceGroupRoot)' + CrucibleEnvironmentUrl: 'https://crucible.mshapis.com/' + TestEnvironmentName: 'OSS CI' + ImageTag: '$(build.BuildNumber)' diff --git a/.github/workflows/jobs/analyze.yml b/.github/workflows/jobs/analyze.yml new file mode 100644 index 0000000000..5ad0110a60 --- /dev/null +++ b/.github/workflows/jobs/analyze.yml @@ -0,0 +1,201 @@ +parameters: + analyzeARMTemplates: true + analyzeBinaries: true + analyzePackages: true + runAntiMalware: true + credScanDirectory: '$(Build.SourcesDirectory)' + +steps: +- task: UseDotNet@2 + displayName: Use .NET Core sdk + inputs: + useGlobalJson: true + +- ${{ if eq(parameters.analyzeBinaries, 'true') }}: + - task: DownloadBuildArtifacts@0 + displayName: 'Download Binaries' + inputs: + buildType: 'current' + downloadType: 'single' + downloadPath: '$(Agent.TempDirectory)/artifacts' + artifactName: 'deploy' + +- ${{ if eq(parameters.analyzePackages, 'true') }}: + - task: DownloadBuildArtifacts@0 + displayName: 'Download NuGet Packages' + inputs: + buildType: 'current' + downloadType: 'single' + downloadPath: '$(Build.SourcesDirectory)/artifacts' + artifactName: 'nuget' + +- ${{ if eq(parameters.analyzeBinaries, 'true') }}: + - task: ExtractFiles@1 + displayName: 'Extract Stu3 Web Server Binaries' + inputs: + archiveFilePatterns: '$(Agent.TempDirectory)/artifacts/deploy/Microsoft.Health.Fhir.Stu3.Web.zip' + destinationFolder: '$(Build.SourcesDirectory)/artifacts/web/Stu3' + - task: ExtractFiles@1 + displayName: 'Extract R4 Web Server Binaries' + inputs: + archiveFilePatterns: '$(Agent.TempDirectory)/artifacts/deploy/Microsoft.Health.Fhir.R4.Web.zip' + destinationFolder: '$(Build.SourcesDirectory)/artifacts/web/r4' + - task: ExtractFiles@1 + displayName: 'Extract R4B Web Server Binaries' + inputs: + archiveFilePatterns: 
'$(Agent.TempDirectory)/artifacts/deploy/Microsoft.Health.Fhir.R4B.Web.zip' + destinationFolder: '$(Build.SourcesDirectory)/artifacts/web/r4b' + - task: ExtractFiles@1 + displayName: 'Extract R5 Web Server Binaries' + inputs: + archiveFilePatterns: '$(Agent.TempDirectory)/artifacts/deploy/Microsoft.Health.Fhir.R5.Web.zip' + destinationFolder: '$(Build.SourcesDirectory)/artifacts/web/r5' + +- ${{ if eq(parameters.runAntiMalware, 'true') }}: + - task: AntiMalware@4 + inputs: + InputType: 'Basic' + ScanType: 'CustomScan' + FileDirPath: '$(Build.SourcesDirectory)' + EnableServices: true + TreatSignatureUpdateFailureAs: 'Standard' + SignatureFreshness: 'OneDay' + TreatStaleSignatureAs: 'Error' + +- ${{ if eq(parameters.analyzeARMTemplates, 'true') }}: + - task: Armory@2 + inputs: + targetDirectory: '$(Build.SourcesDirectory)/samples/templates' + targetFiles: 'f|*.json' + excludePassesFromLog: false + + - task: TemplateAnalyzer@3 + displayName: 'Run Template Analyzer' + inputs: + ToolVersion: Latest + AnalyzeDirectory: '$(Build.SourcesDirectory)/samples/templates' + Verbose: false + IncludeNonSecurityRules: true + +- task: CredScan@3 + inputs: + scanFolder: ${{ parameters.credScanDirectory }} + outputFormat: 'pre' + suppressionsFile: 'CredScanSuppressions.json' + verboseOutput: true + +- task: CSRF@1 + inputs: + Path: '$(Build.SourcesDirectory)' + ToolVersion: Latest + +- task: Trivy@1 + displayName: 'Run Trivy' + inputs: + Target: '$(Build.SourcesDirectory)/build/docker' + Severities: all + VulTypes: all + +- task: PSScriptAnalyzer@1 + displayName: 'Run PSScriptAnalyzer' + inputs: + Path: '$(Build.SourcesDirectory)' + Settings: required + IgnorePattern: .gdn + Recurse: true + +- task: RoslynAnalyzers@3 + inputs: + userProvideBuildInfo: 'msBuildInfo' + msBuildArchitecture: 'DotNetCore' + msBuildCommandline: 'dotnet build $(Build.SourcesDirectory)/Microsoft.Health.Fhir.sln --configuration $(buildConfiguration) -p:ContinuousIntegrationBuild=true -f net8.0' + +- task: 
BinSkim@4 + inputs: + toolVersion: Latest + InputType: Basic + Function: analyze + AnalyzeTargetGlob: 'f|$(Agent.TempDirectory)/artifacts/**/*Microsoft.Health.*.dll' + + ## PoliCheck@2 does not need to be added since it is run internally + + ## Tools that are no longer supported: + # AutoApplicability@1, CodeMetrics@1, VulnerabilityAssessment@0 + +- task: SdtReport@2 + condition: succeededOrFailed() + continueOnError: True + inputs: + GdnExportAllTools: false + GdnExportGdnToolArmory: ${{ eq(parameters.analyzeARMTemplates, 'true') }} + GdnExportGdnToolCredScan: true + GdnExportGdnToolCSRF: true + GdnExportGdnToolRoslynAnalyzers: true + BinSkim: true + CredScan: true + +- task: PublishSecurityAnalysisLogs@3 + condition: succeededOrFailed() + continueOnError: True + inputs: + ArtifactName: 'CodeAnalysisLogs' + ArtifactType: 'Container' + AllTools: false + AntiMalware: ${{ eq(parameters.runAntiMalware, 'true') }} + APIScan: false + Armory: ${{ eq(parameters.analyzeARMTemplates, 'true') }} + Bandit: false + BinSkim: false + CodesignValidation: false + CredScan: true + CSRF: true + ESLint: false + Flawfinder: false + FortifySCA: false + FxCop: false + ModernCop: false + MSRD: false + PoliCheck: false + RoslynAnalyzers: true + SDLNativeRules: false + Semmle: false + SpotBugs: false + TSLint: false + WebScout: false + ToolLogsNotFoundAction: 'Standard' + +- task: PostAnalysis@2 + condition: succeededOrFailed() + inputs: + GdnBreakAllTools: false + GdnBreakGdnToolArmory: ${{ eq(parameters.analyzeARMTemplates, 'true') }} + GdnBreakGdnToolCredScan: true + GdnBreakGdnToolCSRF: true + GdnBreakGdnToolRoslynAnalyzers: true + BinSkim: true + CredScan: true + +- task: TSAUpload@2 + condition: and(succeeded(), eq(variables['build.sourceBranch'], 'refs/heads/main')) + displayName: 'TSA upload' + inputs: + tsaVersion: 'TsaV2' + codebase: 'NewOrUpdate' + GdnPublishTsaOnboard: false + GdnPublishTsaConfigFile: '$(Build.SourcesDirectory)\build\jobs\tsaconfig.gdntsa' + 
GdnPublishTsaExportedResultsPublishable: true + +- task: DeleteFiles@1 + displayName: 'Delete files to make space' + inputs: + SourceFolder: '$(Build.SourcesDirectory)' + Contents: '**\*' + +- task: DropValidatorTask@0 + displayName: 'SBOM Validator and Publisher Task' + inputs: + BuildDropPath: '$(Agent.TempDirectory)/artifacts/deploy' + OutputPath: 'output.json' + ValidateSignature: true + Verbosity: 'Verbose' + continueOnError: true diff --git a/.github/workflows/jobs/build.yml b/.github/workflows/jobs/build.yml new file mode 100644 index 0000000000..8bb12db04d --- /dev/null +++ b/.github/workflows/jobs/build.yml @@ -0,0 +1,88 @@ +parameters: + # Default values + unitTest: true + codeCoverage: false + componentGovernance: false + packageArtifacts: false + packageIntegrationTests: false + targetBuildFramework: '' + +steps: +- task: UseDotNet@2 + displayName: 'Use .NET SDK' + inputs: + useGlobalJson: true + +- ${{ if eq(parameters.targetBuildFramework, '') }}: + - task: DotNetCoreCLI@2 + displayName: 'dotnet build $(buildConfiguration)' + inputs: + command: build + arguments: '--configuration $(buildConfiguration) -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="$(assemblySemVer)" -p:FileVersion="$(assemblySemFileVer)" -p:InformationalVersion="$(informationalVersion)" -p:Version="$(majorMinorPatch)" -warnaserror' + workingDirectory: $(System.DefaultWorkingDirectory) + +- ${{ if ne(parameters.targetBuildFramework, '') }}: + - task: DotNetCoreCLI@2 + displayName: 'dotnet build $(buildConfiguration)' + inputs: + command: build + arguments: '--configuration $(buildConfiguration) -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="$(assemblySemVer)" -p:FileVersion="$(assemblySemFileVer)" -p:InformationalVersion="$(informationalVersion)" -p:Version="$(majorMinorPatch)" -warnaserror -f ${{parameters.targetBuildFramework}}' + workingDirectory: $(System.DefaultWorkingDirectory) + +- ${{ if eq(parameters.unitTest, 'true') }}: + - task: DotNetCoreCLI@2 + 
displayName: 'dotnet test' + inputs: + command: test + projects: '**/*UnitTests/*.csproj' + arguments: '--configuration $(buildConfiguration) --no-build -f ${{parameters.targetBuildFramework}}' + testRunTitle: 'Unit Tests' + +- ${{ if eq(parameters.codeCoverage, 'true') }}: + - task: DotNetCoreCLI@2 + displayName: 'dotnet test with coverage' + inputs: + command: test + projects: '**/*UnitTests/*.csproj' + arguments: '--configuration $(buildConfiguration) --no-build --collect "XPlat Code Coverage" -s "$(build.sourcesDirectory)/CodeCoverage.runsettings" -v normal -f ${{parameters.targetBuildFramework}}' + testRunTitle: 'Unit Tests' + - task: reportgenerator@5 + displayName: 'aggregate code coverage' + condition: succeededOrFailed() + inputs: + reports: '$(Agent.TempDirectory)/*/coverage.cobertura.xml' + reporttypes: 'Cobertura' + targetdir: '$(Agent.TempDirectory)/coverage' + - task: PublishCodeCoverageResults@1 + displayName: 'publish code coverage' + condition: succeededOrFailed() + inputs: + codeCoverageTool: 'Cobertura' + failIfCoverageEmpty: true + summaryFileLocation: '$(Agent.TempDirectory)/coverage/Cobertura.xml' + - task: PublishBuildArtifacts@1 + displayName: 'publish Cobertura.xml' + inputs: + pathToPublish: '$(Agent.TempDirectory)/coverage/Cobertura.xml' + artifactName: 'IntegrationTests' + artifactType: 'container' + +- ${{ if eq(parameters.packageArtifacts, 'true') }}: + # https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/secure-supply-chain/ado-sbom-generator + - task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0 + displayName: 'SBOM Generation Task' + inputs: + BuildDropPath: '$(build.artifactStagingDirectory)' + BuildComponentPath: '$(Build.SourcesDirectory)' + - task: PublishBuildArtifacts@1 + displayName: 'Publish SBOM Artifacts' + inputs: + pathToPublish: '$(build.artifactStagingDirectory)' + artifactName: 'deploy' + artifactType: 'container' + +- ${{ if 
eq(parameters.packageArtifacts, 'true') }}: + - template: package.yml + +- ${{ if eq(parameters.packageIntegrationTests, 'true') }}: + - template: package-integration-tests.yml diff --git a/.github/workflows/jobs/clean-storage-accounts.yml b/.github/workflows/jobs/clean-storage-accounts.yml new file mode 100644 index 0000000000..40c5260f04 --- /dev/null +++ b/.github/workflows/jobs/clean-storage-accounts.yml @@ -0,0 +1,31 @@ +parameters: +- name: environmentName + type: string + +jobs: +- job: "cleanStorageAccounts" + pool: + vmImage: $(WindowsVmImage) + steps: + - task: AzurePowerShell@4 + displayName: 'Clean Storage Accounts' + continueOnError: true + inputs: + azureSubscription: $(ConnectedServiceName) + azurePowerShellVersion: latestVersion + ScriptType: inlineScript + Inline: | + $currentUtcTime = [DateTime]::UtcNow + + $storageAccounts = Get-AzStorageAccount -ResourceGroupName ${{ parameters.environmentName }} + foreach ($storageAccount in $storageAccounts) { + + $storageContainers = Get-AzStorageContainer -Name * -Context $storageAccount.Context + foreach ($container in $storageContainers) { + $ageDiff = $currentUtcTime - $container.CloudBlobContainer.Properties.LastModified.UtcDateTime + if($ageDiff.TotalDays -ge 3) { + Write-Host "Deleting container $($container.Name)" + $container.CloudBlobContainer.Delete() + } + } + } \ No newline at end of file diff --git a/.github/workflows/jobs/docker-add-tag.yml b/.github/workflows/jobs/docker-add-tag.yml new file mode 100644 index 0000000000..031ae5e484 --- /dev/null +++ b/.github/workflows/jobs/docker-add-tag.yml @@ -0,0 +1,28 @@ + +parameters: +- name: sourceTag + type: string +- name: targetTag + type: string + +jobs: +- job: DockerAddTag + pool: + name: '$(DefaultLinuxPool)' + vmImage: '$(LinuxVmImage)' + steps: + - task: AzureCLI@2 + displayName: 'Azure CLI: InlineScript' + inputs: + azureSubscription: $(ConnectedServiceName) + scriptType: bash + scriptLocation: inlineScript + inlineScript: | + az acr login -n 
$(azureContainerRegistry) + for v in stu3 r4 r4b r5; do + sourceImage="$(azureContainerRegistry)/${v}_fhir-server:${{parameters.sourceTag}}" + targetImage="$(azureContainerRegistry)/${v}_fhir-server:${{parameters.targetTag}}" + docker pull $sourceImage + docker tag $sourceImage $targetImage + docker push $targetImage + done diff --git a/.github/workflows/jobs/docker-build-all.yml b/.github/workflows/jobs/docker-build-all.yml new file mode 100644 index 0000000000..3f6e766c53 --- /dev/null +++ b/.github/workflows/jobs/docker-build-all.yml @@ -0,0 +1,27 @@ +# DESCRIPTION: +# Builds and pushes images for all supported FHIR versions + +parameters: +- name: tag + type: string + +jobs: +- template: docker-build-push.yml + parameters: + version: "R4" + tag: ${{parameters.tag}} + +- template: docker-build-push.yml + parameters: + version: "R4B" + tag: ${{parameters.tag}} + +- template: docker-build-push.yml + parameters: + version: "Stu3" + tag: ${{parameters.tag}} + +- template: docker-build-push.yml + parameters: + version: "R5" + tag: ${{parameters.tag}} diff --git a/.github/workflows/jobs/docker-build-push.yml b/.github/workflows/jobs/docker-build-push.yml new file mode 100644 index 0000000000..77cacb2f94 --- /dev/null +++ b/.github/workflows/jobs/docker-build-push.yml @@ -0,0 +1,40 @@ +# DESCRIPTION: +# Builds and pushes a docker image for a given FHIR version + +parameters: +- name: version + type: string +- name: tag + type: string + +jobs: +- job: '${{parameters.version}}_Docker' + pool: + name: '$(DefaultLinuxPool)' + vmImage: '$(LinuxVmImage)' + steps: + - task: DockerCompose@0 + displayName: 'Build FHIR ${{parameters.version}} Server Image' + inputs: + action: Build services + azureSubscriptionEndpoint: $(azureSubscriptionEndpoint) + azureContainerRegistry: $(azureContainerRegistry) + dockerComposeFile: $(composeLocation) + dockerComposeFileArgs: | + FHIR_VERSION=${{parameters.version}} + ASSEMBLY_VER=$(assemblySemFileVer) + projectName: ${{parameters.version}} + 
additionalImageTags: ${{parameters.tag}} + + - task: DockerCompose@0 + displayName: 'Push FHIR ${{parameters.version}} Server Image' + inputs: + action: Push services + azureSubscriptionEndpoint: $(azureSubscriptionEndpoint) + azureContainerRegistry: $(azureContainerRegistry) + dockerComposeFile: $(composeLocation) + dockerComposeFileArgs: | + FHIR_VERSION=${{parameters.version}} + ASSEMBLY_VER=$(assemblySemFileVer) + projectName: ${{parameters.version}} + additionalImageTags: ${{parameters.tag}} diff --git a/.github/workflows/jobs/e2e-setup.yml b/.github/workflows/jobs/e2e-setup.yml new file mode 100644 index 0000000000..33890f84d2 --- /dev/null +++ b/.github/workflows/jobs/e2e-setup.yml @@ -0,0 +1,23 @@ +steps: + - task: DownloadBuildArtifacts@0 + inputs: + buildType: 'current' + downloadType: 'single' + downloadPath: '$(System.ArtifactsDirectory)' + artifactName: 'IntegrationTests' + + - task: UseDotNet@2 + inputs: + useGlobalJson: true + + - task: AzureKeyVault@1 + displayName: 'Azure Key Vault: resolute-oss-tenant-info' + inputs: + azureSubscription: $(ConnectedServiceName) + KeyVaultName: 'resolute-oss-tenant-info' + + - task: AzureKeyVault@1 + displayName: 'Azure Key Vault: $(DeploymentEnvironmentName)-ts' + inputs: + azureSubscription: $(ConnectedServiceName) + KeyVaultName: '$(DeploymentEnvironmentName)-ts' diff --git a/.github/workflows/jobs/e2e-tests-extract.yml b/.github/workflows/jobs/e2e-tests-extract.yml new file mode 100644 index 0000000000..1d6a11bf0e --- /dev/null +++ b/.github/workflows/jobs/e2e-tests-extract.yml @@ -0,0 +1,10 @@ +parameters: +- name: version + type: string + +steps: + - task: ExtractFiles@1 + displayName: 'Extract E2E Test Binaries' + inputs: + archiveFilePatterns: '$(System.ArtifactsDirectory)/IntegrationTests/Microsoft.Health.Fhir.${{ parameters.version }}.Tests.E2E.zip' + destinationFolder: '$(Agent.TempDirectory)/E2ETests/' diff --git a/.github/workflows/jobs/e2e-tests.yml b/.github/workflows/jobs/e2e-tests.yml new file mode 
100644 index 0000000000..b69ef2351d --- /dev/null +++ b/.github/workflows/jobs/e2e-tests.yml @@ -0,0 +1,139 @@ +parameters: +- name: version + type: string +- name: appServiceName + type: string +- name: appServiceType + type: string + +steps: + - template: e2e-tests-extract.yml + parameters: + version: ${{parameters.version}} + + - task: AzurePowerShell@4 + displayName: 'Set Variables' + inputs: + azureSubscription: $(ConnectedServiceName) + azurePowerShellVersion: latestVersion + ScriptType: inlineScript + Inline: | + $keyVault = "$(DeploymentEnvironmentName)-ts" + $secrets = Get-AzKeyVaultSecret -VaultName $keyVault + + foreach($secret in $secrets) + { + $environmentVariableName = $secret.Name.Replace("--","_") + + $secretValue = Get-AzKeyVaultSecret -VaultName $keyVault -Name $secret.Name + # Replace with -AsPlainText flag when v5.3 of the Az Module is supported + $plainValue = ([System.Net.NetworkCredential]::new("", $secretValue.SecretValue).Password).ToString() + if([string]::IsNullOrEmpty($plainValue)) + { + throw "$($secret.Name) is empty" + } + Write-Host "##vso[task.setvariable variable=$($environmentVariableName)]$($plainValue)" + } + + $storageAccounts = Get-AzStorageAccount -ResourceGroupName $(ResourceGroupName) + $allStorageAccounts = "" + foreach ($storageAccount in $storageAccounts) { + $accKey = Get-AzStorageAccountKey -ResourceGroupName $(ResourceGroupName) -Name $storageAccount.StorageAccountName | Where-Object {$_.KeyName -eq "key1"} + + $storageSecretName = "$($storageAccount.StorageAccountName)_secret" + Write-Host "##vso[task.setvariable variable=$($storageSecretName)]$($accKey.Value)" + $allStorageAccounts += "$($storageSecretName)|$($accKey.Value)|" + } + Write-Host "##vso[task.setvariable variable=AllStorageAccounts]$($allStorageAccounts)" + + $appServiceName = "${{ parameters.appServiceName }}" + $appSettings = (Get-AzWebApp -ResourceGroupName $(ResourceGroupName) -Name $appServiceName).SiteConfig.AppSettings + $acrSettings = 
$appSettings | where {$_.Name -eq "FhirServer__Operations__ConvertData__ContainerRegistryServers__0"} + $acrLoginServer = $acrSettings[0].Value + $acrAccountName = ($acrLoginServer -split '\.')[0] + $acrPassword = (Get-AzContainerRegistryCredential -ResourceGroupName $(ResourceGroupName) -Name $acrAccountName).Password + Write-Host "##vso[task.setvariable variable=TestContainerRegistryServer]$($acrLoginServer)" + Write-Host "##vso[task.setvariable variable=TestContainerRegistryPassword]$($acrPassword)" + + $exportStoreSettings = $appSettings | where {$_.Name -eq "FhirServer__Operations__Export__StorageAccountUri"} + $exportStoreUri = $exportStoreSettings[0].Value + Write-Host "$exportStoreUri" + $exportStoreAccountName = [System.Uri]::new("$exportStoreUri").Host.Split('.')[0] + $exportStoreKey = Get-AzStorageAccountKey -ResourceGroupName $(ResourceGroupName) -Name "$exportStoreAccountName" | Where-Object {$_.KeyName -eq "key1"} + + Write-Host "##vso[task.setvariable variable=TestExportStoreUri]$($exportStoreUri)" + Write-Host "##vso[task.setvariable variable=TestExportStoreKey]$($exportStoreKey.Value)" + + $integrationStoreSettings = $appSettings | where {$_.Name -eq "FhirServer__Operations__IntegrationDataStore__StorageAccountUri"} + $integrationStoreUri = $integrationStoreSettings[0].Value + Write-Host "$integrationStoreUri" + $integrationStoreAccountName = [System.Uri]::new("$integrationStoreUri").Host.Split('.')[0] + $integrationStoreKey = Get-AzStorageAccountKey -ResourceGroupName $(ResourceGroupName) -Name "$integrationStoreAccountName" | Where-Object {$_.KeyName -eq "key1"} + + Write-Host "##vso[task.setvariable variable=TestIntegrationStoreUri]$($integrationStoreUri)" + Write-Host "##vso[task.setvariable variable=TestIntegrationStoreKey]$($integrationStoreKey.Value)" + + Write-Host "##vso[task.setvariable variable=Resource]$(TestApplicationResource)" + + $secrets = Get-AzKeyVaultSecret -VaultName resolute-oss-tenant-info + + foreach($secret in $secrets) + { 
+ $environmentVariableName = $secret.Name.Replace("--","_") + + $secretValue = Get-AzKeyVaultSecret -VaultName resolute-oss-tenant-info -Name $secret.Name + # Replace with -AsPlainText flag when v5.3 of the Az Module is supported + $plainValue = ([System.Net.NetworkCredential]::new("", $secretValue.SecretValue).Password).ToString() + if([string]::IsNullOrEmpty($plainValue)) + { + throw "$($secret.Name) is empty" + } + Write-Host "##vso[task.setvariable variable=$($environmentVariableName)]$($plainValue)" + } + # ---------------------------------------- + + dotnet dev-certs https + + - task: DotNetCoreCLI@2 + displayName: 'E2E ${{ parameters.version }} ${{parameters.appServiceType}}' + inputs: + command: test + arguments: '"$(Agent.TempDirectory)/E2ETests/**/*${{ parameters.version }}.Tests.E2E*.dll" --blame-hang-timeout 7m --filter "FullyQualifiedName~${{parameters.appServiceType}}&Category!=ExportLongRunning"' + workingDirectory: "$(System.ArtifactsDirectory)" + testRunTitle: '${{ parameters.version }} ${{parameters.appServiceType}}' + env: + 'TestEnvironmentUrl': $(TestEnvironmentUrl) + 'TestEnvironmentUrl_${{ parameters.version }}': $(TestEnvironmentUrl_${{ parameters.version }}) + 'TestEnvironmentUrl_Sql': $(TestEnvironmentUrl_Sql) + 'TestEnvironmentUrl_${{ parameters.version }}_Sql': $(TestEnvironmentUrl_${{ parameters.version }}_Sql) + 'Resource': $(Resource) + 'AllStorageAccounts': $(AllStorageAccounts) + 'TestContainerRegistryServer': $(TestContainerRegistryServer) + 'TestContainerRegistryPassword': $(TestContainerRegistryPassword) + 'TestExportStoreUri': $(TestExportStoreUri) + 'TestExportStoreKey': $(TestExportStoreKey) + 'TestIntegrationStoreUri': $(TestIntegrationStoreUri) + 'TestIntegrationStoreKey': $(TestIntegrationStoreKey) + 'tenant-admin-service-principal-name': $(tenant-admin-service-principal-name) + 'tenant-admin-service-principal-password': $(tenant-admin-service-principal-password) + 'tenant-admin-user-name': $(tenant-admin-user-name) + 
'tenant-admin-user-password': $(tenant-admin-user-password) + 'tenant-id': $(tenant-id) + 'app_globalAdminServicePrincipal_id': $(app_globalAdminServicePrincipal_id) + 'app_globalAdminServicePrincipal_secret': $(app_globalAdminServicePrincipal_secret) + 'app_nativeClient_id': $(app_nativeClient_id) + 'app_nativeClient_secret': $(app_nativeClient_secret) + 'app_wrongAudienceClient_id': $(app_wrongAudienceClient_id) + 'app_wrongAudienceClient_secret': $(app_wrongAudienceClient_secret) + 'user_globalAdminUser_id': $(user_globalAdminUser_id) + 'user_globalAdminUser_secret': $(user_globalAdminUser_secret) + 'user_globalConverterUser_id': $(user_globalConverterUser_id) + 'user_globalConverterUser_secret': $(user_globalConverterUser_secret) + 'user_globalExporterUser_id': $(user_globalExporterUser_id) + 'user_globalExporterUser_secret': $(user_globalExporterUser_secret) + 'user_globalImporterUser_id': $(user_globalImporterUser_id) + 'user_globalImporterUser_secret': $(user_globalImporterUser_secret) + 'user_globalReaderUser_id': $(user_globalReaderUser_id) + 'user_globalReaderUser_secret': $(user_globalReaderUser_secret) + 'user_globalWriterUser_id': $(user_globalWriterUser_id) + 'user_globalWriterUser_secret': $(user_globalWriterUser_secret) diff --git a/.github/workflows/jobs/package-integration-tests.yml b/.github/workflows/jobs/package-integration-tests.yml new file mode 100644 index 0000000000..180c259c07 --- /dev/null +++ b/.github/workflows/jobs/package-integration-tests.yml @@ -0,0 +1,17 @@ +steps: + + - task: DotNetCoreCLI@2 + displayName: 'dotnet publish Integration Tests' + inputs: + command: publish + projects: 'test/**/*.csproj' + arguments: '--version-suffix $(build.buildNumber) -o "$(build.binariesdirectory)/IntegrationTests" --configuration $(buildConfiguration) --no-build -f $(defaultBuildFramework)' + publishWebProjects: false + zipAfterPublish: true + + - task: PublishBuildArtifacts@1 + displayName: 'publish Integration Tests' + inputs: + pathToPublish: 
'$(build.binariesdirectory)/IntegrationTests' + artifactName: 'IntegrationTests' + artifactType: 'container' \ No newline at end of file diff --git a/.github/workflows/jobs/package-web.yml b/.github/workflows/jobs/package-web.yml new file mode 100644 index 0000000000..edc1378081 --- /dev/null +++ b/.github/workflows/jobs/package-web.yml @@ -0,0 +1,14 @@ +parameters: + csproj: '**/*Web.csproj' + +steps: + + # Package web + + - task: DotNetCoreCLI@2 + displayName: 'dotnet publish ${{parameters.csproj}}' + inputs: + command: publish + projects: '${{parameters.csproj}}' + arguments: '--output $(build.artifactStagingDirectory)/web --configuration $(buildConfiguration) --version-suffix $(build.buildNumber) --no-build -f $(defaultBuildFramework)' + publishWebProjects: false \ No newline at end of file diff --git a/.github/workflows/jobs/package.yml b/.github/workflows/jobs/package.yml new file mode 100644 index 0000000000..0fbf89aa9e --- /dev/null +++ b/.github/workflows/jobs/package.yml @@ -0,0 +1,93 @@ +steps: + + # Package web + - template: package-web.yml + parameters: + csproj: '**/Microsoft.Health.Fhir.Stu3.Web.csproj' + + - template: package-web.yml + parameters: + csproj: '**/Microsoft.Health.Fhir.R4.Web.csproj' + + - template: package-web.yml + parameters: + csproj: '**/Microsoft.Health.Fhir.R4B.Web.csproj' + + - template: package-web.yml + parameters: + csproj: '**/Microsoft.Health.Fhir.R5.Web.csproj' + + # Package nugets + - powershell: | + & dotnet pack $(Build.SourcesDirectory) --output $(Build.ArtifactStagingDirectory)/nupkgs --no-build --configuration=Release -p:PackageVersion=$(nuGetVersion) + name: PackNugets + + # Publish artifacts + - task: PublishBuildArtifacts@1 + displayName: 'publish web artifacts' + inputs: + pathToPublish: '$(build.artifactStagingDirectory)/web' + artifactName: 'deploy' + artifactType: 'container' + + - task: PublishBuildArtifacts@1 + displayName: 'publish samples' + inputs: + pathToPublish: './samples/' + artifactName: 'deploy' + 
artifactType: 'container' + + - task: PublishBuildArtifacts@1 + displayName: 'publish testauthenvironment.json' + inputs: + pathToPublish: './testauthenvironment.json' + artifactName: 'deploy' + artifactType: 'container' + + - task: PublishBuildArtifacts@1 + displayName: 'publish global.json' + inputs: + pathToPublish: './global.json' + artifactName: 'deploy' + artifactType: 'container' + + - task: PublishBuildArtifacts@1 + displayName: 'publish test configuration jsons' + inputs: + pathToPublish: './test/Configuration/' + artifactName: 'deploy' + artifactType: 'container' + + - task: PublishBuildArtifacts@1 + displayName: 'publish release directory' + inputs: + pathToPublish: './release/' + artifactName: 'deploy' + artifactType: 'container' + + - task: PublishBuildArtifacts@1 + displayName: 'publish nuget artifacts' + inputs: + pathtoPublish: '$(build.artifactStagingDirectory)/nupkgs' + artifactName: 'nuget' + publishLocation: 'container' + + - task: CopyFiles@2 + displayName: 'copy symbols' + inputs: + sourceFolder: '$(build.sourcesDirectory)' + contents: | + **/*.pdb + !**/*.UnitTests.pdb + targetFolder: '$(build.artifactStagingDirectory)/symbols' + cleanTargetFolder: true + flattenFolders: true + overWrite: true + + - task: PublishBuildArtifacts@1 + displayName: 'publish symbol artifacts' + inputs: + pathtoPublish: '$(build.artifactStagingDirectory)/symbols' + artifactName: 'symbols' + publishLocation: 'container' + \ No newline at end of file diff --git a/.github/workflows/jobs/provision-healthcheck.yml b/.github/workflows/jobs/provision-healthcheck.yml new file mode 100644 index 0000000000..40107e6ebf --- /dev/null +++ b/.github/workflows/jobs/provision-healthcheck.yml @@ -0,0 +1,26 @@ +parameters: +- name: webAppName + type: string + +steps: +- powershell: | + $webAppName = "${{ parameters.webAppName }}".ToLower() + $healthCheckUrl = "https://$webAppName.azurewebsites.net/health/check" + $healthStatus = 0 + Do { + Start-Sleep -s 5 + Write-Host "Checking: 
$healthCheckUrl" + + try { + $healthStatus = (Invoke-WebRequest -URI $healthCheckUrl).statuscode + Write-Host "Result: $healthStatus" + } + catch { + Write-Host $PSItem.Exception.Message + } + finally { + $Error.Clear() + } + + } While ($healthStatus -ne 200) + name: PingHealthCheckEndpoint diff --git a/.github/workflows/jobs/provision-sqlServer.yml b/.github/workflows/jobs/provision-sqlServer.yml new file mode 100644 index 0000000000..190add675e --- /dev/null +++ b/.github/workflows/jobs/provision-sqlServer.yml @@ -0,0 +1,44 @@ + +parameters: +- name: resourceGroup + type: string +- name: sqlServerName + type: string +- name: schemaAutomaticUpdatesEnabled + type: string + default: 'auto' +- name: sqlServerAdminPassword + type: string + default: '' + +jobs: +- job: provisionEnvironment + pool: + name: '$(SharedLinuxPool)' + vmImage: '$(LinuxVmImage)' + steps: + - task: AzureKeyVault@1 + displayName: 'Azure Key Vault: resolute-oss-tenant-info' + inputs: + azureSubscription: $(ConnectedServiceName) + KeyVaultName: 'resolute-oss-tenant-info' + + - task: AzurePowerShell@5 + displayName: 'Azure PowerShell script: InlineScript' + inputs: + azureSubscription: $(ConnectedServiceName) + azurePowerShellVersion: latestVersion + ScriptType: inlineScript + Inline: | + Add-Type -AssemblyName System.Web + + $templateParameters = @{ + sqlAdminPassword = "${{parameters.sqlServerAdminPassword}}" + sqlServerName = "${{parameters.sqlServerName}}".ToLower() + sqlSchemaAutomaticUpdatesEnabled = "${{parameters.schemaAutomaticUpdatesEnabled}}" + } + + Write-Host "Provisioning Sql server" + Write-Host "Resource Group: ${{ parameters.resourceGroup }}" + Write-Host "SqlServerName: ${{ parameters.sqlServerName }}" + New-AzResourceGroupDeployment -ResourceGroupName "${{ parameters.resourceGroup }}" -TemplateFile $(System.DefaultWorkingDirectory)/samples/templates/default-sqlServer.json -TemplateParameterObject $templateParameters -Verbose diff --git a/.github/workflows/jobs/redeploy-webapp.yml 
b/.github/workflows/jobs/redeploy-webapp.yml new file mode 100644 index 0000000000..6845260147 --- /dev/null +++ b/.github/workflows/jobs/redeploy-webapp.yml @@ -0,0 +1,29 @@ +parameters: +- name: version + type: string +- name: webAppName + type: string +- name: subscription + type: string +- name: imageTag + type: string + +jobs: +- job: provisionEnvironment + pool: + name: '$(DefaultLinuxPool)' + vmImage: '$(LinuxVmImage)' + steps: + - task: AzureRmWebAppDeployment@4 + displayName: 'Azure App Service Deploy' + inputs: + azureSubscription: '${{ parameters.subscription }}' + appType: 'webAppContainer' + WebAppName: '${{ parameters.webAppName }}' + DockerNamespace: $(azureContainerRegistry) + DockerRepository: '${{ parameters.version }}_fhir-server' + DockerImageTag: ${{ parameters.imageTag }} + + - template: ./provision-healthcheck.yml + parameters: + webAppName: ${{ parameters.webAppName }} \ No newline at end of file diff --git a/.github/workflows/jobs/run-tests.yml b/.github/workflows/jobs/run-tests.yml new file mode 100644 index 0000000000..d8672f3c1c --- /dev/null +++ b/.github/workflows/jobs/run-tests.yml @@ -0,0 +1,79 @@ +parameters: +- name: version + type: string +- name: keyVaultName + type: string +- name: appServiceName + type: string +jobs: +- job: "integrationTests" + pool: + name: '$(SharedLinuxPool)' + vmImage: '$(LinuxVmImage)' + steps: + - task: DownloadBuildArtifacts@0 + inputs: + buildType: 'current' + downloadType: 'single' + downloadPath: '$(System.ArtifactsDirectory)' + artifactName: 'IntegrationTests' + + - task: ExtractFiles@1 + displayName: 'Extract Integration Test Binaries' + inputs: + archiveFilePatterns: '$(System.ArtifactsDirectory)/IntegrationTests/Microsoft.Health.Fhir.${{ parameters.version }}.Tests.Integration.zip' + destinationFolder: '$(Agent.TempDirectory)/IntegrationTests/' + + - task: UseDotNet@2 + inputs: + useGlobalJson: true + + - task: AzureKeyVault@1 + displayName: 'Azure Key Vault: ${{ parameters.keyVaultName }}' + 
inputs: + azureSubscription: $(ConnectedServiceName) + KeyVaultName: '${{ parameters.keyVaultName }}' + + - task: AzureKeyVault@1 + displayName: 'Azure Key Vault: ${{ parameters.keyVaultName }}-sql' + inputs: + azureSubscription: $(ConnectedServiceName) + KeyVaultName: '${{ parameters.keyVaultName }}-sql' + + - task: DotNetCoreCLI@2 + displayName: 'Run Integration Tests' + inputs: + command: test + arguments: '"$(Agent.TempDirectory)/IntegrationTests/**/*${{ parameters.version }}.Tests.Integration*.dll" --blame-hang-timeout 15m' + workingDirectory: "$(System.ArtifactsDirectory)" + testRunTitle: '${{ parameters.version }} Integration' + env: + 'CosmosDb:Host': $(CosmosDb--Host) + 'CosmosDb:Key': $(CosmosDb--Key) + 'SqlServer:ConnectionString': $(SqlServer--ConnectionString) + +- job: 'cosmosE2eTests' + dependsOn: [] + pool: + name: '$(SharedLinuxPool)' + vmImage: '$(LinuxVmImage)' + steps: + - template: e2e-setup.yml + - template: e2e-tests.yml + parameters: + version: ${{ parameters.version }} + appServiceName: ${{ parameters.appServiceName }} + appServiceType: 'CosmosDb' + +- job: 'sqlE2eTests' + dependsOn: [] + pool: + name: '$(SharedLinuxPool)' + vmImage: '$(LinuxVmImage)' + steps: + - template: e2e-setup.yml + - template: e2e-tests.yml + parameters: + version: ${{ parameters.version }} + appServiceName: '${{ parameters.appServiceName }}-sql' + appServiceType: 'SqlServer' diff --git a/.github/workflows/jobs/update-semver.yml b/.github/workflows/jobs/update-semver.yml new file mode 100644 index 0000000000..f07e6dda97 --- /dev/null +++ b/.github/workflows/jobs/update-semver.yml @@ -0,0 +1,31 @@ +steps: + +- task: UseDotNet@2 + displayName: 'Use .NET Core sdk (for GitVersion)' + inputs: + packageType: sdk + version: 3.1.x + +- task: UseDotNet@2 + inputs: + useGlobalJson: true + +- powershell: | + dotnet tool install --global GitVersion.Tool + + $gitVersionJson = & 'dotnet-gitversion' | ConvertFrom-Json + + Write-Host "##vso[task.setvariable 
variable=semVer]$($gitVersionJson.semVer)" + Write-Host "##vso[task.setvariable variable=informationalVersion;isOutput=true]$($gitVersionJson.informationalVersion)" + Write-Host "##vso[task.setvariable variable=majorMinorPatch;isOutput=true]$($gitVersionJson.majorMinorPatch)" + Write-Host "##vso[task.setvariable variable=nuGetVersion;isOutput=true]$($gitVersionJson.semVer)" + Write-Host "##vso[task.setvariable variable=assemblySemVer;isOutput=true]$($gitVersionJson.assemblySemVer)" + Write-Host "##vso[task.setvariable variable=assemblySemFileVer;isOutput=true]$($gitVersionJson.assemblySemFileVer)" + + Write-Host "##vso[build.updatebuildnumber]$($gitVersionJson.semVer)" + name: SetVariablesFromGitVersion + +- powershell: | + Write-Host '----------Variables to use for build----------' + Write-Host 'semVer: $(semVer)' + name: PrintVariablesFromGitVersion diff --git a/.github/workflows/jobs/update-sqlAdminPassword.yml b/.github/workflows/jobs/update-sqlAdminPassword.yml new file mode 100644 index 0000000000..b1d4a2466e --- /dev/null +++ b/.github/workflows/jobs/update-sqlAdminPassword.yml @@ -0,0 +1,18 @@ +steps: + +- task: UseDotNet@2 + displayName: 'Use .NET Core sdk (to generate password)' + inputs: + packageType: sdk + version: 3.1.x + +- task: UseDotNet@2 + inputs: + useGlobalJson: true + +- powershell: | + + $random = -join((((33,35,37,38,42,43,45,46,95) + (48..57) + (65..90) + (97..122) | Get-Random -Count 20) + ((33,35,37,38,42,43,45,46,95) | Get-Random -Count 1) + ((48..57) | Get-Random -Count 1) + ((65..90) | Get-Random -Count 1) + ((97..122) | Get-Random -Count 1) | Get-Random -Count 24) | % {[char]$_}) + Write-Host "##vso[task.setvariable variable=password;isOutput=true]" + + name: SetVariablesFromRandomString From ff0df35582a078e1559cb55100b28b3d34a5ba4f Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Tue, 12 Mar 2024 14:30:53 -0700 Subject: [PATCH 002/155] Initial file conversion with test ci pipeline --- .../{workflows/jobs => actions}/analyze.yml | 0 
.github/{workflows/jobs => actions}/build.yml | 0 .../clean-storage-accounts.yml | 0 .../jobs => actions}/docker-add-tag.yml | 0 .../jobs => actions}/docker-build-all.yml | 0 .../jobs => actions}/docker-build-push.yml | 0 .../{workflows/jobs => actions}/e2e-setup.yml | 0 .../jobs => actions}/e2e-tests-extract.yml | 0 .../{workflows/jobs => actions}/e2e-tests.yml | 0 .../package-integration-tests.yml | 0 .../jobs => actions}/package-web.yml | 0 .../{workflows/jobs => actions}/package.yml | 0 .../provision-healthcheck.yml | 0 .../jobs => actions}/provision-sqlServer.yml | 0 .../jobs => actions}/redeploy-webapp.yml | 0 .../{workflows/jobs => actions}/run-tests.yml | 0 .../jobs => actions}/update-semver.yml | 0 .../update-sqlAdminPassword.yml | 0 .github/workflows/fhir-oss-ci-pipeline.yml | 296 +----------------- 19 files changed, 15 insertions(+), 281 deletions(-) rename .github/{workflows/jobs => actions}/analyze.yml (100%) rename .github/{workflows/jobs => actions}/build.yml (100%) rename .github/{workflows/jobs => actions}/clean-storage-accounts.yml (100%) rename .github/{workflows/jobs => actions}/docker-add-tag.yml (100%) rename .github/{workflows/jobs => actions}/docker-build-all.yml (100%) rename .github/{workflows/jobs => actions}/docker-build-push.yml (100%) rename .github/{workflows/jobs => actions}/e2e-setup.yml (100%) rename .github/{workflows/jobs => actions}/e2e-tests-extract.yml (100%) rename .github/{workflows/jobs => actions}/e2e-tests.yml (100%) rename .github/{workflows/jobs => actions}/package-integration-tests.yml (100%) rename .github/{workflows/jobs => actions}/package-web.yml (100%) rename .github/{workflows/jobs => actions}/package.yml (100%) rename .github/{workflows/jobs => actions}/provision-healthcheck.yml (100%) rename .github/{workflows/jobs => actions}/provision-sqlServer.yml (100%) rename .github/{workflows/jobs => actions}/redeploy-webapp.yml (100%) rename .github/{workflows/jobs => actions}/run-tests.yml (100%) rename 
.github/{workflows/jobs => actions}/update-semver.yml (100%) rename .github/{workflows/jobs => actions}/update-sqlAdminPassword.yml (100%) diff --git a/.github/workflows/jobs/analyze.yml b/.github/actions/analyze.yml similarity index 100% rename from .github/workflows/jobs/analyze.yml rename to .github/actions/analyze.yml diff --git a/.github/workflows/jobs/build.yml b/.github/actions/build.yml similarity index 100% rename from .github/workflows/jobs/build.yml rename to .github/actions/build.yml diff --git a/.github/workflows/jobs/clean-storage-accounts.yml b/.github/actions/clean-storage-accounts.yml similarity index 100% rename from .github/workflows/jobs/clean-storage-accounts.yml rename to .github/actions/clean-storage-accounts.yml diff --git a/.github/workflows/jobs/docker-add-tag.yml b/.github/actions/docker-add-tag.yml similarity index 100% rename from .github/workflows/jobs/docker-add-tag.yml rename to .github/actions/docker-add-tag.yml diff --git a/.github/workflows/jobs/docker-build-all.yml b/.github/actions/docker-build-all.yml similarity index 100% rename from .github/workflows/jobs/docker-build-all.yml rename to .github/actions/docker-build-all.yml diff --git a/.github/workflows/jobs/docker-build-push.yml b/.github/actions/docker-build-push.yml similarity index 100% rename from .github/workflows/jobs/docker-build-push.yml rename to .github/actions/docker-build-push.yml diff --git a/.github/workflows/jobs/e2e-setup.yml b/.github/actions/e2e-setup.yml similarity index 100% rename from .github/workflows/jobs/e2e-setup.yml rename to .github/actions/e2e-setup.yml diff --git a/.github/workflows/jobs/e2e-tests-extract.yml b/.github/actions/e2e-tests-extract.yml similarity index 100% rename from .github/workflows/jobs/e2e-tests-extract.yml rename to .github/actions/e2e-tests-extract.yml diff --git a/.github/workflows/jobs/e2e-tests.yml b/.github/actions/e2e-tests.yml similarity index 100% rename from .github/workflows/jobs/e2e-tests.yml rename to 
.github/actions/e2e-tests.yml diff --git a/.github/workflows/jobs/package-integration-tests.yml b/.github/actions/package-integration-tests.yml similarity index 100% rename from .github/workflows/jobs/package-integration-tests.yml rename to .github/actions/package-integration-tests.yml diff --git a/.github/workflows/jobs/package-web.yml b/.github/actions/package-web.yml similarity index 100% rename from .github/workflows/jobs/package-web.yml rename to .github/actions/package-web.yml diff --git a/.github/workflows/jobs/package.yml b/.github/actions/package.yml similarity index 100% rename from .github/workflows/jobs/package.yml rename to .github/actions/package.yml diff --git a/.github/workflows/jobs/provision-healthcheck.yml b/.github/actions/provision-healthcheck.yml similarity index 100% rename from .github/workflows/jobs/provision-healthcheck.yml rename to .github/actions/provision-healthcheck.yml diff --git a/.github/workflows/jobs/provision-sqlServer.yml b/.github/actions/provision-sqlServer.yml similarity index 100% rename from .github/workflows/jobs/provision-sqlServer.yml rename to .github/actions/provision-sqlServer.yml diff --git a/.github/workflows/jobs/redeploy-webapp.yml b/.github/actions/redeploy-webapp.yml similarity index 100% rename from .github/workflows/jobs/redeploy-webapp.yml rename to .github/actions/redeploy-webapp.yml diff --git a/.github/workflows/jobs/run-tests.yml b/.github/actions/run-tests.yml similarity index 100% rename from .github/workflows/jobs/run-tests.yml rename to .github/actions/run-tests.yml diff --git a/.github/workflows/jobs/update-semver.yml b/.github/actions/update-semver.yml similarity index 100% rename from .github/workflows/jobs/update-semver.yml rename to .github/actions/update-semver.yml diff --git a/.github/workflows/jobs/update-sqlAdminPassword.yml b/.github/actions/update-sqlAdminPassword.yml similarity index 100% rename from .github/workflows/jobs/update-sqlAdminPassword.yml rename to 
.github/actions/update-sqlAdminPassword.yml diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 809e00b28f..bc0b654516 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -1,285 +1,19 @@ # DESCRIPTION: # Builds, tests, and packages the solution for the main branch. -name: $(SourceBranchName)-$(Date:yyyyMMdd)$(Rev:-r) -trigger: none - -variables: -- template: fhir-oss-ci-test-environment-variables.yml -- template: fhir-oss-build-variables.yml - -stages: -# *********************** Setup *********************** -- stage: UpdateVersion - displayName: 'Determine Semver' - dependsOn: [] - jobs: - - job: Semver - pool: - name: '$(DefaultLinuxPool)' - vmImage: '$(LinuxVmImage)' - steps: - - template: ./jobs/update-semver.yml - -- stage: cleanStorageAccounts - displayName: 'Clean Storage Accounts' - dependsOn: [] - jobs: - - template: ./jobs/clean-storage-accounts.yml - parameters: - environmentName: $(DeploymentEnvironmentName) - -- stage: cleanupIntegrationTestDatabases - displayName: 'Cleanup Integration Test DBs' - dependsOn: [] - jobs: - - job: cleanup - pool: - name: '$(SharedLinuxPool)' - vmImage: '$(LinuxVmImage)' - steps: - - task: AzurePowerShell@5 - displayName: 'Azure PowerShell script: InlineScript' - inputs: - azureSubscription: $(ConnectedServiceName) - azurePowerShellVersion: latestVersion - ScriptType: inlineScript - Inline: | - $testNamePatterns = @("SNAPSHOT*","FHIRCOMPATIBILITYTEST*","FHIRINTEGRATIONTEST*","FHIRRESOURCECHANGEDISABLEDTEST*","BASE*","SNAPSHOT*") - foreach ($pattern in $testNamePatterns) { - $resources = Get-AzResource -ResourceGroupName $(ResourceGroupName) -ResourceType 'Microsoft.Sql/servers/databases' -Name $pattern - foreach ($resource in $resources) { - Write-Host "Cleaning up $($resource.ResourceName)" - Remove-AzResource -ResourceId $resource.ResourceId -Force - } - } - -- stage: BuildUnitTests - displayName: 'Build and run 
unit tests' - dependsOn: - - UpdateVersion - variables: - assemblySemVer: $[stageDependencies.UpdateVersion.Semver.outputs['SetVariablesFromGitVersion.assemblySemVer']] - assemblySemFileVer: $[stageDependencies.UpdateVersion.Semver.outputs['SetVariablesFromGitVersion.assemblySemFileVer']] - informationalVersion: $[stageDependencies.UpdateVersion.Semver.outputs['SetVariablesFromGitVersion.informationalVersion']] - majorMinorPatch: $[stageDependencies.UpdateVersion.Semver.outputs['SetVariablesFromGitVersion.majorMinorPatch']] - nuGetVersion: $[stageDependencies.UpdateVersion.Semver.outputs['SetVariablesFromGitVersion.nuGetVersion']] - jobs: - - job: Windows_dotnet8 - pool: - name: '$(DefaultWindowsPool)' +run-name: ${{github.ref_name}}-$(Date:yyyyMMdd)$(Rev:-r) +on: + push: + branches: + - main + +permissions: + id-token: write + contents: read +jobs: + update-version: + name: Determine Semver + runs-on: ubuntu-latest steps: - - template: ./jobs/build.yml - parameters: - targetBuildFramework: $(defaultBuildFramework) - unitTest: false - codeCoverage: true - - job: Linux_dotnet6 - pool: - name: '$(DefaultLinuxPool)' - vmImage: '$(LinuxVmImage)' - steps: - - template: ./jobs/build.yml - parameters: - targetBuildFramework: 'net6.0' - - job: Linux_BuildAndPackage - pool: - name: '$(DefaultLinuxPool)' - vmImage: '$(LinuxVmImage)' - steps: - - template: ./jobs/build.yml - parameters: - codeCoverage: false - unitTest: false - componentGovernance: true - packageArtifacts: true - packageIntegrationTests: true - -- stage: AnalyzeSecurity - displayName: 'Run Security Analysis and Validate' - dependsOn: - - BuildUnitTests - jobs: - - job: Guardian - pool: - name: '$(DefaultWindowsPool)' - - steps: - - template: ./jobs/analyze.yml - -- stage: DockerBuild - displayName: 'Build images' - dependsOn: - - UpdateVersion - variables: - assemblySemFileVer: $[stageDependencies.UpdateVersion.Semver.outputs['SetVariablesFromGitVersion.assemblySemFileVer']] - jobs: - - template: 
./jobs/docker-build-all.yml - parameters: - tag: $(ImageTag) - -# *********************** Stu3 *********************** -- stage: redeployStu3 - displayName: 'Redeploy STU3 CosmosDB Site' - dependsOn: - - DockerBuild - jobs: - - template: ./jobs/redeploy-webapp.yml - parameters: - version: Stu3 - webAppName: $(DeploymentEnvironmentName) - subscription: $(ConnectedServiceName) - imageTag: $(ImageTag) - -- stage: redeployStu3Sql - displayName: 'Redeploy STU3 SQL Site' - dependsOn: - - DockerBuild - jobs: - - template: ./jobs/redeploy-webapp.yml - parameters: - version: Stu3 - webAppName: $(DeploymentEnvironmentNameSql) - subscription: $(ConnectedServiceName) - imageTag: $(ImageTag) - -- stage: testStu3 - displayName: 'Run Stu3 Tests' - dependsOn: - - BuildUnitTests - - redeployStu3 - - redeployStu3Sql - jobs: - - template: ./jobs/run-tests.yml - parameters: - version: Stu3 - keyVaultName: $(DeploymentEnvironmentName) - appServiceName: $(DeploymentEnvironmentName) - -# *********************** R4 *********************** -- stage: redeployR4 - displayName: 'Redeploy R4 CosmosDB Site' - dependsOn: - - DockerBuild - jobs: - - template: ./jobs/redeploy-webapp.yml - parameters: - version: R4 - webAppName: $(DeploymentEnvironmentNameR4) - subscription: $(ConnectedServiceName) - imageTag: $(ImageTag) - -- stage: redeployR4Sql - displayName: 'Redeploy R4 SQL Site' - dependsOn: - - DockerBuild - jobs: - - template: ./jobs/redeploy-webapp.yml - parameters: - version: R4 - webAppName: $(DeploymentEnvironmentNameR4Sql) - subscription: $(ConnectedServiceName) - imageTag: $(ImageTag) - -- stage: testR4 - displayName: 'Run R4 Tests' - dependsOn: - - BuildUnitTests - - redeployR4 - - redeployR4Sql - jobs: - - template: ./jobs/run-tests.yml - parameters: - version: R4 - keyVaultName: $(DeploymentEnvironmentNameR4) - appServiceName: $(DeploymentEnvironmentNameR4) - -# *********************** R4B *********************** -- stage: redeployR4B - displayName: 'Redeploy R4B CosmosDB Site' - 
dependsOn: - - DockerBuild - jobs: - - template: ./jobs/redeploy-webapp.yml - parameters: - version: R4B - webAppName: $(DeploymentEnvironmentNameR4B) - subscription: $(ConnectedServiceName) - imageTag: $(ImageTag) - -- stage: redeployR4BSql - displayName: 'Redeploy R4B SQL Site' - dependsOn: - - DockerBuild - jobs: - - template: ./jobs/redeploy-webapp.yml - parameters: - version: R4B - webAppName: $(DeploymentEnvironmentNameR4BSql) - subscription: $(ConnectedServiceName) - imageTag: $(ImageTag) - -- stage: testR4B - displayName: 'Run R4B Tests' - dependsOn: - - BuildUnitTests - - redeployR4B - - redeployR4BSql - jobs: - - template: ./jobs/run-tests.yml - parameters: - version: R4B - keyVaultName: $(DeploymentEnvironmentNameR4B) - appServiceName: $(DeploymentEnvironmentNameR4B) - -# *********************** R5 *********************** -- stage: redeployR5 - displayName: 'Redeploy R5 CosmosDB Site' - dependsOn: - - DockerBuild - jobs: - - template: ./jobs/redeploy-webapp.yml - parameters: - version: R5 - webAppName: $(DeploymentEnvironmentNameR5) - subscription: $(ConnectedServiceName) - imageTag: $(ImageTag) - -- stage: redeployR5Sql - displayName: 'Redeploy R5 SQL Site' - dependsOn: - - DockerBuild - jobs: - - template: ./jobs/redeploy-webapp.yml - parameters: - version: R5 - webAppName: $(DeploymentEnvironmentNameR5Sql) - subscription: $(ConnectedServiceName) - imageTag: $(ImageTag) - -- stage: testR5 - displayName: 'Run R5 Tests' - dependsOn: - - BuildUnitTests - - redeployR5 - - redeployR5Sql - jobs: - - template: ./jobs/run-tests.yml - parameters: - version: R5 - keyVaultName: $(DeploymentEnvironmentNameR5) - appServiceName: $(DeploymentEnvironmentNameR5) - -# *********************** Finalize *********************** -- stage: DockerAddTag - displayName: 'Docker add main tag' - dependsOn: - - testStu3 - - testR4 - - testR4B - - testR5 - jobs: - - template: ./jobs/docker-add-tag.yml - parameters: - sourceTag: $(ImageTag) - targetTag: 'master' + - name: Update 
Semver + uses: ./.github/actions/update-semver.yml From adb771f66cd2cc04e16b7b2bfa54ca257610ce85 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Tue, 12 Mar 2024 15:08:23 -0700 Subject: [PATCH 003/155] Temp workflow dispatch --- .github/workflows/fhir-oss-ci-pipeline.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index bc0b654516..d4ef6c0fca 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -3,9 +3,7 @@ run-name: ${{github.ref_name}}-$(Date:yyyyMMdd)$(Rev:-r) on: - push: - branches: - - main + workflow_dispatch permissions: id-token: write From 797788dc0fdbc00ebb8148be89aad30b0ff39952 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Tue, 12 Mar 2024 15:24:22 -0700 Subject: [PATCH 004/155] PR trigger for testing --- .github/{workflows => }/fhir-oss-build-variables.yml | 0 .../{workflows => }/fhir-oss-ci-test-enviroment-variables.yml | 0 .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 3 files changed, 1 insertion(+), 1 deletion(-) rename .github/{workflows => }/fhir-oss-build-variables.yml (100%) rename .github/{workflows => }/fhir-oss-ci-test-enviroment-variables.yml (100%) diff --git a/.github/workflows/fhir-oss-build-variables.yml b/.github/fhir-oss-build-variables.yml similarity index 100% rename from .github/workflows/fhir-oss-build-variables.yml rename to .github/fhir-oss-build-variables.yml diff --git a/.github/workflows/fhir-oss-ci-test-enviroment-variables.yml b/.github/fhir-oss-ci-test-enviroment-variables.yml similarity index 100% rename from .github/workflows/fhir-oss-ci-test-enviroment-variables.yml rename to .github/fhir-oss-ci-test-enviroment-variables.yml diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index d4ef6c0fca..13edabebbd 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ 
b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -3,7 +3,7 @@ run-name: ${{github.ref_name}}-$(Date:yyyyMMdd)$(Rev:-r) on: - workflow_dispatch + pull_request permissions: id-token: write From 2cd1d0c14b39e334fe84fff71d654f8876116480 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Tue, 12 Mar 2024 15:28:36 -0700 Subject: [PATCH 005/155] Added checkout to get current version of action --- .github/workflows/fhir-oss-ci-pipeline.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 13edabebbd..cbed82ec39 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -9,6 +9,14 @@ permissions: id-token: write contents: read jobs: + checkout: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + fetch-depth: 0 + ref: ${{github.ref}} update-version: name: Determine Semver runs-on: ubuntu-latest From 3465ec1e9ee3bf21834ef7bd63cc431c2976c689 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Tue, 12 Mar 2024 15:32:15 -0700 Subject: [PATCH 006/155] Added need for checkout --- .github/workflows/fhir-oss-ci-pipeline.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index cbed82ec39..dbd73f23fd 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -13,13 +13,14 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: fetch-depth: 0 ref: ${{github.ref}} update-version: name: Determine Semver runs-on: ubuntu-latest + needs: checkout steps: - name: Update Semver uses: ./.github/actions/update-semver.yml From 805e7ea70639a3372f480b37250237c47fb30d3c Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 13 Mar 2024 13:57:59 -0700 Subject: [PATCH 007/155] Testing semver changes --- 
.github/actions/update-semver.yml | 56 +++++++++++----------- .github/workflows/fhir-oss-ci-pipeline.yml | 11 ++++- 2 files changed, 38 insertions(+), 29 deletions(-) diff --git a/.github/actions/update-semver.yml b/.github/actions/update-semver.yml index f07e6dda97..3b34d705bd 100644 --- a/.github/actions/update-semver.yml +++ b/.github/actions/update-semver.yml @@ -1,31 +1,31 @@ -steps: +name: update-semver +description: 'Update the build number with the SemVer from GitVersion' +inputs: + configFilePath: + description: 'Path to the GitVersion configuration file' + required: true + default: './GitVersion.yml' +outputs: + informationalVersion: ${{ steps.SetVariablesFromGitVersion.outputs.informationalVersion }} + majorMinorPatch: ${{ steps.SetVariablesFromGitVersion.outputs.majorMinorPatch }} + nuGetVersion: ${{ steps.SetVariablesFromGitVersion.outputs.nuGetVersion }} + assemblySemVer: ${{ steps.SetVariablesFromGitVersion.outputs.assemblySemVer }} + assemblySemFileVer: ${{ steps.SetVariablesFromGitVersion.outputs.assemblySemFileVer }} + semVer: ${{ steps.SetVariablesFromGitVersion.outputs.semVer }} + buildNumber: ${{ steps.SetVariablesFromGitVersion.outputs.semVer }} + buildVersion: ${{ steps.SetVariablesFromGitVersion.outputs.semVer }} + buildVersionName: ${{ steps.SetVariablesFromGitVersion.outputs.semVer }} + buildVersionNameWithBranch: ${{ steps.SetVariablesFromGitVersion.outputs.semVer }} -- task: UseDotNet@2 - displayName: 'Use .NET Core sdk (for GitVersion)' - inputs: - packageType: sdk - version: 3.1.x +steps: -- task: UseDotNet@2 - inputs: - useGlobalJson: true - -- powershell: | - dotnet tool install --global GitVersion.Tool - - $gitVersionJson = & 'dotnet-gitversion' | ConvertFrom-Json - - Write-Host "##vso[task.setvariable variable=semVer]$($gitVersionJson.semVer)" - Write-Host "##vso[task.setvariable variable=informationalVersion;isOutput=true]$($gitVersionJson.informationalVersion)" - Write-Host "##vso[task.setvariable 
variable=majorMinorPatch;isOutput=true]$($gitVersionJson.majorMinorPatch)" - Write-Host "##vso[task.setvariable variable=nuGetVersion;isOutput=true]$($gitVersionJson.semVer)" - Write-Host "##vso[task.setvariable variable=assemblySemVer;isOutput=true]$($gitVersionJson.assemblySemVer)" - Write-Host "##vso[task.setvariable variable=assemblySemFileVer;isOutput=true]$($gitVersionJson.assemblySemFileVer)" - - Write-Host "##vso[build.updatebuildnumber]$($gitVersionJson.semVer)" - name: SetVariablesFromGitVersion + - name: Install GitVersion' + uses: gittools/actions/gitversion/setup@0.13.4 -- powershell: | - Write-Host '----------Variables to use for build----------' - Write-Host 'semVer: $(semVer)' - name: PrintVariablesFromGitVersion + - name: SetVariablesFromGitVersion + id: -p:Version + uses: gittools/actions/gitversion/execute@0.13.4 + with: + configFilePath: './GitVersion.yml' + targetFilePath: ${{github.workspace}} + useConfigFile: true diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index dbd73f23fd..0ce66e67f8 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -17,10 +17,19 @@ jobs: with: fetch-depth: 0 ref: ${{github.ref}} + + - name: Install Latest .Net SDK + uses: actions/setup-dotnet@v4 + + - name: Install Older .Net SDK + uses: actions/setup-dotnet@v4 + with: + dotnet-version: 6.0.x update-version: name: Determine Semver runs-on: ubuntu-latest needs: checkout steps: - name: Update Semver - uses: ./.github/actions/update-semver.yml + id: semver + uses: ./.github/actions/update-semver From d0e9b234e214b48bc34b3e82435a55a90d8b8f30 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 13 Mar 2024 14:21:01 -0700 Subject: [PATCH 008/155] Moved and renamed update semver action so it gets picked up --- .../action.yml} | 27 ++++++++++--------- 1 file changed, 14 insertions(+), 13 deletions(-) rename .github/actions/{update-semver.yml => 
update-semver/action.yml} (72%) diff --git a/.github/actions/update-semver.yml b/.github/actions/update-semver/action.yml similarity index 72% rename from .github/actions/update-semver.yml rename to .github/actions/update-semver/action.yml index 3b34d705bd..a297c68471 100644 --- a/.github/actions/update-semver.yml +++ b/.github/actions/update-semver/action.yml @@ -3,7 +3,7 @@ description: 'Update the build number with the SemVer from GitVersion' inputs: configFilePath: description: 'Path to the GitVersion configuration file' - required: true + required: false default: './GitVersion.yml' outputs: informationalVersion: ${{ steps.SetVariablesFromGitVersion.outputs.informationalVersion }} @@ -16,16 +16,17 @@ outputs: buildVersion: ${{ steps.SetVariablesFromGitVersion.outputs.semVer }} buildVersionName: ${{ steps.SetVariablesFromGitVersion.outputs.semVer }} buildVersionNameWithBranch: ${{ steps.SetVariablesFromGitVersion.outputs.semVer }} +runs: + using: 'composite' + steps: + + - name: Install GitVersion' + uses: gittools/actions/gitversion/setup@0.13.4 -steps: - - - name: Install GitVersion' - uses: gittools/actions/gitversion/setup@0.13.4 - - - name: SetVariablesFromGitVersion - id: -p:Version - uses: gittools/actions/gitversion/execute@0.13.4 - with: - configFilePath: './GitVersion.yml' - targetFilePath: ${{github.workspace}} - useConfigFile: true + - name: SetVariablesFromGitVersion + id: -p:Version + uses: gittools/actions/gitversion/execute@0.13.4 + with: + configFilePath: './GitVersion.yml' + targetFilePath: ${{github.workspace}} + useConfigFile: true From 56668f0b8524e994665b57ab0daeebc4f5503b2f Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 13 Mar 2024 14:33:05 -0700 Subject: [PATCH 009/155] Tweaking semver --- .github/actions/update-semver/action.yml | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/.github/actions/update-semver/action.yml b/.github/actions/update-semver/action.yml index a297c68471..91426a1db5 100644 
--- a/.github/actions/update-semver/action.yml +++ b/.github/actions/update-semver/action.yml @@ -5,17 +5,6 @@ inputs: description: 'Path to the GitVersion configuration file' required: false default: './GitVersion.yml' -outputs: - informationalVersion: ${{ steps.SetVariablesFromGitVersion.outputs.informationalVersion }} - majorMinorPatch: ${{ steps.SetVariablesFromGitVersion.outputs.majorMinorPatch }} - nuGetVersion: ${{ steps.SetVariablesFromGitVersion.outputs.nuGetVersion }} - assemblySemVer: ${{ steps.SetVariablesFromGitVersion.outputs.assemblySemVer }} - assemblySemFileVer: ${{ steps.SetVariablesFromGitVersion.outputs.assemblySemFileVer }} - semVer: ${{ steps.SetVariablesFromGitVersion.outputs.semVer }} - buildNumber: ${{ steps.SetVariablesFromGitVersion.outputs.semVer }} - buildVersion: ${{ steps.SetVariablesFromGitVersion.outputs.semVer }} - buildVersionName: ${{ steps.SetVariablesFromGitVersion.outputs.semVer }} - buildVersionNameWithBranch: ${{ steps.SetVariablesFromGitVersion.outputs.semVer }} runs: using: 'composite' steps: @@ -24,9 +13,9 @@ runs: uses: gittools/actions/gitversion/setup@0.13.4 - name: SetVariablesFromGitVersion - id: -p:Version + id: version uses: gittools/actions/gitversion/execute@0.13.4 with: - configFilePath: './GitVersion.yml' + configFilePath: ${{inputs.configFilePath}} targetFilePath: ${{github.workspace}} useConfigFile: true From 47b867c950e4fa7aa3a74e13f32016036c7f8cd5 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 13 Mar 2024 14:34:20 -0700 Subject: [PATCH 010/155] Changing run name --- .github/workflows/fhir-oss-ci-pipeline.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 0ce66e67f8..442bf3054b 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -1,7 +1,7 @@ # DESCRIPTION: # Builds, tests, and packages the solution for the main branch. 
-run-name: ${{github.ref_name}}-$(Date:yyyyMMdd)$(Rev:-r) +run-name: ${{github.ref_name}}-${{github.run_id}} on: pull_request @@ -33,3 +33,5 @@ jobs: - name: Update Semver id: semver uses: ./.github/actions/update-semver + with: + configFilePath: From d3a7af500d9292321d76ac2eb3ce1a8a33dc3d57 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 13 Mar 2024 14:47:37 -0700 Subject: [PATCH 011/155] Semver now similar to dicom --- .github/actions/update-semver/action.yml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/.github/actions/update-semver/action.yml b/.github/actions/update-semver/action.yml index 91426a1db5..a389ac871e 100644 --- a/.github/actions/update-semver/action.yml +++ b/.github/actions/update-semver/action.yml @@ -5,12 +5,27 @@ inputs: description: 'Path to the GitVersion configuration file' required: false default: './GitVersion.yml' +outputs: + assemblyVersion: + description: The assembly version for the shared components + value: ${{ steps.version.outputs.GitVersion_AssemblySemVer }} + fileVersion: + description: The assembly file version for the shared components + value: ${{ steps.version.outputs.GitVersion_AssemblySemFileVer }} + informationalVersion: + description: The assembly informational version for the shared components + value: ${{ steps.version.outputs.GitVersion_InformationalVersion }} + nugetVersion: + description: The NuGet package version for the shared components + value: ${{ steps.version.outputs.GitVersion_SemVer }} runs: using: 'composite' steps: - name: Install GitVersion' uses: gittools/actions/gitversion/setup@0.13.4 + with: + versionSpec: '5.x' - name: SetVariablesFromGitVersion id: version From 3e6b90dd4cec7e844a63bf9cf7d47e904d1a32a2 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 13 Mar 2024 17:16:03 -0700 Subject: [PATCH 012/155] Removed property from semver call --- .github/workflows/fhir-oss-ci-pipeline.yml | 2 -- 1 file changed, 2 deletions(-) diff --git 
a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 442bf3054b..e5a937ac20 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -33,5 +33,3 @@ jobs: - name: Update Semver id: semver uses: ./.github/actions/update-semver - with: - configFilePath: From 0314029c68bbaca5302e034e651fce9f8a6a1080 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 13 Mar 2024 17:20:31 -0700 Subject: [PATCH 013/155] Removing ref as it is changing the workspace path --- .github/workflows/fhir-oss-ci-pipeline.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index e5a937ac20..ea2b30b5f6 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -16,7 +16,6 @@ jobs: uses: actions/checkout@v4 with: fetch-depth: 0 - ref: ${{github.ref}} - name: Install Latest .Net SDK uses: actions/setup-dotnet@v4 From 04c31542f71ff64af87f850301ab96f0296339bc Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 13 Mar 2024 17:37:03 -0700 Subject: [PATCH 014/155] Simplified semver job call --- .github/workflows/fhir-oss-ci-pipeline.yml | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index ea2b30b5f6..9dc7dde3ec 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -24,11 +24,6 @@ jobs: uses: actions/setup-dotnet@v4 with: dotnet-version: 6.0.x - update-version: - name: Determine Semver - runs-on: ubuntu-latest - needs: checkout - steps: - - name: Update Semver + - name: Determine Semver id: semver uses: ./.github/actions/update-semver From 585c9fe9ea309de4127ea6f5ee3940599b45a663 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 13 Mar 2024 17:41:36 -0700 Subject: [PATCH 015/155] Version typo --- 
.github/actions/update-semver/action.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/actions/update-semver/action.yml b/.github/actions/update-semver/action.yml index a389ac871e..6e950c7848 100644 --- a/.github/actions/update-semver/action.yml +++ b/.github/actions/update-semver/action.yml @@ -23,13 +23,13 @@ runs: steps: - name: Install GitVersion' - uses: gittools/actions/gitversion/setup@0.13.4 + uses: gittools/actions/gitversion/setup@v0.13.4 with: versionSpec: '5.x' - name: SetVariablesFromGitVersion id: version - uses: gittools/actions/gitversion/execute@0.13.4 + uses: gittools/actions/gitversion/execute@v0.13.4 with: configFilePath: ${{inputs.configFilePath}} targetFilePath: ${{github.workspace}} From ccbc744af5d18d421beea8eb34c16db28fc6027a Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 13 Mar 2024 17:50:24 -0700 Subject: [PATCH 016/155] Wrong property in git tools --- .github/actions/update-semver/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/update-semver/action.yml b/.github/actions/update-semver/action.yml index 6e950c7848..9401cb0dfc 100644 --- a/.github/actions/update-semver/action.yml +++ b/.github/actions/update-semver/action.yml @@ -32,5 +32,5 @@ runs: uses: gittools/actions/gitversion/execute@v0.13.4 with: configFilePath: ${{inputs.configFilePath}} - targetFilePath: ${{github.workspace}} + targetPath: ${{github.workspace}} useConfigFile: true From a04ec616cb87e19cf98ba53692233a68beda3dc4 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 14 Mar 2024 10:49:11 -0700 Subject: [PATCH 017/155] Setup actions to create build variables to be reused --- .../actions/setup-build-variables/action.yml | 36 +++++++++++++++++++ .github/actions/setup-ci-variables/action.yml | 17 +++++++++ .github/fhir-oss-build-variables.yml | 36 ------------------- .../fhir-oss-ci-test-enviroment-variables.yml | 10 ------ 4 files changed, 53 insertions(+), 46 deletions(-) create mode 
100644 .github/actions/setup-build-variables/action.yml create mode 100644 .github/actions/setup-ci-variables/action.yml delete mode 100644 .github/fhir-oss-build-variables.yml delete mode 100644 .github/fhir-oss-ci-test-enviroment-variables.yml diff --git a/.github/actions/setup-build-variables/action.yml b/.github/actions/setup-build-variables/action.yml new file mode 100644 index 0000000000..69eee98af1 --- /dev/null +++ b/.github/actions/setup-build-variables/action.yml @@ -0,0 +1,36 @@ +name: setup build variables +description: Sets variables used during builds. + +runs: + using: composite + steps: + - name: Set Build Variables + id: defaultVariables + run: | + echo "buildConfiguration=Release" >> "$GITHUB_ENV" + echo "defaultBuildFramework=net8.0" >> "$GITHUB_ENV" + echo "azureSubscriptionEndpoint=docker-build" >> "$GITHUB_ENV" + echo "azureContainerRegistryName=healthplatformregistry" >> "$GITHUB_ENV" + echo "connectedServiceName=Microsoft Health Open Source Subscription" >> "$GITHUB_ENV" + echo "composeLocation=build/docker/docker-compose.yaml" >> "$GITHUB_ENV" + + - name: Set Build Urls using Deployment Environment + run: | + echo "azureContainerRegistry='$azureContainerRegistryName'.azurecr.io" >> "$GITHUB_ENV" + echo "deploymentEnvironmentNameSql='$deploymentEnvironmentName-sql' >> "$GITHUB_ENV" + echo "deploymentEnvironmentNameR4='$deploymentEnvironmentName-r4' >> "$GITHUB_ENV" + echo "deploymentEnvironmentNameR4Sql='$deploymentEnvironmentNameR4'-sql >> "$GITHUB_ENV" + echo "deploymentEnvironmentNameR4B='$deploymentEnvironmentName-r4b' >> "$GITHUB_ENV" + echo "deploymentEnvironmentNameR4BSql='$deploymentEnvironmentNameR4B'-sql >> "$GITHUB_ENV" + echo "deploymentEnvironmentNameR5='$deploymentEnvironmentName'-r5 >> "$GITHUB_ENV" + echo "deploymentEnvironmentNameR5Sql='$deploymentEnvironmentNameR5'-sql >> "$GITHUB_ENV" + echo "testEnvironmentUrl=https://'$deploymentEnvironmentName'.azurewebsites.net >> "$GITHUB_ENV" + echo 
"testEnvironmentUrl_Sql=https://'$deploymentEnvironmentName'-sql.azurewebsites.net >> "$GITHUB_ENV" + echo "testEnvironmentUrl_R4=https://'$deploymentEnvironmentName'-r4.azurewebsites.net >> "$GITHUB_ENV" + echo "testEnvironmentUrl_R4_Sql=https://'$deploymentEnvironmentName'-r4-sql.azurewebsites.net >> "$GITHUB_ENV" + echo "testEnvironmentUrl_R4B=https://'$deploymentEnvironmentName'-r4b.azurewebsites.net >> "$GITHUB_ENV" + echo "testEnvironmentUrl_R4B_Sql=https://'$deploymentEnvironmentName'-r4b-sql.azurewebsites.net >> "$GITHUB_ENV" + echo "testEnvironmentUrl_R5=https://'$deploymentEnvironmentName'-r5.azurewebsites.net >> "$GITHUB_ENV" + echo "testEnvironmentUrl_R5_Sql=https://'$deploymentEnvironmentName'-r5-sql.azurewebsites.net >> "$GITHUB_ENV" + echo "testClientUrl=https://'$deploymentEnvironmentName'-client/ >> "$GITHUB_ENV" + echo "testApplicationResource=https://'$deploymentEnvironmentName'.'$tenantDomain' >> "$GITHUB_ENV" diff --git a/.github/actions/setup-ci-variables/action.yml b/.github/actions/setup-ci-variables/action.yml new file mode 100644 index 0000000000..03e75a1201 --- /dev/null +++ b/.github/actions/setup-ci-variables/action.yml @@ -0,0 +1,17 @@ +name: setup ci build variables +description: Sets ci build specific variables. 
+ +runs: + using: composite + steps: + - name: Set CI Build Variables + id: defaultVariables + run: | + echo "resourceGroupRegion=southcentralus" >> "$GITHUB_ENV" + echo "resourceGroupRoot=msh-fhir-ci4" >> "$GITHUB_ENV" + echo "appServicePlanName=$resourceGroupRoot-linux" >> "$GITHUB_ENV" + echo "deploymentEnvironmentName=$resourceGroupRoot" >> "$GITHUB_ENV" + echo "resourceGroupName=$resourceGroupRoot" >> "$GITHUB_ENV" + echo "crucibleEnvironmentUrl=https://crucible.mshapis.com/" >> "$GITHUB_ENV" + echo "testEnvironmentName=OSS CI" >> "$GITHUB_ENV" + echo "imageTag=$build.BuildNumber" >> "$GITHUB_ENV" diff --git a/.github/fhir-oss-build-variables.yml b/.github/fhir-oss-build-variables.yml deleted file mode 100644 index 7f28b89720..0000000000 --- a/.github/fhir-oss-build-variables.yml +++ /dev/null @@ -1,36 +0,0 @@ -# DESCRIPTION: -# Variables used during builds. - -variables: - buildConfiguration: 'Release' - defaultBuildFramework: 'net8.0' - azureSubscriptionEndpoint: 'docker-build' - azureContainerRegistryName: 'healthplatformregistry' - azureContainerRegistry: '$(azureContainerRegistryName).azurecr.io' - composeLocation: 'build/docker/docker-compose.yaml' - DeploymentEnvironmentNameSql: '$(DeploymentEnvironmentName)-sql' - DeploymentEnvironmentNameR4: '$(DeploymentEnvironmentName)-r4' - DeploymentEnvironmentNameR4Sql: '$(DeploymentEnvironmentNameR4)-sql' - DeploymentEnvironmentNameR4B: '$(DeploymentEnvironmentName)-r4b' - DeploymentEnvironmentNameR4BSql: '$(DeploymentEnvironmentNameR4B)-sql' - DeploymentEnvironmentNameR5: '$(DeploymentEnvironmentName)-r5' - DeploymentEnvironmentNameR5Sql: '$(DeploymentEnvironmentNameR5)-sql' - TestEnvironmentUrl: 'https://$(DeploymentEnvironmentName).azurewebsites.net' - # These variables are not used in the deployment scripts, but are used in the E2E tests files. 
- TestEnvironmentUrl_Sql: 'https://$(DeploymentEnvironmentName)-sql.azurewebsites.net' - TestEnvironmentUrl_R4: 'https://$(DeploymentEnvironmentName)-r4.azurewebsites.net' - TestEnvironmentUrl_R4_Sql: 'https://$(DeploymentEnvironmentName)-r4-sql.azurewebsites.net' - TestEnvironmentUrl_R4B: 'https://$(DeploymentEnvironmentName)-r4b.azurewebsites.net' - TestEnvironmentUrl_R4B_Sql: 'https://$(DeploymentEnvironmentName)-r4b-sql.azurewebsites.net' - TestEnvironmentUrl_R5: 'https://$(DeploymentEnvironmentName)-r5.azurewebsites.net' - TestEnvironmentUrl_R5_Sql: 'https://$(DeploymentEnvironmentName)-r5-sql.azurewebsites.net' - #----------------------------------------------------------------------------------------- - TestClientUrl: 'https://$(DeploymentEnvironmentName)-client/' - ConnectedServiceName: 'Microsoft Health Open Source Subscription' - WindowsVmImage: 'windows-latest' - LinuxVmImage: 'ubuntu-latest' - TestApplicationResource: 'https://$(DeploymentEnvironmentName).$(tenantDomain)' - # The following is set by a build Pipeline variable: - # DefaultLinuxPool: 'Azure Pipelines' - # SharedLinuxPool: 'Azure Pipelines' - #----------------------------------------------------------------------------------------- diff --git a/.github/fhir-oss-ci-test-enviroment-variables.yml b/.github/fhir-oss-ci-test-enviroment-variables.yml deleted file mode 100644 index 6afcea9248..0000000000 --- a/.github/fhir-oss-ci-test-enviroment-variables.yml +++ /dev/null @@ -1,10 +0,0 @@ -variables: - ResourceGroupRegion: 'southcentralus' - # Due to deleting a keyvault with purge protection we must use a name other than msh-fhir-ci for 90 days after 5/20/2021. 
- resourceGroupRoot: 'msh-fhir-ci4' - appServicePlanName: '$(resourceGroupRoot)-linux' - DeploymentEnvironmentName: '$(resourceGroupRoot)' - ResourceGroupName: '$(resourceGroupRoot)' - CrucibleEnvironmentUrl: 'https://crucible.mshapis.com/' - TestEnvironmentName: 'OSS CI' - ImageTag: '$(build.BuildNumber)' From 5577e2a00cd4a2ac9c74c3313d2b19036994c239 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 14 Mar 2024 11:46:17 -0700 Subject: [PATCH 018/155] Added setup variables --- .github/workflows/fhir-oss-ci-pipeline.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 9dc7dde3ec..1e740fe657 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -16,7 +16,10 @@ jobs: uses: actions/checkout@v4 with: fetch-depth: 0 - + - name: Set Build Variables + uses: ./.github/actions/set-build-variables + - name: Set CI Test Variables + uses: ./.github/actions/setup-ci-variables - name: Install Latest .Net SDK uses: actions/setup-dotnet@v4 @@ -27,3 +30,5 @@ jobs: - name: Determine Semver id: semver uses: ./.github/actions/update-semver + - name: Clean Storage Accounts + uses: ./.github/actions/clean-storage-accounts From aaa5027002a491e2b9995662f5e79355827e18d2 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 14 Mar 2024 11:48:47 -0700 Subject: [PATCH 019/155] Wrong path --- .github/workflows/fhir-oss-ci-pipeline.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 1e740fe657..874b502a4a 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -17,7 +17,7 @@ jobs: with: fetch-depth: 0 - name: Set Build Variables - uses: ./.github/actions/set-build-variables + uses: ./.github/actions/setup-build-variables - name: Set CI Test Variables 
uses: ./.github/actions/setup-ci-variables - name: Install Latest .Net SDK @@ -30,5 +30,5 @@ jobs: - name: Determine Semver id: semver uses: ./.github/actions/update-semver - - name: Clean Storage Accounts - uses: ./.github/actions/clean-storage-accounts + # - name: Clean Storage Accounts + # uses: ./.github/actions/clean-storage-accounts From 2ca4a4b8f207ed04fb7e17f695b068e6df0e48ac Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 14 Mar 2024 12:05:46 -0700 Subject: [PATCH 020/155] Added shell property --- .github/actions/setup-build-variables/action.yml | 2 ++ .github/actions/setup-ci-variables/action.yml | 1 + .github/workflows/fhir-oss-ci-pipeline.yml | 1 - 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/actions/setup-build-variables/action.yml b/.github/actions/setup-build-variables/action.yml index 69eee98af1..b78873f84b 100644 --- a/.github/actions/setup-build-variables/action.yml +++ b/.github/actions/setup-build-variables/action.yml @@ -6,6 +6,7 @@ runs: steps: - name: Set Build Variables id: defaultVariables + shell: bash run: | echo "buildConfiguration=Release" >> "$GITHUB_ENV" echo "defaultBuildFramework=net8.0" >> "$GITHUB_ENV" @@ -15,6 +16,7 @@ runs: echo "composeLocation=build/docker/docker-compose.yaml" >> "$GITHUB_ENV" - name: Set Build Urls using Deployment Environment + shell: bash run: | echo "azureContainerRegistry='$azureContainerRegistryName'.azurecr.io" >> "$GITHUB_ENV" echo "deploymentEnvironmentNameSql='$deploymentEnvironmentName-sql' >> "$GITHUB_ENV" diff --git a/.github/actions/setup-ci-variables/action.yml b/.github/actions/setup-ci-variables/action.yml index 03e75a1201..50a6936df8 100644 --- a/.github/actions/setup-ci-variables/action.yml +++ b/.github/actions/setup-ci-variables/action.yml @@ -6,6 +6,7 @@ runs: steps: - name: Set CI Build Variables id: defaultVariables + shell: bash run: | echo "resourceGroupRegion=southcentralus" >> "$GITHUB_ENV" echo "resourceGroupRoot=msh-fhir-ci4" >> "$GITHUB_ENV" diff 
--git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 874b502a4a..72843fcb40 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -1,7 +1,6 @@ # DESCRIPTION: # Builds, tests, and packages the solution for the main branch. -run-name: ${{github.ref_name}}-${{github.run_id}} on: pull_request From abd804390e84dbd68fc5f8ff4eb1e802070bed13 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 15 Mar 2024 09:49:53 -0700 Subject: [PATCH 021/155] Updated clean storage accounts action. Removed setup ci build variables and moved into fhir-oss-ci-pipeline. --- .github/actions/clean-storage-accounts.yml | 31 -------------- .../actions/clean-storage-accounts/action.yml | 29 +++++++++++++ .github/actions/setup-ci-variables/action.yml | 18 -------- .github/workflows/fhir-oss-ci-pipeline.yml | 42 +++++++++++++++---- 4 files changed, 63 insertions(+), 57 deletions(-) delete mode 100644 .github/actions/clean-storage-accounts.yml create mode 100644 .github/actions/clean-storage-accounts/action.yml delete mode 100644 .github/actions/setup-ci-variables/action.yml diff --git a/.github/actions/clean-storage-accounts.yml b/.github/actions/clean-storage-accounts.yml deleted file mode 100644 index 40c5260f04..0000000000 --- a/.github/actions/clean-storage-accounts.yml +++ /dev/null @@ -1,31 +0,0 @@ -parameters: -- name: environmentName - type: string - -jobs: -- job: "cleanStorageAccounts" - pool: - vmImage: $(WindowsVmImage) - steps: - - task: AzurePowerShell@4 - displayName: 'Clean Storage Accounts' - continueOnError: true - inputs: - azureSubscription: $(ConnectedServiceName) - azurePowerShellVersion: latestVersion - ScriptType: inlineScript - Inline: | - $currentUtcTime = [DateTime]::UtcNow - - $storageAccounts = Get-AzStorageAccount -ResourceGroupName ${{ parameters.environmentName }} - foreach ($storageAccount in $storageAccounts) { - - $storageContainers = Get-AzStorageContainer 
-Name * -Context $storageAccount.Context - foreach ($container in $storageContainers) { - $ageDiff = $currentUtcTime - $container.CloudBlobContainer.Properties.LastModified.UtcDateTime - if($ageDiff.TotalDays -ge 3) { - Write-Host "Deleting container $($container.Name)" - $container.CloudBlobContainer.Delete() - } - } - } \ No newline at end of file diff --git a/.github/actions/clean-storage-accounts/action.yml b/.github/actions/clean-storage-accounts/action.yml new file mode 100644 index 0000000000..8c4283c775 --- /dev/null +++ b/.github/actions/clean-storage-accounts/action.yml @@ -0,0 +1,29 @@ +name: clean storage Accounts +description: Removes blob containers from test storage accounts + +inputs: + environmentName: + description: Deployment environment name + required: true + +runs: + using: 'composite' + steps: + - name: Clean Storage Accounts + uses: azure/powershell@v1 + with: + inlineScript: | + $currentUtcTime = [DateTime]::UtcNow + + $storageAccounts = Get-AzStorageAccount -ResourceGroupName ${{ inputs.environmentName }} + foreach ($storageAccount in $storageAccounts) { + + $storageContainers = Get-AzStorageContainer -Name * -Context $storageAccount.Context + foreach ($container in $storageContainers) { + $ageDiff = $currentUtcTime - $container.CloudBlobContainer.Properties.LastModified.UtcDateTime + if($ageDiff.TotalDays -ge 3) { + Write-Host "Deleting container $($container.Name)" + $container.CloudBlobContainer.Delete() + } + } + } diff --git a/.github/actions/setup-ci-variables/action.yml b/.github/actions/setup-ci-variables/action.yml deleted file mode 100644 index 50a6936df8..0000000000 --- a/.github/actions/setup-ci-variables/action.yml +++ /dev/null @@ -1,18 +0,0 @@ -name: setup ci build variables -description: Sets ci build specific variables. 
- -runs: - using: composite - steps: - - name: Set CI Build Variables - id: defaultVariables - shell: bash - run: | - echo "resourceGroupRegion=southcentralus" >> "$GITHUB_ENV" - echo "resourceGroupRoot=msh-fhir-ci4" >> "$GITHUB_ENV" - echo "appServicePlanName=$resourceGroupRoot-linux" >> "$GITHUB_ENV" - echo "deploymentEnvironmentName=$resourceGroupRoot" >> "$GITHUB_ENV" - echo "resourceGroupName=$resourceGroupRoot" >> "$GITHUB_ENV" - echo "crucibleEnvironmentUrl=https://crucible.mshapis.com/" >> "$GITHUB_ENV" - echo "testEnvironmentName=OSS CI" >> "$GITHUB_ENV" - echo "imageTag=$build.BuildNumber" >> "$GITHUB_ENV" diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 72843fcb40..741759e28d 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -7,18 +7,33 @@ on: permissions: id-token: write contents: read + +env: + buildConfiguration: Release + defaultBuildFramework: net8.0 + azureSubscriptionEndpoint: docker-build + azureContainerRegistryName: healthplatformregistry + connectedServiceName: Microsoft Health Open Source Subscription + composeLocation: build/docker/docker-compose.yaml + resourceGroupRegion: southcentralus + resourceGroupRoot: msh-fhir-ci4 + crucibleEnvironmentUrl: https://crucible.mshapis.com/ + testEnvironmentName: OSS CI + imageTag: ${{github.run_number}} + jobs: - checkout: - runs-on: ubuntu-latest + setup: + runs-on: [self-hosted, 1ES.Pool=GithubRunPool] + env: + deploymentEnvironmentName: $resourceGroupRoot + appServicePlanName: $resourceGroupRoot-linux + resourceGroupName: $resourceGroupRoot steps: - name: Checkout uses: actions/checkout@v4 with: fetch-depth: 0 - - name: Set Build Variables - uses: ./.github/actions/setup-build-variables - - name: Set CI Test Variables - uses: ./.github/actions/setup-ci-variables + - name: Install Latest .Net SDK uses: actions/setup-dotnet@v4 @@ -26,8 +41,19 @@ jobs: uses: actions/setup-dotnet@v4 with: 
dotnet-version: 6.0.x + - name: Determine Semver id: semver uses: ./.github/actions/update-semver - # - name: Clean Storage Accounts - # uses: ./.github/actions/clean-storage-accounts + + - name: Azure Login + uses: azure/login@v2 + with: + client-id: ${{secrets.AZURE_CLIENT_ID}} + subscription-id: ${{secrets.AZURE_SUBSCRIPTION_ID}} + tenant-id: ${{secrets.AZURE_TENANT_ID}} + + - name: Clean Storage Accounts + uses: ./.github/actions/clean-storage-accounts + with: + environmentName: $deploymentEnvironmentName From a21accd8b2403075b5f6d78aef9b7a176972ae00 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 15 Mar 2024 09:54:10 -0700 Subject: [PATCH 022/155] Changing pool --- .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 741759e28d..e1f32207df 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -23,7 +23,7 @@ env: jobs: setup: - runs-on: [self-hosted, 1ES.Pool=GithubRunPool] + runs-on: linux-latest env: deploymentEnvironmentName: $resourceGroupRoot appServicePlanName: $resourceGroupRoot-linux From bc0d82ba76d100fa0161251b49067d6420f5f764 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 15 Mar 2024 10:12:11 -0700 Subject: [PATCH 023/155] Wrong tag --- .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index e1f32207df..e8d8ec22d1 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -23,7 +23,7 @@ env: jobs: setup: - runs-on: linux-latest + runs-on: ubuntu-latest env: deploymentEnvironmentName: $resourceGroupRoot appServicePlanName: $resourceGroupRoot-linux From 7eb344a832c900ebfe877d5e782015f678837e54 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: 
Fri, 15 Mar 2024 10:15:27 -0700 Subject: [PATCH 024/155] Missing input added. --- .github/actions/clean-storage-accounts/action.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/actions/clean-storage-accounts/action.yml b/.github/actions/clean-storage-accounts/action.yml index 8c4283c775..9a818a08ad 100644 --- a/.github/actions/clean-storage-accounts/action.yml +++ b/.github/actions/clean-storage-accounts/action.yml @@ -12,6 +12,7 @@ runs: - name: Clean Storage Accounts uses: azure/powershell@v1 with: + azPSVersion: "latest" inlineScript: | $currentUtcTime = [DateTime]::UtcNow From d62ee55e66c2c9039fbd3ec53c1930486dab07b7 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 15 Mar 2024 10:20:47 -0700 Subject: [PATCH 025/155] Wrong property call --- .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index e8d8ec22d1..254c5e7d91 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -56,4 +56,4 @@ jobs: - name: Clean Storage Accounts uses: ./.github/actions/clean-storage-accounts with: - environmentName: $deploymentEnvironmentName + environmentName: $env.deploymentEnvironmentName From 4271fbe02c9bc872e8d4cba44006bf595971a527 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 15 Mar 2024 10:40:00 -0700 Subject: [PATCH 026/155] Moved some variables to repository level due to limitations --- .github/workflows/fhir-oss-ci-pipeline.yml | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 254c5e7d91..63efc4e23d 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -15,19 +15,15 @@ env: azureContainerRegistryName: healthplatformregistry connectedServiceName: Microsoft Health Open Source 
Subscription composeLocation: build/docker/docker-compose.yaml - resourceGroupRegion: southcentralus - resourceGroupRoot: msh-fhir-ci4 - crucibleEnvironmentUrl: https://crucible.mshapis.com/ - testEnvironmentName: OSS CI imageTag: ${{github.run_number}} jobs: setup: runs-on: ubuntu-latest env: - deploymentEnvironmentName: $resourceGroupRoot - appServicePlanName: $resourceGroupRoot-linux - resourceGroupName: $resourceGroupRoot + deploymentEnvironmentName: $vars.CIRESOURCEGROUPROOT + appServicePlanName: $vars.CIRESOURCEGROUPROOT-linux + resourceGroupName: $vars.CIRESOURCEGROUPROOT steps: - name: Checkout uses: actions/checkout@v4 @@ -56,4 +52,4 @@ jobs: - name: Clean Storage Accounts uses: ./.github/actions/clean-storage-accounts with: - environmentName: $env.deploymentEnvironmentName + environmentName: $vars.CIRESOURCEGROUPROOT From 1e4f71ebe96132910cbce290d9e594852b618f12 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 15 Mar 2024 12:56:42 -0700 Subject: [PATCH 027/155] property update --- .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 63efc4e23d..9084d23bc8 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -52,4 +52,4 @@ jobs: - name: Clean Storage Accounts uses: ./.github/actions/clean-storage-accounts with: - environmentName: $vars.CIRESOURCEGROUPROOT + environmentName: ${{vars.CIRESOURCEGROUPROOT}} From 5cc4e8d00dcfa3e3c3516b2f4eed98d3f1a4aef5 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 15 Mar 2024 13:34:09 -0700 Subject: [PATCH 028/155] Missing az context --- .github/actions/clean-storage-accounts/action.yml | 2 +- .github/workflows/fhir-oss-ci-pipeline.yml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/actions/clean-storage-accounts/action.yml b/.github/actions/clean-storage-accounts/action.yml index 
9a818a08ad..2b91090852 100644 --- a/.github/actions/clean-storage-accounts/action.yml +++ b/.github/actions/clean-storage-accounts/action.yml @@ -15,7 +15,7 @@ runs: azPSVersion: "latest" inlineScript: | $currentUtcTime = [DateTime]::UtcNow - + Get-AzContext $storageAccounts = Get-AzStorageAccount -ResourceGroupName ${{ inputs.environmentName }} foreach ($storageAccount in $storageAccounts) { diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 9084d23bc8..1e2d89ea55 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -48,6 +48,7 @@ jobs: client-id: ${{secrets.AZURE_CLIENT_ID}} subscription-id: ${{secrets.AZURE_SUBSCRIPTION_ID}} tenant-id: ${{secrets.AZURE_TENANT_ID}} + enable-AzPSSession: true - name: Clean Storage Accounts uses: ./.github/actions/clean-storage-accounts From 4dd3509b22a33c1137c28e9f0f9acec82dbba4e1 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 15 Mar 2024 13:44:36 -0700 Subject: [PATCH 029/155] Created integration test db cleanup action --- .../action.yml | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 .github/actions/cleanup-integration-test-databases/action.yml diff --git a/.github/actions/cleanup-integration-test-databases/action.yml b/.github/actions/cleanup-integration-test-databases/action.yml new file mode 100644 index 0000000000..993c861dfb --- /dev/null +++ b/.github/actions/cleanup-integration-test-databases/action.yml @@ -0,0 +1,25 @@ +name: cleanup integration test databases +description: Deletes databases used for integration tests from previous runs + +inputs: + environmentName: + description: Deployment environment name + required: true + +runs: + using: 'composite' + steps: + - name: Remove Integration Test Databases + uses: azure/powershell@v1 + with: + azPSVersion: "latest" + inlineScript: | + Get-AzContext + $testNamePatterns = 
@("SNAPSHOT*","FHIRCOMPATIBILITYTEST*","FHIRINTEGRATIONTEST*","FHIRRESOURCECHANGEDISABLEDTEST*","BASE*","SNAPSHOT*") + foreach ($pattern in $testNamePatterns) { + $resources = Get-AzResource -ResourceGroupName ${{ inputs.environmentName }} -ResourceType 'Microsoft.Sql/servers/databases' -Name $pattern + foreach ($resource in $resources) { + Write-Host "Cleaning up $($resource.ResourceName)" + Remove-AzResource -ResourceId $resource.ResourceId -Force + } + } From 77595da164477a8bbdecb7578e8774fbdd051471 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 15 Mar 2024 14:46:54 -0700 Subject: [PATCH 030/155] Adding in build job --- .github/actions/dotnet-build/action.yml | 28 ++++++++++++++++++++ .github/workflows/fhir-oss-ci-pipeline.yml | 30 ++++++++++++++++++++++ 2 files changed, 58 insertions(+) create mode 100644 .github/actions/dotnet-build/action.yml diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml new file mode 100644 index 0000000000..a0b91b46c1 --- /dev/null +++ b/.github/actions/dotnet-build/action.yml @@ -0,0 +1,28 @@ +name: dotnet build +description: Builds the packages and ensures their quality by running tests. +inputs: + assemblyVersion: + description: The scaler assembly's version. + required: true + buildConfiguration: + default: Debug + description: The dotnet build configuration. + required: false + fileVersion: + description: The scaler assembly's file version. + required: true + informationalVersion: + description: The scaler assembly's informational version. + required: true + dotnetVersion: + description: The version of dotnet to use. 
+ required: true + +runs: + using: composite + steps: + - name: Setup dotnet ${{ inputs.dotnetVersion }} + uses: actions/setup-dotnet@v3 + with: + dotnet-version: ${{ inputs.dotnetVersion}} + \ No newline at end of file diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 1e2d89ea55..f7e77feb94 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -54,3 +54,33 @@ jobs: uses: ./.github/actions/clean-storage-accounts with: environmentName: ${{vars.CIRESOURCEGROUPROOT}} + buildonLinux: + runs-on: ubuntu-latest + needs: setup + strategy: + matrix: + dotnet-version: [ '8.0.x', '6.0.x' ] + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Setup dotnet ${{ matrix.dotnet-version }} + uses: actions/setup-dotnet@v3 + with: + dotnet-version: ${{ matrix.dotnet-version }} + + - name: Build + uses: ./.github/actions/dotnet-build + with: + assemblyVersion: ${{steps.semver.outputs.assemblyVersion}} + buildConfiguration: ${{env.buildConfiguration}} + fileVersion: ${{steps.semver.outputs.fileVersion}} + informationalVersion: ${{steps.semver.outputs.informationalVersion}} + dotnetVersion: ${{matrix.dotnet-version}} + # - name: Test + # run: dotnet test --configuration ${{env.buildConfiguration}} + + # - name: Publish + # run: dotnet publish --configuration ${{env.buildConfiguration}} --output ./publish From 61b45ffd1df91179114e3854d470e985120b7cac Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 20 Mar 2024 09:16:48 -0700 Subject: [PATCH 031/155] Added build action. 
--- .github/actions/dotnet-build/action.yml | 28 ++++++++++++++++++++-- .github/workflows/fhir-oss-ci-pipeline.yml | 8 +------ 2 files changed, 27 insertions(+), 9 deletions(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index a0b91b46c1..c5e3a71b7c 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -22,7 +22,31 @@ runs: using: composite steps: - name: Setup dotnet ${{ inputs.dotnetVersion }} - uses: actions/setup-dotnet@v3 + uses: actions/setup-dotnet@v4 with: dotnet-version: ${{ inputs.dotnetVersion}} - \ No newline at end of file + + - name: Restore Dependencies + shell: bash + run: dotnet restore + + - name: Build + shell: bash + run: dotnet build --configuration $(inputs.buildConfiguration) -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="$(inputs.assemblyVersion)" -p:FileVersion="$(inputs.fileVersion)" -p:InformationalVersion="$(inputs.informationalVersion)" -p:Version="$(majorMinorPatch)" -warnaserror -f ${{inputs.dotnetVersion}} + + - name: Test + shell: bash + run: dotnet test --no-restore --no-build --verbosity normal -f ${{ inputs.dotnetVersion}} --collect:"XPlat Code Coverage" --results-directory ./coverage + - name: Code Coverage Report + uses: irongut/CodeCoverageSummary@v1.3.0 + with: + filename: coverage/**/coverage.cobertura.xml + badge: true + fail_below_min: true + format: markdown + hide_branch_rate: false + hide_complexity: true + indicators: true + output: both + thresholds: '60 80' +# Code coverage? 
How often to trigger diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index f7e77feb94..64f609161a 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -10,7 +10,6 @@ permissions: env: buildConfiguration: Release - defaultBuildFramework: net8.0 azureSubscriptionEndpoint: docker-build azureContainerRegistryName: healthplatformregistry connectedServiceName: Microsoft Health Open Source Subscription @@ -33,11 +32,6 @@ jobs: - name: Install Latest .Net SDK uses: actions/setup-dotnet@v4 - - name: Install Older .Net SDK - uses: actions/setup-dotnet@v4 - with: - dotnet-version: 6.0.x - - name: Determine Semver id: semver uses: ./.github/actions/update-semver @@ -67,7 +61,7 @@ jobs: fetch-depth: 0 - name: Setup dotnet ${{ matrix.dotnet-version }} - uses: actions/setup-dotnet@v3 + uses: actions/setup-dotnet@v4 with: dotnet-version: ${{ matrix.dotnet-version }} From 940761bc8a1069e05a09a4f1fb94a74ab929679d Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 20 Mar 2024 10:55:18 -0700 Subject: [PATCH 032/155] Updated variables references. --- .github/actions/dotnet-build/action.yml | 7 +++++-- .github/actions/update-semver/action.yml | 3 +++ .github/workflows/fhir-oss-ci-pipeline.yml | 6 +----- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index c5e3a71b7c..0fa21b1ed8 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -17,6 +17,9 @@ inputs: dotnetVersion: description: The version of dotnet to use. required: true + majorMinorPatch: + description: The major.minor.patch version to use. 
+ required: true runs: using: composite @@ -32,11 +35,11 @@ runs: - name: Build shell: bash - run: dotnet build --configuration $(inputs.buildConfiguration) -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="$(inputs.assemblyVersion)" -p:FileVersion="$(inputs.fileVersion)" -p:InformationalVersion="$(inputs.informationalVersion)" -p:Version="$(majorMinorPatch)" -warnaserror -f ${{inputs.dotnetVersion}} + run: dotnet build --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror -f ${{inputs.dotnetVersion}} - name: Test shell: bash - run: dotnet test --no-restore --no-build --verbosity normal -f ${{ inputs.dotnetVersion}} --collect:"XPlat Code Coverage" --results-directory ./coverage + run: dotnet test --no-restore --no-build --verbosity normal -f ${{inputs.dotnetVersion}} --collect:"XPlat Code Coverage" --results-directory ./coverage - name: Code Coverage Report uses: irongut/CodeCoverageSummary@v1.3.0 with: diff --git a/.github/actions/update-semver/action.yml b/.github/actions/update-semver/action.yml index 9401cb0dfc..6a7c4b1a18 100644 --- a/.github/actions/update-semver/action.yml +++ b/.github/actions/update-semver/action.yml @@ -18,6 +18,9 @@ outputs: nugetVersion: description: The NuGet package version for the shared components value: ${{ steps.version.outputs.GitVersion_SemVer }} + majorMinorPatch: + description: The major.minor.patch version for the shared components + value: ${{ steps.version.outputs.GitVersion_MajorMinorPatch }} runs: using: 'composite' steps: diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 64f609161a..26cadcde38 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -73,8 +73,4 @@ jobs: fileVersion: 
${{steps.semver.outputs.fileVersion}} informationalVersion: ${{steps.semver.outputs.informationalVersion}} dotnetVersion: ${{matrix.dotnet-version}} - # - name: Test - # run: dotnet test --configuration ${{env.buildConfiguration}} - - # - name: Publish - # run: dotnet publish --configuration ${{env.buildConfiguration}} --output ./publish + majorMinorPatch: ${{steps.semver.outputs.majorMinorPatch}} From 3ef15c87ed3fff6d48b157c8ce59213f6430718a Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 20 Mar 2024 15:00:37 -0700 Subject: [PATCH 033/155] Working on multiple build targets --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 0fa21b1ed8..ddd429a106 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -35,7 +35,7 @@ runs: - name: Build shell: bash - run: dotnet build --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror -f ${{inputs.dotnetVersion}} + run: dotnet build --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror - name: Test shell: bash From 56962784f9375c00af09935e2b9842205f79df0f Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 20 Mar 2024 15:00:51 -0700 Subject: [PATCH 034/155] Modified build targets --- .github/workflows/fhir-oss-ci-pipeline.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 
26cadcde38..02378797c0 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -54,6 +54,7 @@ jobs: strategy: matrix: dotnet-version: [ '8.0.x', '6.0.x' ] + targetFrameworks: [ 'net8', 'net6'] steps: - name: Checkout uses: actions/checkout@v4 From 6502ed880ca289569be8144a65636257f2baa166 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 20 Mar 2024 17:39:06 -0700 Subject: [PATCH 035/155] Had wrong output properties for semver --- .github/actions/update-semver/action.yml | 20 ++++++++++---------- .github/workflows/fhir-oss-ci-pipeline.yml | 1 - 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/.github/actions/update-semver/action.yml b/.github/actions/update-semver/action.yml index 6a7c4b1a18..b793cb74e8 100644 --- a/.github/actions/update-semver/action.yml +++ b/.github/actions/update-semver/action.yml @@ -7,20 +7,20 @@ inputs: default: './GitVersion.yml' outputs: assemblyVersion: - description: The assembly version for the shared components - value: ${{ steps.version.outputs.GitVersion_AssemblySemVer }} + description: The assembly version for the build + value: ${{ steps.version.outputs.assemblySemVer }} fileVersion: - description: The assembly file version for the shared components - value: ${{ steps.version.outputs.GitVersion_AssemblySemFileVer }} + description: The assembly file version for the build + value: ${{ steps.version.outputs.assemblySemFileVer }} informationalVersion: - description: The assembly informational version for the shared components - value: ${{ steps.version.outputs.GitVersion_InformationalVersion }} + description: The assembly informational version for the build + value: ${{ steps.version.outputs.informationalVersion }} nugetVersion: - description: The NuGet package version for the shared components - value: ${{ steps.version.outputs.GitVersion_SemVer }} + description: The NuGet package version for the build + value: ${{ steps.version.outputs.semVer }} majorMinorPatch: 
- description: The major.minor.patch version for the shared components - value: ${{ steps.version.outputs.GitVersion_MajorMinorPatch }} + description: The major.minor.patch version for the build + value: ${{ steps.version.outputs.majorMinorPatch }} runs: using: 'composite' steps: diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 02378797c0..26cadcde38 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -54,7 +54,6 @@ jobs: strategy: matrix: dotnet-version: [ '8.0.x', '6.0.x' ] - targetFrameworks: [ 'net8', 'net6'] steps: - name: Checkout uses: actions/checkout@v4 From 274c76b2251dbd6ef79ad0ec8f4941d4ac3c75cf Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 21 Mar 2024 09:27:36 -0700 Subject: [PATCH 036/155] Fixing output mapping --- .github/workflows/fhir-oss-ci-pipeline.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 26cadcde38..110fa04937 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -33,7 +33,6 @@ jobs: uses: actions/setup-dotnet@v4 - name: Determine Semver - id: semver uses: ./.github/actions/update-semver - name: Azure Login @@ -68,9 +67,9 @@ jobs: - name: Build uses: ./.github/actions/dotnet-build with: - assemblyVersion: ${{steps.semver.outputs.assemblyVersion}} + assemblyVersion: ${{needs.setup.outputs.assemblyVersion}} buildConfiguration: ${{env.buildConfiguration}} - fileVersion: ${{steps.semver.outputs.fileVersion}} - informationalVersion: ${{steps.semver.outputs.informationalVersion}} + fileVersion: ${{needs.setup.outputs.fileVersion}} + informationalVersion: ${{needs.setup.outputs.informationalVersion}} dotnetVersion: ${{matrix.dotnet-version}} - majorMinorPatch: ${{steps.semver.outputs.majorMinorPatch}} + majorMinorPatch: 
${{needs.setup.outputs.majorMinorPatch}} From 1f4ccc5dd89f672e89c0689e42c354d9836bfc56 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 21 Mar 2024 09:43:53 -0700 Subject: [PATCH 037/155] Updating output of semver --- .github/actions/update-semver/action.yml | 10 +++++----- .github/workflows/fhir-oss-ci-pipeline.yml | 9 +++++---- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/.github/actions/update-semver/action.yml b/.github/actions/update-semver/action.yml index b793cb74e8..ef491516c1 100644 --- a/.github/actions/update-semver/action.yml +++ b/.github/actions/update-semver/action.yml @@ -8,19 +8,19 @@ inputs: outputs: assemblyVersion: description: The assembly version for the build - value: ${{ steps.version.outputs.assemblySemVer }} + value: ${{ steps.version.outputs.GitVersion_AssemblySemVer }} fileVersion: description: The assembly file version for the build - value: ${{ steps.version.outputs.assemblySemFileVer }} + value: ${{ steps.version.outputs.GitVersion_AssemblySemFileVer }} informationalVersion: description: The assembly informational version for the build - value: ${{ steps.version.outputs.informationalVersion }} + value: ${{ steps.version.outputs.GitVersion_InformationalVersion }} nugetVersion: description: The NuGet package version for the build - value: ${{ steps.version.outputs.semVer }} + value: ${{ steps.version.outputs.GitVersion_SemVer }} majorMinorPatch: description: The major.minor.patch version for the build - value: ${{ steps.version.outputs.majorMinorPatch }} + value: ${{ steps.version.outputs.GitVersion_MajorMinorPatch }} runs: using: 'composite' steps: diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 110fa04937..eb96c0afd1 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -33,6 +33,7 @@ jobs: uses: actions/setup-dotnet@v4 - name: Determine Semver + id: version uses: ./.github/actions/update-semver - 
name: Azure Login @@ -67,9 +68,9 @@ jobs: - name: Build uses: ./.github/actions/dotnet-build with: - assemblyVersion: ${{needs.setup.outputs.assemblyVersion}} + assemblyVersion: ${{needs.setup.version.outputs.assemblyVersion}} buildConfiguration: ${{env.buildConfiguration}} - fileVersion: ${{needs.setup.outputs.fileVersion}} - informationalVersion: ${{needs.setup.outputs.informationalVersion}} + fileVersion: ${{needs.setup.version.outputs.fileVersion}} + informationalVersion: ${{needs.setup.version.outputs.informationalVersion}} dotnetVersion: ${{matrix.dotnet-version}} - majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} + majorMinorPatch: ${{needs.setup.version.outputs.majorMinorPatch}} From 5c1a84b1721f3a83663f46397a4525915c3312eb Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 21 Mar 2024 09:57:39 -0700 Subject: [PATCH 038/155] Further output param adjustment --- .github/workflows/fhir-oss-ci-pipeline.yml | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index eb96c0afd1..e413d08789 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -23,6 +23,11 @@ jobs: deploymentEnvironmentName: $vars.CIRESOURCEGROUPROOT appServicePlanName: $vars.CIRESOURCEGROUPROOT-linux resourceGroupName: $vars.CIRESOURCEGROUPROOT + outputs: + assemblyVersion: ${{ steps.version.outputs.assemblyVersion }} + fileVersion: ${{ steps.version.outputs.fileVersion }} + informationalVersion: ${{ steps.version.outputs.informationalVersion }} + majorMinorPatch: ${{ steps.version.outputs.majorMinorPatch }} steps: - name: Checkout uses: actions/checkout@v4 @@ -68,9 +73,9 @@ jobs: - name: Build uses: ./.github/actions/dotnet-build with: - assemblyVersion: ${{needs.setup.version.outputs.assemblyVersion}} + assemblyVersion: ${{needs.setup.outputs.assemblyVersion}} buildConfiguration: ${{env.buildConfiguration}} - 
fileVersion: ${{needs.setup.version.outputs.fileVersion}} - informationalVersion: ${{needs.setup.version.outputs.informationalVersion}} + fileVersion: ${{needs.setup.outputs.fileVersion}} + informationalVersion: ${{needs.setup.outputs.informationalVersion}} dotnetVersion: ${{matrix.dotnet-version}} - majorMinorPatch: ${{needs.setup.version.outputs.majorMinorPatch}} + majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} From 4d09faad63c5459548a541ff4d2e115038a198a0 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 21 Mar 2024 10:49:13 -0700 Subject: [PATCH 039/155] Specifying unittest project to run --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index ddd429a106..c44bb385b8 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -39,7 +39,7 @@ runs: - name: Test shell: bash - run: dotnet test --no-restore --no-build --verbosity normal -f ${{inputs.dotnetVersion}} --collect:"XPlat Code Coverage" --results-directory ./coverage + run: dotnet test '**/*UnitTests/*.csproj' --no-restore --no-build --verbosity normal -f ${{inputs.dotnetVersion}} --collect:"XPlat Code Coverage" --results-directory ./coverage - name: Code Coverage Report uses: irongut/CodeCoverageSummary@v1.3.0 with: From 8ec2d8a12c04ddfe186a5e90c787e30303957a63 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 21 Mar 2024 11:04:50 -0700 Subject: [PATCH 040/155] Fixing test call --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index c44bb385b8..15be811548 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -39,7 +39,7 @@ runs: - name: Test shell: bash - run: dotnet test '**/*UnitTests/*.csproj' --no-restore 
--no-build --verbosity normal -f ${{inputs.dotnetVersion}} --collect:"XPlat Code Coverage" --results-directory ./coverage + run: dotnet test **/*UnitTests/*.csproj --no-restore --no-build --verbosity normal --collect:"XPlat Code Coverage" --results-directory ./coverage - name: Code Coverage Report uses: irongut/CodeCoverageSummary@v1.3.0 with: From 08314cab1c736ac5418f6af57c99c5c650a8cac9 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 21 Mar 2024 11:44:17 -0700 Subject: [PATCH 041/155] Changed property for unit test run --- .github/actions/dotnet-build/action.yml | 23 ++++++++++++++++++---- .github/workflows/fhir-oss-ci-pipeline.yml | 16 ++++++++++++++- 2 files changed, 34 insertions(+), 5 deletions(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 15be811548..0e5785cc8a 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -35,11 +35,11 @@ runs: - name: Build shell: bash - run: dotnet build --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror + run: dotnet build Microsoft.Health.Fhir.sln --output output--configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror - name: Test shell: bash - run: dotnet test **/*UnitTests/*.csproj --no-restore --no-build --verbosity normal --collect:"XPlat Code Coverage" --results-directory ./coverage + run: dotnet test **/*UnitTests/*.csproj --no-restore --no-build --verbosity normal --collect:"XPlat Code Coverage" -s "CodeCoverage.runsettings" -v normal --results-directory 
./coverage - name: Code Coverage Report uses: irongut/CodeCoverageSummary@v1.3.0 with: @@ -51,5 +51,20 @@ runs: hide_complexity: true indicators: true output: both - thresholds: '60 80' -# Code coverage? How often to trigger + thresholds: '60 80' + # - run: mkdir -p coverage + # shell: bash + # - run: mkdir -p artifacts + # shell: bash + # - name: actions/upload-artifact + # uses: actions/upload-artifact@v4 + # with: + # name: fhirBuild-${{inputs.dotnetVersion}} + # path: artifacts + # retention-days: 1 + # - name: Upload Code Coverage Results + # uses: actions/upload-artifact@v4 + # with: + # name: fhirBuild-${{inputs.dotnetVersion}} + # path: artifacts + # retention-days: 1 diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index e413d08789..953e6de5a5 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -8,6 +8,11 @@ permissions: id-token: write contents: read +defaults: + run: + working-directory: src + shell: bash + env: buildConfiguration: Release azureSubscriptionEndpoint: docker-build @@ -53,7 +58,7 @@ jobs: uses: ./.github/actions/clean-storage-accounts with: environmentName: ${{vars.CIRESOURCEGROUPROOT}} - buildonLinux: + buildAndUnitTest: runs-on: ubuntu-latest needs: setup strategy: @@ -79,3 +84,12 @@ jobs: informationalVersion: ${{needs.setup.outputs.informationalVersion}} dotnetVersion: ${{matrix.dotnet-version}} majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} + runIntegrationTests: + runs-on: ubuntu-latest + needs : buildAndUnitTest + steps: + - name: Download Build Artifact for Testing + uses: actions/download-artifact@v4 + with: + name: fhirBuild-8.0.x + path: artifacts From 7065e5ffa914b6f8b8c7d05c550f524ca30c6842 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 21 Mar 2024 11:55:48 -0700 Subject: [PATCH 042/155] REmoving solution from build command --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 0e5785cc8a..8b1c16f9d0 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -35,7 +35,7 @@ runs: - name: Build shell: bash - run: dotnet build Microsoft.Health.Fhir.sln --output output--configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror + run: dotnet build --output output --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror - name: Test shell: bash From 49e3fe7579842190138dd36523cde262a6c233c7 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 21 Mar 2024 12:12:59 -0700 Subject: [PATCH 043/155] Removed output directory for build since it is a solution this won't work. 
--- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 8b1c16f9d0..e200d6eda9 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -35,7 +35,7 @@ runs: - name: Build shell: bash - run: dotnet build --output output --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror + run: dotnet build --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror - name: Test shell: bash From f255ccfc689e44dedff82832c395b8da7c39779b Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 21 Mar 2024 12:49:30 -0700 Subject: [PATCH 044/155] Ironing out test properties --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index e200d6eda9..3efb2b8dc3 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -39,7 +39,7 @@ runs: - name: Test shell: bash - run: dotnet test **/*UnitTests/*.csproj --no-restore --no-build --verbosity normal --collect:"XPlat Code Coverage" -s "CodeCoverage.runsettings" -v normal --results-directory ./coverage + run: dotnet test **/*UnitTests/*.csproj --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal --collect:"XPlat Code Coverage" -s "CodeCoverage.runsettings" -v normal --results-directory ./coverage - name: Code 
Coverage Report uses: irongut/CodeCoverageSummary@v1.3.0 with: From 5cec7ac364607c0d46e854fd9fb8316c09554d52 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 21 Mar 2024 13:07:35 -0700 Subject: [PATCH 045/155] Removed duplicated property --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 3efb2b8dc3..70b6b3e648 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -39,7 +39,7 @@ runs: - name: Test shell: bash - run: dotnet test **/*UnitTests/*.csproj --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal --collect:"XPlat Code Coverage" -s "CodeCoverage.runsettings" -v normal --results-directory ./coverage + run: dotnet test **/*UnitTests/*.csproj --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal --collect:"XPlat Code Coverage" -s "CodeCoverage.runsettings" --results-directory ./coverage - name: Code Coverage Report uses: irongut/CodeCoverageSummary@v1.3.0 with: From b5ad1fa54a4fc641beac390ec7faaaeac1fd0d90 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 21 Mar 2024 13:28:15 -0700 Subject: [PATCH 046/155] Removed code coverage as it does not appear to be working --- .github/actions/dotnet-build/action.yml | 26 ++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 70b6b3e648..02f5500d7a 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -39,19 +39,19 @@ runs: - name: Test shell: bash - run: dotnet test **/*UnitTests/*.csproj --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal --collect:"XPlat Code Coverage" -s "CodeCoverage.runsettings" --results-directory ./coverage - - name: Code Coverage Report - uses: 
irongut/CodeCoverageSummary@v1.3.0 - with: - filename: coverage/**/coverage.cobertura.xml - badge: true - fail_below_min: true - format: markdown - hide_branch_rate: false - hide_complexity: true - indicators: true - output: both - thresholds: '60 80' + run: dotnet test **/*UnitTests/*.csproj --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal + # - name: Code Coverage Report + # uses: irongut/CodeCoverageSummary@v1.3.0 + # with: + # filename: coverage/**/coverage.cobertura.xml + # badge: true + # fail_below_min: true + # format: markdown + # hide_branch_rate: false + # hide_complexity: true + # indicators: true + # output: both + # thresholds: '60 80' # - run: mkdir -p coverage # shell: bash # - run: mkdir -p artifacts From 79f49b85341a0a6accd0ef77b36c6b929c702efe Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 21 Mar 2024 13:55:30 -0700 Subject: [PATCH 047/155] Project path update --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 02f5500d7a..5bd37b4af5 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -39,7 +39,7 @@ runs: - name: Test shell: bash - run: dotnet test **/*UnitTests/*.csproj --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal + run: dotnet test "**\*UnitTests\*.csproj" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal # - name: Code Coverage Report # uses: irongut/CodeCoverageSummary@v1.3.0 # with: From 76c58088e779ed5cabf11b86495412e175e11008 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 21 Mar 2024 14:10:30 -0700 Subject: [PATCH 048/155] Pathing update --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 
5bd37b4af5..802c43695b 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -39,7 +39,7 @@ runs: - name: Test shell: bash - run: dotnet test "**\*UnitTests\*.csproj" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal + run: dotnet test "./**/*UnitTests/*.csproj" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal # - name: Code Coverage Report # uses: irongut/CodeCoverageSummary@v1.3.0 # with: From e77eec8567f50ad4aafbb1f84230d5087213b521 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 21 Mar 2024 15:03:45 -0700 Subject: [PATCH 049/155] Updating path of project --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 802c43695b..bd5e13ce70 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -39,7 +39,7 @@ runs: - name: Test shell: bash - run: dotnet test "./**/*UnitTests/*.csproj" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal + run: dotnet test "./Test/*UnitTests/*.csproj" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal # - name: Code Coverage Report # uses: irongut/CodeCoverageSummary@v1.3.0 # with: From a860ed6c66d47be4c06608c8b6957173db86aa77 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 22 Mar 2024 11:32:33 -0700 Subject: [PATCH 050/155] Update unit test path --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index bd5e13ce70..112e146125 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -39,7 +39,7 @@ runs: - name: Test shell: bash - run: dotnet test "./Test/*UnitTests/*.csproj" --configuration 
${{inputs.buildConfiguration}} --no-build --verbosity normal + run: dotnet test "${{github.workspace}}/Test/*UnitTests/*.csproj" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal # - name: Code Coverage Report # uses: irongut/CodeCoverageSummary@v1.3.0 # with: From 80d8b6b83fe781aefd70c7a8178e14b461cc5bc0 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 22 Mar 2024 11:54:57 -0700 Subject: [PATCH 051/155] Updating to point to sln as the wild cards are not supported --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 112e146125..22c55bdbb7 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -39,7 +39,7 @@ runs: - name: Test shell: bash - run: dotnet test "${{github.workspace}}/Test/*UnitTests/*.csproj" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal + run: dotnet test "Microsoft.Health.Fhir.sln" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal # - name: Code Coverage Report # uses: irongut/CodeCoverageSummary@v1.3.0 # with: From b49225a3c47c446b9992e308a7a19268da7064d7 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 22 Mar 2024 12:44:31 -0700 Subject: [PATCH 052/155] Updating test settings path --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 22c55bdbb7..76fd64d73f 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -39,7 +39,7 @@ runs: - name: Test shell: bash - run: dotnet test "Microsoft.Health.Fhir.sln" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal + run: dotnet test "Microsoft.Health.Fhir.sln" --settings test/Configuration/testconfiguration.json 
--configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal # - name: Code Coverage Report # uses: irongut/CodeCoverageSummary@v1.3.0 # with: From 006bf9f8fa842ad747866edcc205f0622f36b4f8 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 22 Mar 2024 16:07:03 -0700 Subject: [PATCH 053/155] Added test filter to only run unit tests --- .github/actions/dotnet-build/action.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 76fd64d73f..16c9e133f7 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -39,7 +39,15 @@ runs: - name: Test shell: bash - run: dotnet test "Microsoft.Health.Fhir.sln" --settings test/Configuration/testconfiguration.json --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal + run: dotnet test "Microsoft.Health.Fhir.sln" --filter "FullyQualifiedName\~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal + + # - name: Publish Artifacts + # shell: bash + # run: dotnet publish "Microsoft.Health.Fhir.sln" --output $(Build.ArtifactStagingDirectory)/web --configuration ${{inputs.buildConfiguration}} --no-build -f $(defaultBuildFramework) + # - name: Pack nugets + # shell: bash + # run: dotnet pack "Microsoft.Health.Fhir.sln" --output $(Build.ArtifactStagingDirectory)/nupkgs --no-build --configuration=Release -p:PackageVersion=${{inputs.nugetVersion}} + #Try pack command here to see if it picks up both framework outputs. It may not without referencing output of each job # - name: Code Coverage Report # uses: irongut/CodeCoverageSummary@v1.3.0 # with: From 1c1eb0aec9b502994c4aa45ef4fd0c65cca4f0bd Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 08:55:16 -0700 Subject: [PATCH 054/155] Added SBOM task and initial artifact upload to use between jobs. 
--- .github/actions/dotnet-build/action.yml | 5 ++++- .github/workflows/fhir-oss-ci-pipeline.yml | 12 ++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 16c9e133f7..74dee3c24e 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -20,6 +20,9 @@ inputs: majorMinorPatch: description: The major.minor.patch version to use. required: true + buildOutput: + description: The output directory for the build. + required: true runs: using: composite @@ -35,7 +38,7 @@ runs: - name: Build shell: bash - run: dotnet build --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror + run: dotnet build --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror --output ${{inputs.buildOutput}} - name: Test shell: bash diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 953e6de5a5..fdb4fe807d 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -84,6 +84,18 @@ jobs: informationalVersion: ${{needs.setup.outputs.informationalVersion}} dotnetVersion: ${{matrix.dotnet-version}} majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} + buildOutput: ${{github.workspace}}/artifacts + - name: Generate SBOM + run: | + curl -Lo $RUNNER_TEMP/sbom-tool https://github.com/microsoft/sbom-tool/releases/latest/download/sbom-tool-linux-x64 + chmod +x $RUNNER_TEMP/sbom-tool + 
$RUNNER_TEMP/sbom-tool generate -b ./buildOutput -bc . -V Verbose + + - name: Upload a Build Artifact + uses: actions/upload-artifact@v4 + with: + path: buildOutput + runIntegrationTests: runs-on: ubuntu-latest needs : buildAndUnitTest From 723dd4f40e5290f809d277b6ffdf9be4119e06d0 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 10:23:53 -0700 Subject: [PATCH 055/155] SBOM with initial artifact upload. --- .github/actions/dotnet-build/action.yml | 6 +--- .../package-web-build-artifacts/action.yml | 35 +++++++++++++++++++ .github/actions/update-semver/action.yml | 2 +- .github/workflows/fhir-oss-ci-pipeline.yml | 19 ++++++++-- 4 files changed, 53 insertions(+), 9 deletions(-) create mode 100644 .github/actions/package-web-build-artifacts/action.yml diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 74dee3c24e..34c9fc0b42 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -20,10 +20,6 @@ inputs: majorMinorPatch: description: The major.minor.patch version to use. required: true - buildOutput: - description: The output directory for the build. 
- required: true - runs: using: composite steps: @@ -38,7 +34,7 @@ runs: - name: Build shell: bash - run: dotnet build --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror --output ${{inputs.buildOutput}} + run: dotnet build --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror - name: Test shell: bash diff --git a/.github/actions/package-web-build-artifacts/action.yml b/.github/actions/package-web-build-artifacts/action.yml new file mode 100644 index 0000000000..5ba3d07712 --- /dev/null +++ b/.github/actions/package-web-build-artifacts/action.yml @@ -0,0 +1,35 @@ +name: Package Web Build Artifacts +description: 'Packages the web build artifacts for deployment' +inputs: + fhirSchemaVersion: + description: 'The FHIR schema version to package' + required: true + majorMinorPatch: + description: 'The version of the Nuget package' + required: true + outputPath: + description: 'The path to the output directory' + required: true + buildConfiguration: + description: 'The build configuration to use' + required: true + dotnetVerison: + description: 'The version of dotnet to use' + required: true + semVer: + description: 'The SemVer to use' + required: true +runs: + using: 'composite' + steps: + - name: Create Nuget packages + shell: bash + run: | + echo "Creating Nuget packages for FHIR schema version ${{inputs.fhirSchemaVersion}}" + dotnet pack ${{github.workspace}} --output ${{input.outputPath}}/nupkgs --no-build --configuration=${{inputs.buildConfiguration}} -p:PackageVersion=${{inputs.majorMinorPatch}} + - name: 
Package Web Artifacts + shell: bash + run: | + echo "Packaging web artifacts for FHIR schema version ${{inputs.fhirSchemaVersion}}" + dotnet publish ${{github.workspace}}/**/*Web.csproj --output ${{input.outputPath}}/web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -f ${{inputs.dotnetVerison}} + diff --git a/.github/actions/update-semver/action.yml b/.github/actions/update-semver/action.yml index ef491516c1..ccaa23c812 100644 --- a/.github/actions/update-semver/action.yml +++ b/.github/actions/update-semver/action.yml @@ -15,7 +15,7 @@ outputs: informationalVersion: description: The assembly informational version for the build value: ${{ steps.version.outputs.GitVersion_InformationalVersion }} - nugetVersion: + semVer: description: The NuGet package version for the build value: ${{ steps.version.outputs.GitVersion_SemVer }} majorMinorPatch: diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index fdb4fe807d..72cf238612 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -20,6 +20,7 @@ env: connectedServiceName: Microsoft Health Open Source Subscription composeLocation: build/docker/docker-compose.yaml imageTag: ${{github.run_number}} + outputPath: ${{github.workspace}}/artifacts jobs: setup: @@ -33,6 +34,7 @@ jobs: fileVersion: ${{ steps.version.outputs.fileVersion }} informationalVersion: ${{ steps.version.outputs.informationalVersion }} majorMinorPatch: ${{ steps.version.outputs.majorMinorPatch }} + semVer: ${{steps.version.outputs.SemVer}} steps: - name: Checkout uses: actions/checkout@v4 @@ -64,6 +66,7 @@ jobs: strategy: matrix: dotnet-version: [ '8.0.x', '6.0.x' ] + fhirSchemaVersion: ["Stu3", "R4", "R4B", "R5"] steps: - name: Checkout uses: actions/checkout@v4 @@ -84,17 +87,27 @@ jobs: informationalVersion: ${{needs.setup.outputs.informationalVersion}} dotnetVersion: ${{matrix.dotnet-version}} 
majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} - buildOutput: ${{github.workspace}}/artifacts + + - name: Package Web Build Artifacts + uses: ./.github/actions/package-web-build-artifacts + with: + fhirschemaversion: ${{ matrix.fhirSchemaVersion }} + majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} + outputPath: ${{env.outputPath}} + buildConfiguration: ${{env.buildConfiguration}} + dotnetVersion: ${{matrix.dotnet-version}} + semVer: ${{needs.setup.outputs.semVer}} + - name: Generate SBOM run: | curl -Lo $RUNNER_TEMP/sbom-tool https://github.com/microsoft/sbom-tool/releases/latest/download/sbom-tool-linux-x64 chmod +x $RUNNER_TEMP/sbom-tool - $RUNNER_TEMP/sbom-tool generate -b ./buildOutput -bc . -V Verbose + $RUNNER_TEMP/sbom-tool generate -b ./artifacts -bc . -V Verbose - name: Upload a Build Artifact uses: actions/upload-artifact@v4 with: - path: buildOutput + path: ${{env.outputPath}} runIntegrationTests: runs-on: ubuntu-latest From 5b8ee3a07a5789264e546240d7b1bbec3509bb6d Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 11:12:29 -0700 Subject: [PATCH 056/155] Attempting to parse dotnet version --- .github/actions/dotnet-build/action.yml | 51 +++++++------------------ 1 file changed, 13 insertions(+), 38 deletions(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 34c9fc0b42..f6566df02b 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -32,46 +32,21 @@ runs: shell: bash run: dotnet restore + - name: Convert version + id: convert_version + shell: bash + run: | + version=${${{inputs.dotnetVerison}}%%.*} + netVersion="net${version}.0" + echo "$netVersion" + echo "::set-output name=net_version::$netVersion" + env: + VERSION: ${{ steps.get_version.outputs.version }} + - name: Build shell: bash - run: dotnet build --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true 
-p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror + run: dotnet build --targetFrameworkVersion ${{inputs.dotnetVersion}} --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror - name: Test shell: bash - run: dotnet test "Microsoft.Health.Fhir.sln" --filter "FullyQualifiedName\~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal - - # - name: Publish Artifacts - # shell: bash - # run: dotnet publish "Microsoft.Health.Fhir.sln" --output $(Build.ArtifactStagingDirectory)/web --configuration ${{inputs.buildConfiguration}} --no-build -f $(defaultBuildFramework) - # - name: Pack nugets - # shell: bash - # run: dotnet pack "Microsoft.Health.Fhir.sln" --output $(Build.ArtifactStagingDirectory)/nupkgs --no-build --configuration=Release -p:PackageVersion=${{inputs.nugetVersion}} - #Try pack command here to see if it picks up both framework outputs. 
It may not without referencing output of each job - # - name: Code Coverage Report - # uses: irongut/CodeCoverageSummary@v1.3.0 - # with: - # filename: coverage/**/coverage.cobertura.xml - # badge: true - # fail_below_min: true - # format: markdown - # hide_branch_rate: false - # hide_complexity: true - # indicators: true - # output: both - # thresholds: '60 80' - # - run: mkdir -p coverage - # shell: bash - # - run: mkdir -p artifacts - # shell: bash - # - name: actions/upload-artifact - # uses: actions/upload-artifact@v4 - # with: - # name: fhirBuild-${{inputs.dotnetVersion}} - # path: artifacts - # retention-days: 1 - # - name: Upload Code Coverage Results - # uses: actions/upload-artifact@v4 - # with: - # name: fhirBuild-${{inputs.dotnetVersion}} - # path: artifacts - # retention-days: 1 + run: dotnet test "Microsoft.Health.Fhir.sln" --targetFrameworkVersion ${{inputs.dotnetVersion}} --filter "FullyQualifiedName\~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal From 48e51370f0b10f5cf40a0cefdeef7bf9f0e77a9a Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 11:37:04 -0700 Subject: [PATCH 057/155] Added conversion for dotnet version to use in build --- .github/actions/dotnet-build/action.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index f6566df02b..d64f137211 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -36,17 +36,17 @@ runs: id: convert_version shell: bash run: | - version=${${{inputs.dotnetVerison}}%%.*} + version=${VERSION%%.*} netVersion="net${version}.0" echo "$netVersion" echo "::set-output name=net_version::$netVersion" env: - VERSION: ${{ steps.get_version.outputs.version }} + VERSION: ${{ inputs.dotnet-version }} - name: Build shell: bash - run: dotnet build --targetFrameworkVersion ${{inputs.dotnetVersion}} --configuration 
${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror + run: dotnet build --targetFrameworkVersion ${{steps.convert_version.outputs.net_version}} --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror - name: Test shell: bash - run: dotnet test "Microsoft.Health.Fhir.sln" --targetFrameworkVersion ${{inputs.dotnetVersion}} --filter "FullyQualifiedName\~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal + run: dotnet test "Microsoft.Health.Fhir.sln" --targetFrameworkVersion ${{steps.convert_version.outputs.net_version}} --filter "FullyQualifiedName\~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal From d660afb3824a2fa9a269b9559d5a109a597734af Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 11:48:00 -0700 Subject: [PATCH 058/155] Variable reference fix --- .github/actions/dotnet-build/action.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index d64f137211..d9c022d67b 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -36,10 +36,10 @@ runs: id: convert_version shell: bash run: | - version=${VERSION%%.*} + version=${env.VERSION%%.*} netVersion="net${version}.0" echo "$netVersion" - echo "::set-output name=net_version::$netVersion" + echo "net_version=$netVersion >> $GITHUB_OUTPUT" env: VERSION: ${{ inputs.dotnet-version }} From 340b69f8e9acf9b3756a97de817709b2a80fcb4e Mon Sep 17 
00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 12:01:38 -0700 Subject: [PATCH 059/155] Fixing reference --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index d9c022d67b..8bf1f11510 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -36,7 +36,7 @@ runs: id: convert_version shell: bash run: | - version=${env.VERSION%%.*} + version=${{env.VERSION}}%%.* netVersion="net${version}.0" echo "$netVersion" echo "net_version=$netVersion >> $GITHUB_OUTPUT" From 6037a1696479930f8a3459debdaf78bff6b27dfa Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 12:08:48 -0700 Subject: [PATCH 060/155] Variable fixing --- .github/actions/dotnet-build/action.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 8bf1f11510..8bc4e858cd 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -36,12 +36,10 @@ runs: id: convert_version shell: bash run: | - version=${{env.VERSION}}%%.* + version=${{inputs.dotnetVerison}}%%.* netVersion="net${version}.0" echo "$netVersion" echo "net_version=$netVersion >> $GITHUB_OUTPUT" - env: - VERSION: ${{ inputs.dotnet-version }} - name: Build shell: bash From 2f4445dcd2a8e6dc3dbb36b275e4d016091f36ab Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 12:25:48 -0700 Subject: [PATCH 061/155] Trying another approach --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 8bc4e858cd..0c10da7122 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -36,7 +36,7 @@ runs: id: convert_version shell: 
bash run: | - version=${{inputs.dotnetVerison}}%%.* + version=$( echo "${{inputs.dotnetVerison}}" | sed '^.*?(?=\.)') netVersion="net${version}.0" echo "$netVersion" echo "net_version=$netVersion >> $GITHUB_OUTPUT" From 6d3ab837ab0abd2736fac5dfefe0da6c6db102ea Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 12:37:25 -0700 Subject: [PATCH 062/155] Variable still not showing up in bash command --- .github/actions/dotnet-build/action.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 0c10da7122..95d4a35d9f 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -36,10 +36,11 @@ runs: id: convert_version shell: bash run: | - version=$( echo "${{inputs.dotnetVerison}}" | sed '^.*?(?=\.)') + dotnetVersion=${{inputs.dotnetVersion}} + version=$( echo "dotnetVersion" | sed '^.*?(?=\.)') netVersion="net${version}.0" echo "$netVersion" - echo "net_version=$netVersion >> $GITHUB_OUTPUT" + echo "net_version=$netVersion" >> "$GITHUB_OUTPUT" - name: Build shell: bash From 896d8afcfcf8da50a58b770c4a0cda8c60453f13 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 12:43:01 -0700 Subject: [PATCH 063/155] Closer --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 95d4a35d9f..40807cbcfb 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -37,7 +37,7 @@ runs: shell: bash run: | dotnetVersion=${{inputs.dotnetVersion}} - version=$( echo "dotnetVersion" | sed '^.*?(?=\.)') + version=$( echo "{dotnetVersion}" | sed '^.*?(?=\.)') netVersion="net${version}.0" echo "$netVersion" echo "net_version=$netVersion" >> "$GITHUB_OUTPUT" From 9d7a12694e9ca4fd4d89e74033b04978d574ed7c Mon Sep 17 00:00:00 2001 From: Paul 
Taladay Date: Mon, 25 Mar 2024 12:51:18 -0700 Subject: [PATCH 064/155] Syntax --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 40807cbcfb..0c4f6331ad 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -37,7 +37,7 @@ runs: shell: bash run: | dotnetVersion=${{inputs.dotnetVersion}} - version=$( echo "{dotnetVersion}" | sed '^.*?(?=\.)') + version=$( echo "$dotnetVersion" | sed '^.*?(?=\.)') netVersion="net${version}.0" echo "$netVersion" echo "net_version=$netVersion" >> "$GITHUB_OUTPUT" From 06a3926add09e17ed1b34f6baeda0e9d0e880e00 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 13:22:50 -0700 Subject: [PATCH 065/155] Removing quotes --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 0c4f6331ad..8f439bd1f8 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -37,7 +37,7 @@ runs: shell: bash run: | dotnetVersion=${{inputs.dotnetVersion}} - version=$( echo "$dotnetVersion" | sed '^.*?(?=\.)') + version=$( echo $dotnetVersion | sed '^.*?(?=\.)') netVersion="net${version}.0" echo "$netVersion" echo "net_version=$netVersion" >> "$GITHUB_OUTPUT" From 00a0f27d1c2a9ca93d55c8b9c566ef706da5edd8 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 13:30:05 -0700 Subject: [PATCH 066/155] working on bash command --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 8f439bd1f8..3f77531483 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -37,7 +37,7 @@ runs: shell: 
bash run: | dotnetVersion=${{inputs.dotnetVersion}} - version=$( echo $dotnetVersion | sed '^.*?(?=\.)') + version=$( echo "$dotnetVersion" | sed 's/^.*?(?=\.)') netVersion="net${version}.0" echo "$netVersion" echo "net_version=$netVersion" >> "$GITHUB_OUTPUT" From bfa63a5a56bbf6ef30512f3dd7a4685b7f3b2325 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 13:55:10 -0700 Subject: [PATCH 067/155] Updated regex --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 3f77531483..b05f5d9b82 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -37,7 +37,7 @@ runs: shell: bash run: | dotnetVersion=${{inputs.dotnetVersion}} - version=$( echo "$dotnetVersion" | sed 's/^.*?(?=\.)') + version=$( echo "$dotnetVersion" | sed 's/^\([0-9]*\)\..*/\1/') netVersion="net${version}.0" echo "$netVersion" echo "net_version=$netVersion" >> "$GITHUB_OUTPUT" From 102d8af5eafe0df5aad160b659f81ee0e08db9ca Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 14:08:25 -0700 Subject: [PATCH 068/155] Updated build and test property --- .github/actions/dotnet-build/action.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index b05f5d9b82..ae6c983de9 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -44,8 +44,8 @@ runs: - name: Build shell: bash - run: dotnet build --targetFrameworkVersion ${{steps.convert_version.outputs.net_version}} --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror + run: 
dotnet build -f ${{steps.convert_version.outputs.net_version}} --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror - name: Test shell: bash - run: dotnet test "Microsoft.Health.Fhir.sln" --targetFrameworkVersion ${{steps.convert_version.outputs.net_version}} --filter "FullyQualifiedName\~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal + run: dotnet test "Microsoft.Health.Fhir.sln" -f ${{steps.convert_version.outputs.net_version}} --filter "FullyQualifiedName\~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal From deb2239226e68ddf862abe60f809c11ba474e69e Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 14:24:19 -0700 Subject: [PATCH 069/155] Misspelled property --- .github/actions/package-web-build-artifacts/action.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/actions/package-web-build-artifacts/action.yml b/.github/actions/package-web-build-artifacts/action.yml index 5ba3d07712..8dd882ea12 100644 --- a/.github/actions/package-web-build-artifacts/action.yml +++ b/.github/actions/package-web-build-artifacts/action.yml @@ -26,10 +26,10 @@ runs: shell: bash run: | echo "Creating Nuget packages for FHIR schema version ${{inputs.fhirSchemaVersion}}" - dotnet pack ${{github.workspace}} --output ${{input.outputPath}}/nupkgs --no-build --configuration=${{inputs.buildConfiguration}} -p:PackageVersion=${{inputs.majorMinorPatch}} + dotnet pack ${{github.workspace}} --output ${{inputs.outputPath}}/nupkgs --no-build --configuration=${{inputs.buildConfiguration}} -p:PackageVersion=${{inputs.majorMinorPatch}} - name: Package Web Artifacts shell: bash run: | echo "Packaging web artifacts for FHIR schema version 
${{inputs.fhirSchemaVersion}}" - dotnet publish ${{github.workspace}}/**/*Web.csproj --output ${{input.outputPath}}/web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -f ${{inputs.dotnetVerison}} + dotnet publish ${{github.workspace}}/**/*Web.csproj --output ${{inputs.outputPath}}/web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -f ${{inputs.dotnetVerison}} From 311bb1315d88d851bc6f790c347d09ba91b1b9ac Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 14:50:08 -0700 Subject: [PATCH 070/155] Splitting packaging off to separate job so we don't create more than two build jobs. More than that causes too many file locks --- .github/workflows/fhir-oss-ci-pipeline.yml | 34 +++++++++++++--------- 1 file changed, 21 insertions(+), 13 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 72cf238612..7be4e7254c 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -66,7 +66,6 @@ jobs: strategy: matrix: dotnet-version: [ '8.0.x', '6.0.x' ] - fhirSchemaVersion: ["Stu3", "R4", "R4B", "R5"] steps: - name: Checkout uses: actions/checkout@v4 @@ -88,16 +87,6 @@ jobs: dotnetVersion: ${{matrix.dotnet-version}} majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} - - name: Package Web Build Artifacts - uses: ./.github/actions/package-web-build-artifacts - with: - fhirschemaversion: ${{ matrix.fhirSchemaVersion }} - majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} - outputPath: ${{env.outputPath}} - buildConfiguration: ${{env.buildConfiguration}} - dotnetVersion: ${{matrix.dotnet-version}} - semVer: ${{needs.setup.outputs.semVer}} - - name: Generate SBOM run: | curl -Lo $RUNNER_TEMP/sbom-tool https://github.com/microsoft/sbom-tool/releases/latest/download/sbom-tool-linux-x64 @@ -108,7 +97,27 @@ jobs: uses: actions/upload-artifact@v4 with: path: 
${{env.outputPath}} - + packageBuildArtifacts: + runs-on: ubuntu-latest + needs: buildAndUnitTest + strategy: + matrix: + dotnet-version: [ '8.0.x', '6.0.x' ] + fhirSchemaVersion: ["Stu3", "R4", "R4B", "R5"] + steps: + - name: Download Build Artifacts from Job Cache + uses: actions/download-artifact@v4 + with: + path: artifacts + - name: Package Web Build Artifacts + uses: ./.github/actions/package-web-build-artifacts + with: + fhirschemaversion: ${{ matrix.fhirSchemaVersion }} + majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} + outputPath: ${{env.outputPath}} + buildConfiguration: ${{env.buildConfiguration}} + dotnetVersion: ${{matrix.dotnet-version}} + semVer: ${{needs.setup.outputs.semVer}} runIntegrationTests: runs-on: ubuntu-latest needs : buildAndUnitTest @@ -116,5 +125,4 @@ jobs: - name: Download Build Artifact for Testing uses: actions/download-artifact@v4 with: - name: fhirBuild-8.0.x path: artifacts From e1366bf89ad88f25f1b7bb3bfe6bda402693f3c4 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 25 Mar 2024 15:15:01 -0700 Subject: [PATCH 071/155] Removing escape character --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index ae6c983de9..4e159e6a4a 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -48,4 +48,4 @@ runs: - name: Test shell: bash - run: dotnet test "Microsoft.Health.Fhir.sln" -f ${{steps.convert_version.outputs.net_version}} --filter "FullyQualifiedName\~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal + run: dotnet test "Microsoft.Health.Fhir.sln" -f ${{steps.convert_version.outputs.net_version}} --filter "FullyQualifiedName~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal From 8a2f5264865c971cdfb6d17c981f93b4b97c0977 Mon Sep 17 00:00:00 2001 From: Paul Taladay 
Date: Tue, 26 Mar 2024 09:38:13 -0700 Subject: [PATCH 072/155] Added condition to only run test if matrix is dotnet 8 so we don't duplicate test runs. --- .github/actions/dotnet-build/action.yml | 4 ---- .github/actions/dotnet-test/action.yml | 16 ++++++++++++++++ .github/workflows/fhir-oss-ci-pipeline.yml | 5 +++++ 3 files changed, 21 insertions(+), 4 deletions(-) create mode 100644 .github/actions/dotnet-test/action.yml diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 4e159e6a4a..ab87d9d56f 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -45,7 +45,3 @@ runs: - name: Build shell: bash run: dotnet build -f ${{steps.convert_version.outputs.net_version}} --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror - - - name: Test - shell: bash - run: dotnet test "Microsoft.Health.Fhir.sln" -f ${{steps.convert_version.outputs.net_version}} --filter "FullyQualifiedName~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal diff --git a/.github/actions/dotnet-test/action.yml b/.github/actions/dotnet-test/action.yml new file mode 100644 index 0000000000..5ecf9b3cc5 --- /dev/null +++ b/.github/actions/dotnet-test/action.yml @@ -0,0 +1,16 @@ +name: dotnet test +description: 'Runs the unit tests for the Fhir solution' +inputs: + buildConfiguration: + description: 'The build configuration to use' + required: true + dotnetVersion: + description: 'The version of dotnet to use' + default: net8.0 + required: true +runs: + using: 'composite' + steps: + - name: Run Unit Tests + shell: bash + run: dotnet test "Microsoft.Health.Fhir.sln" -f ${{inputs.dotnetVersion}} --filter "FullyQualifiedName~UnitTests" --configuration 
${{inputs.buildConfiguration}} --no-build --verbosity normal diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 7be4e7254c..6a3981de7c 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -86,6 +86,11 @@ jobs: informationalVersion: ${{needs.setup.outputs.informationalVersion}} dotnetVersion: ${{matrix.dotnet-version}} majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} + - name: Test + uses: ./.github/actions/dotnet-test + if: ${{ matrix.dotnet-version == '8.0.x' }} + with: + buildConfiguration: ${{env.buildConfiguration}} - name: Generate SBOM run: | From 401fad124da99b45b4677ba996db8a144f5703fa Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Tue, 26 Mar 2024 10:40:53 -0700 Subject: [PATCH 073/155] Updated SBOM path --- .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 6a3981de7c..2447e6c7d5 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -96,7 +96,7 @@ jobs: run: | curl -Lo $RUNNER_TEMP/sbom-tool https://github.com/microsoft/sbom-tool/releases/latest/download/sbom-tool-linux-x64 chmod +x $RUNNER_TEMP/sbom-tool - $RUNNER_TEMP/sbom-tool generate -b ./artifacts -bc . -V Verbose + $RUNNER_TEMP/sbom-tool generate -b . -bc . 
-V Verbose - name: Upload a Build Artifact uses: actions/upload-artifact@v4 From 88518d320593851cff91bf9284361717aa2ae951 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Tue, 26 Mar 2024 12:19:23 -0700 Subject: [PATCH 074/155] Required package supplier --- .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 2447e6c7d5..2a2f48918c 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -96,7 +96,7 @@ jobs: run: | curl -Lo $RUNNER_TEMP/sbom-tool https://github.com/microsoft/sbom-tool/releases/latest/download/sbom-tool-linux-x64 chmod +x $RUNNER_TEMP/sbom-tool - $RUNNER_TEMP/sbom-tool generate -b . -bc . -V Verbose + $RUNNER_TEMP/sbom-tool generate -b . -bc . -V Verbose -ps "Organization: Microsoft" - name: Upload a Build Artifact uses: actions/upload-artifact@v4 From 9a1a946baf9e2edc9d3415f66be6c2d3e9b37c8a Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Tue, 26 Mar 2024 12:34:03 -0700 Subject: [PATCH 075/155] Added package name and version to SboM --- .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 2a2f48918c..e1176da3ab 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -96,7 +96,7 @@ jobs: run: | curl -Lo $RUNNER_TEMP/sbom-tool https://github.com/microsoft/sbom-tool/releases/latest/download/sbom-tool-linux-x64 chmod +x $RUNNER_TEMP/sbom-tool - $RUNNER_TEMP/sbom-tool generate -b . -bc . -V Verbose -ps "Organization: Microsoft" + $RUNNER_TEMP/sbom-tool generate -b . -bc . 
-V Verbose -ps "Organization: Microsoft" -pv ${{needs.setup.outputs.majorMinorPatch}} -pn ${{needs.setup.outputs.informationalVersion}} - name: Upload a Build Artifact uses: actions/upload-artifact@v4 From 3c851b5c45e0ec19559578c796b27bbb514cee70 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 27 Mar 2024 10:02:17 -0700 Subject: [PATCH 076/155] Trying with just one dotnet version to see if badimageformatexception still shows up. --- .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index e1176da3ab..0cfeb55479 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -65,7 +65,7 @@ jobs: needs: setup strategy: matrix: - dotnet-version: [ '8.0.x', '6.0.x' ] + dotnet-version: [ '8.0.x'] steps: - name: Checkout uses: actions/checkout@v4 From bbf341dccc33a9f21753864245dc43d561e33cea Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 27 Mar 2024 10:28:25 -0700 Subject: [PATCH 077/155] Removing additional restore during build --- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index ab87d9d56f..55b797b4b6 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -44,4 +44,4 @@ runs: - name: Build shell: bash - run: dotnet build -f ${{steps.convert_version.outputs.net_version}} --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror + run: dotnet build -f ${{steps.convert_version.outputs.net_version}} --no-restore --configuration ${{inputs.buildConfiguration}} 
-p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror From a5338c9fcfe8f70607204d7d55f170af88515fb8 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 27 Mar 2024 13:49:33 -0700 Subject: [PATCH 078/155] Simplifying build and test --- .github/actions/dotnet-build/action.yml | 24 +--------------------- .github/actions/dotnet-test/action.yml | 6 +----- .github/workflows/fhir-oss-ci-pipeline.yml | 17 ++++++++------- 3 files changed, 12 insertions(+), 35 deletions(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 55b797b4b6..4adc3f4bca 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -14,34 +14,12 @@ inputs: informationalVersion: description: The scaler assembly's informational version. required: true - dotnetVersion: - description: The version of dotnet to use. - required: true majorMinorPatch: description: The major.minor.patch version to use. 
required: true runs: using: composite steps: - - name: Setup dotnet ${{ inputs.dotnetVersion }} - uses: actions/setup-dotnet@v4 - with: - dotnet-version: ${{ inputs.dotnetVersion}} - - - name: Restore Dependencies - shell: bash - run: dotnet restore - - - name: Convert version - id: convert_version - shell: bash - run: | - dotnetVersion=${{inputs.dotnetVersion}} - version=$( echo "$dotnetVersion" | sed 's/^\([0-9]*\)\..*/\1/') - netVersion="net${version}.0" - echo "$netVersion" - echo "net_version=$netVersion" >> "$GITHUB_OUTPUT" - - name: Build shell: bash - run: dotnet build -f ${{steps.convert_version.outputs.net_version}} --no-restore --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror + run: dotnet build --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror diff --git a/.github/actions/dotnet-test/action.yml b/.github/actions/dotnet-test/action.yml index 5ecf9b3cc5..e15a750bb4 100644 --- a/.github/actions/dotnet-test/action.yml +++ b/.github/actions/dotnet-test/action.yml @@ -4,13 +4,9 @@ inputs: buildConfiguration: description: 'The build configuration to use' required: true - dotnetVersion: - description: 'The version of dotnet to use' - default: net8.0 - required: true runs: using: 'composite' steps: - name: Run Unit Tests shell: bash - run: dotnet test "Microsoft.Health.Fhir.sln" -f ${{inputs.dotnetVersion}} --filter "FullyQualifiedName~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal + run: dotnet test "Microsoft.Health.Fhir.sln" --filter 
"FullyQualifiedName~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 0cfeb55479..1e9397890c 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -43,6 +43,11 @@ jobs: - name: Install Latest .Net SDK uses: actions/setup-dotnet@v4 + with: + global-json-file: 'global.json' + dotnet-version: | + 6.x + 8.x - name: Determine Semver id: version @@ -63,19 +68,19 @@ jobs: buildAndUnitTest: runs-on: ubuntu-latest needs: setup - strategy: - matrix: - dotnet-version: [ '8.0.x'] steps: - name: Checkout uses: actions/checkout@v4 with: fetch-depth: 0 - - name: Setup dotnet ${{ matrix.dotnet-version }} + - name: Install Latest .Net SDK uses: actions/setup-dotnet@v4 with: - dotnet-version: ${{ matrix.dotnet-version }} + global-json-file: 'global.json' + dotnet-version: | + 6.x + 8.x - name: Build uses: ./.github/actions/dotnet-build @@ -84,11 +89,9 @@ jobs: buildConfiguration: ${{env.buildConfiguration}} fileVersion: ${{needs.setup.outputs.fileVersion}} informationalVersion: ${{needs.setup.outputs.informationalVersion}} - dotnetVersion: ${{matrix.dotnet-version}} majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} - name: Test uses: ./.github/actions/dotnet-test - if: ${{ matrix.dotnet-version == '8.0.x' }} with: buildConfiguration: ${{env.buildConfiguration}} From 89ea79099dce348c8ff93e41f919c6e0132afc37 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 27 Mar 2024 16:42:14 -0700 Subject: [PATCH 079/155] Trying with 3.1 installed --- .github/workflows/fhir-oss-ci-pipeline.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 1e9397890c..35800463e0 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -81,6 +81,7 @@ jobs: 
dotnet-version: | 6.x 8.x + 3.1 - name: Build uses: ./.github/actions/dotnet-build From 76996b863b52322bacbdc8a49788c5b4bf7a7278 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 28 Mar 2024 11:58:57 -0700 Subject: [PATCH 080/155] Temporarily commenting out unit tests until I can rest of pipeline working. --- .../package-web-build-artifacts/action.yml | 5 +--- .github/workflows/fhir-oss-ci-pipeline.yml | 26 +++++++++---------- 2 files changed, 13 insertions(+), 18 deletions(-) diff --git a/.github/actions/package-web-build-artifacts/action.yml b/.github/actions/package-web-build-artifacts/action.yml index 8dd882ea12..5b7d88f85d 100644 --- a/.github/actions/package-web-build-artifacts/action.yml +++ b/.github/actions/package-web-build-artifacts/action.yml @@ -13,9 +13,6 @@ inputs: buildConfiguration: description: 'The build configuration to use' required: true - dotnetVerison: - description: 'The version of dotnet to use' - required: true semVer: description: 'The SemVer to use' required: true @@ -31,5 +28,5 @@ runs: shell: bash run: | echo "Packaging web artifacts for FHIR schema version ${{inputs.fhirSchemaVersion}}" - dotnet publish ${{github.workspace}}/**/*Web.csproj --output ${{inputs.outputPath}}/web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -f ${{inputs.dotnetVerison}} + dotnet publish ${{github.workspace}}/**/*Web.csproj --output ${{inputs.outputPath}}/web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 35800463e0..d05b91e8e2 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -91,10 +91,10 @@ jobs: fileVersion: ${{needs.setup.outputs.fileVersion}} informationalVersion: ${{needs.setup.outputs.informationalVersion}} majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} - - name: Test - uses: 
./.github/actions/dotnet-test - with: - buildConfiguration: ${{env.buildConfiguration}} + # - name: Test + # uses: ./.github/actions/dotnet-test + # with: + # buildConfiguration: ${{env.buildConfiguration}} - name: Generate SBOM run: | @@ -111,7 +111,6 @@ jobs: needs: buildAndUnitTest strategy: matrix: - dotnet-version: [ '8.0.x', '6.0.x' ] fhirSchemaVersion: ["Stu3", "R4", "R4B", "R5"] steps: - name: Download Build Artifacts from Job Cache @@ -125,13 +124,12 @@ jobs: majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} outputPath: ${{env.outputPath}} buildConfiguration: ${{env.buildConfiguration}} - dotnetVersion: ${{matrix.dotnet-version}} semVer: ${{needs.setup.outputs.semVer}} - runIntegrationTests: - runs-on: ubuntu-latest - needs : buildAndUnitTest - steps: - - name: Download Build Artifact for Testing - uses: actions/download-artifact@v4 - with: - path: artifacts + # runIntegrationTests: + # runs-on: ubuntu-latest + # needs : buildAndUnitTest + # steps: + # - name: Download Build Artifact for Testing + # uses: actions/download-artifact@v4 + # with: + # path: artifacts From c6c705c8eddc14c13da0f76ea4aada1da4b70bc5 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 29 Mar 2024 08:57:02 -0700 Subject: [PATCH 081/155] Added checkout command before running package --- .github/workflows/fhir-oss-ci-pipeline.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index d05b91e8e2..f2675a42df 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -113,6 +113,10 @@ jobs: matrix: fhirSchemaVersion: ["Stu3", "R4", "R4B", "R5"] steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 - name: Download Build Artifacts from Job Cache uses: actions/download-artifact@v4 with: From ee9866ccbab5046664f6d6c4354b2d19ddf62f4d Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 29 Mar 2024 09:40:38 -0700 
Subject: [PATCH 082/155] Added explicit restore for nuget packages --- .github/workflows/fhir-oss-ci-pipeline.yml | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index f2675a42df..ca508af809 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -81,7 +81,6 @@ jobs: dotnet-version: | 6.x 8.x - 3.1 - name: Build uses: ./.github/actions/dotnet-build @@ -117,10 +116,20 @@ jobs: uses: actions/checkout@v4 with: fetch-depth: 0 + - name: Install Latest .Net SDK + uses: actions/setup-dotnet@v4 + with: + global-json-file: 'global.json' + dotnet-version: | + 6.x + 8.x - name: Download Build Artifacts from Job Cache uses: actions/download-artifact@v4 with: path: artifacts + - name: Nuget Restore + shell: bash + run: dotnet restore - name: Package Web Build Artifacts uses: ./.github/actions/package-web-build-artifacts with: From 5148fa05697580034d5f01c4fabd40742b619de7 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 29 Mar 2024 09:56:49 -0700 Subject: [PATCH 083/155] Moved restore to before the artifacts pull --- .github/workflows/fhir-oss-ci-pipeline.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index ca508af809..2e8def0c50 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -123,13 +123,13 @@ jobs: dotnet-version: | 6.x 8.x + - name: Nuget Restore + shell: bash + run: dotnet restore - name: Download Build Artifacts from Job Cache uses: actions/download-artifact@v4 with: path: artifacts - - name: Nuget Restore - shell: bash - run: dotnet restore - name: Package Web Build Artifacts uses: ./.github/actions/package-web-build-artifacts with: From 3b9bc2856e08da61680e7b8ca32616540babff16 Mon Sep 17 00:00:00 2001 From: Paul Taladay 
Date: Fri, 29 Mar 2024 10:52:08 -0700 Subject: [PATCH 084/155] Added sln to restore call --- .github/actions/docker-build/action.yml | 25 ++++++++++++++++++++++ .github/workflows/fhir-oss-ci-pipeline.yml | 21 ++++++++++++------ 2 files changed, 40 insertions(+), 6 deletions(-) create mode 100644 .github/actions/docker-build/action.yml diff --git a/.github/actions/docker-build/action.yml b/.github/actions/docker-build/action.yml new file mode 100644 index 0000000000..5e66230270 --- /dev/null +++ b/.github/actions/docker-build/action.yml @@ -0,0 +1,25 @@ +name: Docker Build +description: 'Builds images for all supported FHIR versions' + +inputs: + tag: + description: 'The tag to apply to the images' + required: true + fhirSchemaVersion: + description: 'The FHIR schema version to package' + required: true + assemblyVersion: + description: 'The assembly version to use' + required: true + composeLocation: + description: 'The location of the docker-compose file' + required: true + +runs: + using: 'composite' + steps: + - name: Build and Push Docker Images + shell: bash + run: | + echo "Building and pushing Docker images for FHIR schema version ${{inputs.fhirSchemaVersion}}" + docker-compose -f ${{inputs.composeLocation}} build -p ${{inputs.fhirSchemaVersion}} --build-arg FHIR_VERSION=${{inputs.fhirSchemaVersion}} --build-arg ASSEMBLY_VER=${{inputs.assemblyVersion}} diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 2e8def0c50..9d9b49efcc 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -65,6 +65,10 @@ jobs: uses: ./.github/actions/clean-storage-accounts with: environmentName: ${{vars.CIRESOURCEGROUPROOT}} + # - name: Cleanup Integration Test databases + # uses: ./.github/actions/cleanup-integration-test-databases + # with: + # environmentName: ${{vars.CIRESOURCEGROUPROOT}} buildAndUnitTest: runs-on: ubuntu-latest needs: setup @@ -95,11 +99,16 @@ jobs: # 
with: # buildConfiguration: ${{env.buildConfiguration}} - - name: Generate SBOM - run: | - curl -Lo $RUNNER_TEMP/sbom-tool https://github.com/microsoft/sbom-tool/releases/latest/download/sbom-tool-linux-x64 - chmod +x $RUNNER_TEMP/sbom-tool - $RUNNER_TEMP/sbom-tool generate -b . -bc . -V Verbose -ps "Organization: Microsoft" -pv ${{needs.setup.outputs.majorMinorPatch}} -pn ${{needs.setup.outputs.informationalVersion}} + # - name: Generate SBOM + # run: | + # curl -Lo $RUNNER_TEMP/sbom-tool https://github.com/microsoft/sbom-tool/releases/latest/download/sbom-tool-linux-x64 + # chmod +x $RUNNER_TEMP/sbom-tool + # $RUNNER_TEMP/sbom-tool generate -b . -bc . -V Verbose -ps "Organization: Microsoft" -pv ${{needs.setup.outputs.majorMinorPatch}} -pn ${{needs.setup.outputs.informationalVersion}} + + # - name: Docker Build + # uses: ./.github/actions/docker-build + # with: + # assemblySemFileVer: ${{needs.setup.outputs.semVer}} - name: Upload a Build Artifact uses: actions/upload-artifact@v4 @@ -125,7 +134,7 @@ jobs: 8.x - name: Nuget Restore shell: bash - run: dotnet restore + run: dotnet restore Microsoft.Health.Fhir.sln - name: Download Build Artifacts from Job Cache uses: actions/download-artifact@v4 with: From f3680f3cd638361fd6ed2c42261a27e479f3e4ea Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 29 Mar 2024 11:13:50 -0700 Subject: [PATCH 085/155] Updating action to v2 --- .github/actions/clean-storage-accounts/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/clean-storage-accounts/action.yml b/.github/actions/clean-storage-accounts/action.yml index 2b91090852..35df072a4c 100644 --- a/.github/actions/clean-storage-accounts/action.yml +++ b/.github/actions/clean-storage-accounts/action.yml @@ -10,7 +10,7 @@ runs: using: 'composite' steps: - name: Clean Storage Accounts - uses: azure/powershell@v1 + uses: azure/powershell@v2 with: azPSVersion: "latest" inlineScript: | From ca5701fd41212658736ea61bc7bf997eb1f7b233 Mon Sep 
17 00:00:00 2001 From: Paul Taladay Date: Fri, 29 Mar 2024 11:44:32 -0700 Subject: [PATCH 086/155] dotnet restore update --- .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 9d9b49efcc..118a9b1178 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -134,7 +134,7 @@ jobs: 8.x - name: Nuget Restore shell: bash - run: dotnet restore Microsoft.Health.Fhir.sln + run: dotnet restore ./Microsoft.Health.Fhir.sln - name: Download Build Artifacts from Job Cache uses: actions/download-artifact@v4 with: From 3f824014e6939d7f040d72d092f21d4fa7cab3a5 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 29 Mar 2024 12:25:07 -0700 Subject: [PATCH 087/155] Debugging restore --- .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 118a9b1178..73bc5b8473 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -134,7 +134,7 @@ jobs: 8.x - name: Nuget Restore shell: bash - run: dotnet restore ./Microsoft.Health.Fhir.sln + run: dotnet restore --verbosity detailed - name: Download Build Artifacts from Job Cache uses: actions/download-artifact@v4 with: From 027772f62fa2d7babe37737d8c9b7ae172ae6a70 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 29 Mar 2024 14:02:47 -0700 Subject: [PATCH 088/155] Added working path --- .github/actions/docker-add-tag/action.yml | 36 ++++++++++++++++++++++ .github/workflows/fhir-oss-ci-pipeline.yml | 28 ++++++++++++++--- 2 files changed, 59 insertions(+), 5 deletions(-) create mode 100644 .github/actions/docker-add-tag/action.yml diff --git a/.github/actions/docker-add-tag/action.yml b/.github/actions/docker-add-tag/action.yml new file mode 
100644 index 0000000000..68d89b1835 --- /dev/null +++ b/.github/actions/docker-add-tag/action.yml @@ -0,0 +1,36 @@ +name: Docker Add Main tag +description: 'Adds the main tag to the images for all supported FHIR versions' + +inputs: + sourceTag: + description: 'The tag to apply to the images' + required: true + targetTag: + description: 'The tag to apply to the images' + required: true + fhirSchemaVersion: + description: 'The FHIR schema version to package' + required: true + azureContainerRegistry: + description: 'The Azure Container Registry to push the images to' + required: true + +runs: + using: 'composite' + steps: + - name: Azure Login + uses: azure/login@v2 + with: + client-id: ${{secrets.AZURE_CLIENT_ID}} + subscription-id: ${{secrets.AZURE_SUBSCRIPTION_ID}} + tenant-id: ${{secrets.AZURE_TENANT_ID}} + enable-AzPSSession: true + - name: Add Tag to Docker Images + shell: bash + run: | + az acr login -n ${{inputs.azureContainerRegistry}} + sourceImage="${{inputs.azureContainerRegistry}}/${{inputs.fhirSchemaVersion}}_fhir-server:${{inputs.sourceTag}}" + targetImage="${{inputs.azureContainerRegistry}}/${{inputs.fhirSchemaVersion}}_fhir-server:${{inputs.targetTag}}" + docker pull $sourceImage + docker tag $sourceImage $targetImage + docker push $targetImage diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 73bc5b8473..ca4adcc8dc 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -105,11 +105,6 @@ jobs: # chmod +x $RUNNER_TEMP/sbom-tool # $RUNNER_TEMP/sbom-tool generate -b . -bc . 
-V Verbose -ps "Organization: Microsoft" -pv ${{needs.setup.outputs.majorMinorPatch}} -pn ${{needs.setup.outputs.informationalVersion}} - # - name: Docker Build - # uses: ./.github/actions/docker-build - # with: - # assemblySemFileVer: ${{needs.setup.outputs.semVer}} - - name: Upload a Build Artifact uses: actions/upload-artifact@v4 with: @@ -133,6 +128,7 @@ jobs: 6.x 8.x - name: Nuget Restore + working-directory: ${{github.workspace}}/fhirserver/fhirserver shell: bash run: dotnet restore --verbosity detailed - name: Download Build Artifacts from Job Cache @@ -147,11 +143,33 @@ jobs: outputPath: ${{env.outputPath}} buildConfiguration: ${{env.buildConfiguration}} semVer: ${{needs.setup.outputs.semVer}} + # - name: Docker Build + # uses: ./.github/actions/docker-build + # with: + # assemblySemFileVer: ${{needs.setup.outputs.semVer}} # runIntegrationTests: # runs-on: ubuntu-latest # needs : buildAndUnitTest # steps: + # - name: Checkout + # uses: actions/checkout@v4 + # with: + # fetch-depth: 0 # - name: Download Build Artifact for Testing # uses: actions/download-artifact@v4 # with: # path: artifacts + # - name: Install Latest .Net SDK + # uses: actions/setup-dotnet@v4 + # with: + # global-json-file: 'global.json' + # dotnet-version: | + # 6.x + # 8.x + # - name: Docker add main tag + # uses: ./.github/actions/docker-add-main-tag + # with: + # assemblySemFileVer: ${{needs.setup.outputs.semVer}} + # imageTag: ${{env.imageTag}} + # azureContainerRegistryName: ${{env.azureContainerRegistryName}} + # connectedServiceName: ${{env.connectedServiceName}} From 8ba73e41a3e6d6b14b907a43ebdcc1890159e4b7 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 29 Mar 2024 14:16:11 -0700 Subject: [PATCH 089/155] Fixed working path --- .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index ca4adcc8dc..02bfb81f60 100644 --- 
a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -128,7 +128,7 @@ jobs: 6.x 8.x - name: Nuget Restore - working-directory: ${{github.workspace}}/fhirserver/fhirserver + working-directory: ${{github.workspace}}/fhirserver shell: bash run: dotnet restore --verbosity detailed - name: Download Build Artifacts from Job Cache From ba2f956dd54fc23e4c8ebb56d79c067e4eb531e7 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 29 Mar 2024 14:40:29 -0700 Subject: [PATCH 090/155] path update --- .github/workflows/fhir-oss-ci-pipeline.yml | 24 +++++++++++----------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 02bfb81f60..4e9d948f46 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -53,18 +53,18 @@ jobs: id: version uses: ./.github/actions/update-semver - - name: Azure Login - uses: azure/login@v2 - with: - client-id: ${{secrets.AZURE_CLIENT_ID}} - subscription-id: ${{secrets.AZURE_SUBSCRIPTION_ID}} - tenant-id: ${{secrets.AZURE_TENANT_ID}} - enable-AzPSSession: true + # - name: Azure Login + # uses: azure/login@v2 + # with: + # client-id: ${{secrets.AZURE_CLIENT_ID}} + # subscription-id: ${{secrets.AZURE_SUBSCRIPTION_ID}} + # tenant-id: ${{secrets.AZURE_TENANT_ID}} + # enable-AzPSSession: true - - name: Clean Storage Accounts - uses: ./.github/actions/clean-storage-accounts - with: - environmentName: ${{vars.CIRESOURCEGROUPROOT}} + # - name: Clean Storage Accounts + # uses: ./.github/actions/clean-storage-accounts + # with: + # environmentName: ${{vars.CIRESOURCEGROUPROOT}} # - name: Cleanup Integration Test databases # uses: ./.github/actions/cleanup-integration-test-databases # with: @@ -128,7 +128,7 @@ jobs: 6.x 8.x - name: Nuget Restore - working-directory: ${{github.workspace}}/fhirserver + working-directory: ${{github.workspace}} shell: bash run: dotnet 
restore --verbosity detailed - name: Download Build Artifacts from Job Cache From 297aab5716b0e052266ba3b149facb64278808a3 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 29 Mar 2024 15:05:23 -0700 Subject: [PATCH 091/155] Added missing parameter --- .github/actions/package-web-build-artifacts/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/package-web-build-artifacts/action.yml b/.github/actions/package-web-build-artifacts/action.yml index 5b7d88f85d..15a25d7f67 100644 --- a/.github/actions/package-web-build-artifacts/action.yml +++ b/.github/actions/package-web-build-artifacts/action.yml @@ -28,5 +28,5 @@ runs: shell: bash run: | echo "Packaging web artifacts for FHIR schema version ${{inputs.fhirSchemaVersion}}" - dotnet publish ${{github.workspace}}/**/*Web.csproj --output ${{inputs.outputPath}}/web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build + dotnet publish ${{github.workspace}}/**/*Web.csproj --output ${{inputs.outputPath}}/web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -p:PackageVersion=${{inputs.majorMinorPatch}} From 86d8890e0f9c129bc93e69e3c8a716c53466a39f Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 29 Mar 2024 15:22:38 -0700 Subject: [PATCH 092/155] Added additional needs --- .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 4e9d948f46..a4ad7b1dad 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -111,7 +111,7 @@ jobs: path: ${{env.outputPath}} packageBuildArtifacts: runs-on: ubuntu-latest - needs: buildAndUnitTest + needs: [setup, buildAndUnitTest] strategy: matrix: fhirSchemaVersion: ["Stu3", "R4", "R4B", "R5"] From 2f86d9283709d934f06e631ec1308af4acb0441f Mon Sep 17 00:00:00 2001 
From: Paul Taladay Date: Fri, 29 Mar 2024 15:36:11 -0700 Subject: [PATCH 093/155] Temp change to same job for build and package --- .github/workflows/fhir-oss-ci-pipeline.yml | 79 ++++++++++++---------- 1 file changed, 45 insertions(+), 34 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index a4ad7b1dad..0ae9e07819 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -72,6 +72,9 @@ jobs: buildAndUnitTest: runs-on: ubuntu-latest needs: setup + strategy: + matrix: + fhirSchemaVersion: ["Stu3", "R4", "R4B", "R5"] steps: - name: Checkout uses: actions/checkout@v4 @@ -94,6 +97,14 @@ jobs: fileVersion: ${{needs.setup.outputs.fileVersion}} informationalVersion: ${{needs.setup.outputs.informationalVersion}} majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} + - name: Package Web Build Artifacts + uses: ./.github/actions/package-web-build-artifacts + with: + fhirschemaversion: ${{ matrix.fhirSchemaVersion }} + majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} + outputPath: ${{env.outputPath}} + buildConfiguration: ${{env.buildConfiguration}} + semVer: ${{needs.setup.outputs.semVer}} # - name: Test # uses: ./.github/actions/dotnet-test # with: @@ -109,40 +120,40 @@ jobs: uses: actions/upload-artifact@v4 with: path: ${{env.outputPath}} - packageBuildArtifacts: - runs-on: ubuntu-latest - needs: [setup, buildAndUnitTest] - strategy: - matrix: - fhirSchemaVersion: ["Stu3", "R4", "R4B", "R5"] - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Install Latest .Net SDK - uses: actions/setup-dotnet@v4 - with: - global-json-file: 'global.json' - dotnet-version: | - 6.x - 8.x - - name: Nuget Restore - working-directory: ${{github.workspace}} - shell: bash - run: dotnet restore --verbosity detailed - - name: Download Build Artifacts from Job Cache - uses: actions/download-artifact@v4 - with: - path: artifacts - - 
name: Package Web Build Artifacts - uses: ./.github/actions/package-web-build-artifacts - with: - fhirschemaversion: ${{ matrix.fhirSchemaVersion }} - majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} - outputPath: ${{env.outputPath}} - buildConfiguration: ${{env.buildConfiguration}} - semVer: ${{needs.setup.outputs.semVer}} + # packageBuildArtifacts: + # runs-on: ubuntu-latest + # needs: [setup, buildAndUnitTest] + # strategy: + # matrix: + # fhirSchemaVersion: ["Stu3", "R4", "R4B", "R5"] + # steps: + # - name: Checkout + # uses: actions/checkout@v4 + # with: + # fetch-depth: 0 + # - name: Install Latest .Net SDK + # uses: actions/setup-dotnet@v4 + # with: + # global-json-file: 'global.json' + # dotnet-version: | + # 6.x + # 8.x + # - name: Nuget Restore + # working-directory: ${{github.workspace}} + # shell: bash + # run: dotnet restore --verbosity detailed + # - name: Download Build Artifacts from Job Cache + # uses: actions/download-artifact@v4 + # with: + # path: artifacts + # - name: Package Web Build Artifacts + # uses: ./.github/actions/package-web-build-artifacts + # with: + # fhirschemaversion: ${{ matrix.fhirSchemaVersion }} + # majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} + # outputPath: ${{env.outputPath}} + # buildConfiguration: ${{env.buildConfiguration}} + # semVer: ${{needs.setup.outputs.semVer}} # - name: Docker Build # uses: ./.github/actions/docker-build # with: From 8cb3d4282b734009d695e5256cc8781a34c6aa3e Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 1 Apr 2024 09:27:56 -0700 Subject: [PATCH 094/155] Put in explicit path for web projects --- .github/actions/package-web-build-artifacts/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/package-web-build-artifacts/action.yml b/.github/actions/package-web-build-artifacts/action.yml index 15a25d7f67..0c66ad85e8 100644 --- a/.github/actions/package-web-build-artifacts/action.yml +++ 
b/.github/actions/package-web-build-artifacts/action.yml @@ -28,5 +28,5 @@ runs: shell: bash run: | echo "Packaging web artifacts for FHIR schema version ${{inputs.fhirSchemaVersion}}" - dotnet publish ${{github.workspace}}/**/*Web.csproj --output ${{inputs.outputPath}}/web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -p:PackageVersion=${{inputs.majorMinorPatch}} + dotnet publish ${{github.workspace}}/src/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.csproj --output ${{inputs.outputPath}}/web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -p:PackageVersion=${{inputs.majorMinorPatch}} From 3ec269ad0eb32cd05ccaefa024c9a14608d6faef Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 1 Apr 2024 09:41:57 -0700 Subject: [PATCH 095/155] Explicit dotnet version package --- .../actions/package-web-build-artifacts/action.yml | 5 ++++- .github/workflows/fhir-oss-ci-pipeline.yml | 12 +++++++++++- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/.github/actions/package-web-build-artifacts/action.yml b/.github/actions/package-web-build-artifacts/action.yml index 0c66ad85e8..fd6bbcc214 100644 --- a/.github/actions/package-web-build-artifacts/action.yml +++ b/.github/actions/package-web-build-artifacts/action.yml @@ -16,6 +16,9 @@ inputs: semVer: description: 'The SemVer to use' required: true + dotnetVersion: + description: 'The version of dotnet to use' + required: true runs: using: 'composite' steps: @@ -28,5 +31,5 @@ runs: shell: bash run: | echo "Packaging web artifacts for FHIR schema version ${{inputs.fhirSchemaVersion}}" - dotnet publish ${{github.workspace}}/src/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.csproj --output ${{inputs.outputPath}}/web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} 
--no-build -p:PackageVersion=${{inputs.majorMinorPatch}} + dotnet publish ${{github.workspace}}/src/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.csproj --output ${{inputs.outputPath}}/web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -p:PackageVersion=${{inputs.majorMinorPatch}} -f ${{inputs.dotnetVersion}} diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 0ae9e07819..bf3b292ef7 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -97,7 +97,7 @@ jobs: fileVersion: ${{needs.setup.outputs.fileVersion}} informationalVersion: ${{needs.setup.outputs.informationalVersion}} majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} - - name: Package Web Build Artifacts + - name: Package Web Build Artifacts 6.x uses: ./.github/actions/package-web-build-artifacts with: fhirschemaversion: ${{ matrix.fhirSchemaVersion }} @@ -105,6 +105,16 @@ jobs: outputPath: ${{env.outputPath}} buildConfiguration: ${{env.buildConfiguration}} semVer: ${{needs.setup.outputs.semVer}} + dotnetVersion: 6.x + - name: Package Web Build Artifacts 8.x + uses: ./.github/actions/package-web-build-artifacts + with: + fhirschemaversion: ${{ matrix.fhirSchemaVersion }} + majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} + outputPath: ${{env.outputPath}} + buildConfiguration: ${{env.buildConfiguration}} + semVer: ${{needs.setup.outputs.semVer}} + dotnetVersion: 8.x # - name: Test # uses: ./.github/actions/dotnet-test # with: From f70e55b81cabfca6ba06c5feedd1f3ed4d70a410 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 1 Apr 2024 10:18:53 -0700 Subject: [PATCH 096/155] Used wrong dotnet version modifiers --- .github/workflows/fhir-oss-ci-pipeline.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml 
b/.github/workflows/fhir-oss-ci-pipeline.yml index bf3b292ef7..9d1134783c 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -105,7 +105,7 @@ jobs: outputPath: ${{env.outputPath}} buildConfiguration: ${{env.buildConfiguration}} semVer: ${{needs.setup.outputs.semVer}} - dotnetVersion: 6.x + dotnetVersion: net6.0 - name: Package Web Build Artifacts 8.x uses: ./.github/actions/package-web-build-artifacts with: @@ -114,7 +114,7 @@ jobs: outputPath: ${{env.outputPath}} buildConfiguration: ${{env.buildConfiguration}} semVer: ${{needs.setup.outputs.semVer}} - dotnetVersion: 8.x + dotnetVersion: net8.0 # - name: Test # uses: ./.github/actions/dotnet-test # with: From b29fa14c327c66fc6a0fc1bf9903fd21b4352eec Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 1 Apr 2024 13:05:46 -0700 Subject: [PATCH 097/155] Updated web packaging to only target latest dotnet 8 --- .../actions/package-web-build-artifacts/action.yml | 5 +---- .github/workflows/fhir-oss-ci-pipeline.yml | 13 ++----------- 2 files changed, 3 insertions(+), 15 deletions(-) diff --git a/.github/actions/package-web-build-artifacts/action.yml b/.github/actions/package-web-build-artifacts/action.yml index fd6bbcc214..1d8f4da74c 100644 --- a/.github/actions/package-web-build-artifacts/action.yml +++ b/.github/actions/package-web-build-artifacts/action.yml @@ -16,9 +16,6 @@ inputs: semVer: description: 'The SemVer to use' required: true - dotnetVersion: - description: 'The version of dotnet to use' - required: true runs: using: 'composite' steps: @@ -31,5 +28,5 @@ runs: shell: bash run: | echo "Packaging web artifacts for FHIR schema version ${{inputs.fhirSchemaVersion}}" - dotnet publish ${{github.workspace}}/src/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.csproj --output ${{inputs.outputPath}}/web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build 
-p:PackageVersion=${{inputs.majorMinorPatch}} -f ${{inputs.dotnetVersion}} + dotnet publish ${{github.workspace}}/src/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.csproj --output ${{inputs.outputPath}}/web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -p:PackageVersion=${{inputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 9d1134783c..614d1f31e0 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -21,6 +21,7 @@ env: composeLocation: build/docker/docker-compose.yaml imageTag: ${{github.run_number}} outputPath: ${{github.workspace}}/artifacts + defaultDotNetVersion: net8.0 jobs: setup: @@ -97,7 +98,7 @@ jobs: fileVersion: ${{needs.setup.outputs.fileVersion}} informationalVersion: ${{needs.setup.outputs.informationalVersion}} majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} - - name: Package Web Build Artifacts 6.x + - name: Package Web Build Artifacts uses: ./.github/actions/package-web-build-artifacts with: fhirschemaversion: ${{ matrix.fhirSchemaVersion }} @@ -105,16 +106,6 @@ jobs: outputPath: ${{env.outputPath}} buildConfiguration: ${{env.buildConfiguration}} semVer: ${{needs.setup.outputs.semVer}} - dotnetVersion: net6.0 - - name: Package Web Build Artifacts 8.x - uses: ./.github/actions/package-web-build-artifacts - with: - fhirschemaversion: ${{ matrix.fhirSchemaVersion }} - majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} - outputPath: ${{env.outputPath}} - buildConfiguration: ${{env.buildConfiguration}} - semVer: ${{needs.setup.outputs.semVer}} - dotnetVersion: net8.0 # - name: Test # uses: ./.github/actions/dotnet-test # with: From 5d33dee027303385a86f349976bb57b1dc6fe241 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 1 Apr 2024 13:42:17 -0700 Subject: 
[PATCH 098/155] Testing packaging back into its own job. --- .github/workflows/fhir-oss-ci-pipeline.yml | 76 ++++++++++------------ 1 file changed, 34 insertions(+), 42 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 614d1f31e0..36007e88eb 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -98,14 +98,6 @@ jobs: fileVersion: ${{needs.setup.outputs.fileVersion}} informationalVersion: ${{needs.setup.outputs.informationalVersion}} majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} - - name: Package Web Build Artifacts - uses: ./.github/actions/package-web-build-artifacts - with: - fhirschemaversion: ${{ matrix.fhirSchemaVersion }} - majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} - outputPath: ${{env.outputPath}} - buildConfiguration: ${{env.buildConfiguration}} - semVer: ${{needs.setup.outputs.semVer}} # - name: Test # uses: ./.github/actions/dotnet-test # with: @@ -121,40 +113,40 @@ jobs: uses: actions/upload-artifact@v4 with: path: ${{env.outputPath}} - # packageBuildArtifacts: - # runs-on: ubuntu-latest - # needs: [setup, buildAndUnitTest] - # strategy: - # matrix: - # fhirSchemaVersion: ["Stu3", "R4", "R4B", "R5"] - # steps: - # - name: Checkout - # uses: actions/checkout@v4 - # with: - # fetch-depth: 0 - # - name: Install Latest .Net SDK - # uses: actions/setup-dotnet@v4 - # with: - # global-json-file: 'global.json' - # dotnet-version: | - # 6.x - # 8.x - # - name: Nuget Restore - # working-directory: ${{github.workspace}} - # shell: bash - # run: dotnet restore --verbosity detailed - # - name: Download Build Artifacts from Job Cache - # uses: actions/download-artifact@v4 - # with: - # path: artifacts - # - name: Package Web Build Artifacts - # uses: ./.github/actions/package-web-build-artifacts - # with: - # fhirschemaversion: ${{ matrix.fhirSchemaVersion }} - # majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} 
- # outputPath: ${{env.outputPath}} - # buildConfiguration: ${{env.buildConfiguration}} - # semVer: ${{needs.setup.outputs.semVer}} + packageBuildArtifacts: + runs-on: ubuntu-latest + needs: [setup, buildAndUnitTest] + strategy: + matrix: + fhirSchemaVersion: ["Stu3", "R4", "R4B", "R5"] + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Install Latest .Net SDK + uses: actions/setup-dotnet@v4 + with: + global-json-file: 'global.json' + dotnet-version: | + 6.x + 8.x + - name: Nuget Restore + working-directory: ${{github.workspace}} + shell: bash + run: dotnet restore --verbosity detailed + - name: Download Build Artifacts from Job Cache + uses: actions/download-artifact@v4 + with: + path: artifacts + - name: Package Web Build Artifacts + uses: ./.github/actions/package-web-build-artifacts + with: + fhirschemaversion: ${{ matrix.fhirSchemaVersion }} + majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} + outputPath: ${{env.outputPath}} + buildConfiguration: ${{env.buildConfiguration}} + semVer: ${{needs.setup.outputs.semVer}} # - name: Docker Build # uses: ./.github/actions/docker-build # with: From a7fa0ae64e8af6b7bfaeb81655b40243e151417f Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 1 Apr 2024 13:53:53 -0700 Subject: [PATCH 099/155] Remove no longer needed matrix --- .github/workflows/fhir-oss-ci-pipeline.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 36007e88eb..80993996e2 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -73,9 +73,6 @@ jobs: buildAndUnitTest: runs-on: ubuntu-latest needs: setup - strategy: - matrix: - fhirSchemaVersion: ["Stu3", "R4", "R4B", "R5"] steps: - name: Checkout uses: actions/checkout@v4 From 16c0544efd302dbc8c542c97070cda6ca7932522 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Tue, 2 Apr 2024 10:38:14 -0700 Subject: 
[PATCH 100/155] Migrated to primary yml file for uploads. --- .../package-web-build-artifacts/action.yml | 12 ++----- .github/workflows/fhir-oss-ci-pipeline.yml | 35 ++++++++++++++++++- 2 files changed, 37 insertions(+), 10 deletions(-) diff --git a/.github/actions/package-web-build-artifacts/action.yml b/.github/actions/package-web-build-artifacts/action.yml index 1d8f4da74c..54b7325689 100644 --- a/.github/actions/package-web-build-artifacts/action.yml +++ b/.github/actions/package-web-build-artifacts/action.yml @@ -19,14 +19,8 @@ inputs: runs: using: 'composite' steps: - - name: Create Nuget packages + - name: Publish Web Artifacts shell: bash run: | - echo "Creating Nuget packages for FHIR schema version ${{inputs.fhirSchemaVersion}}" - dotnet pack ${{github.workspace}} --output ${{inputs.outputPath}}/nupkgs --no-build --configuration=${{inputs.buildConfiguration}} -p:PackageVersion=${{inputs.majorMinorPatch}} - - name: Package Web Artifacts - shell: bash - run: | - echo "Packaging web artifacts for FHIR schema version ${{inputs.fhirSchemaVersion}}" - dotnet publish ${{github.workspace}}/src/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.csproj --output ${{inputs.outputPath}}/web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -p:PackageVersion=${{inputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} - + echo "Publishing web artifacts for FHIR schema version ${{inputs.fhirSchemaVersion}}" + dotnet publish ${{github.workspace}}/src/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.csproj --output ${{inputs.outputPath}}/deploy --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -p:PackageVersion=${{inputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml 
b/.github/workflows/fhir-oss-ci-pipeline.yml index 80993996e2..e5b1e31272 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -110,6 +110,39 @@ jobs: uses: actions/upload-artifact@v4 with: path: ${{env.outputPath}} + - name: Create Nuget packages + shell: bash + run: | + echo "Creating Nuget packages for FHIR schema version ${{inputs.fhirSchemaVersion}}" + dotnet pack ${{github.workspace}} --output ${{inputs.outputPath}}/nupkgs --no-build --configuration=${{inputs.buildConfiguration}} -p:PackageVersion=${{inputs.majorMinorPatch}} + - name: Upload Nuget Packages + uses: actions/upload-artifact@v4 + with: + path: ${{env.outputPath}}/nupkgs + - name: Publish testauthenvironment.json to deploy directory + shell: bash + run: | + echo "Publishing testauthenvironment.json to deploy directory" + dotnet publish ${{github.workspace}}/testauthenvironment.json --output ${{inputs.outputPath}}/deploy --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -p:PackageVersion=${{inputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} --force + - name: Publish global.json to deploy directory + shell: bash + run: | + echo "Publishing global.json to deploy directory" + dotnet publish ${{github.workspace}}/global.json --output ${{inputs.outputPath}}/deploy --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -p:PackageVersion=${{inputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} --force + - name: Publish test configuration jsons to deploy directory + shell: bash + run: | + echo "Publishing test configuration jsons to deploy directory" + dotnet publish ${{github.workspace}}/test/Configuration --output ${{inputs.outputPath}}/deploy --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -p:PackageVersion=${{inputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} --force + - name: Publish release directory 
to deploy directory + shell: bash + run: | + echo "Publishing release directory to deploy directory" + dotnet publish ${{github.workspace}}/release --output ${{inputs.outputPath}}/deploy --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -p:PackageVersion=${{inputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} --force + - name: Upload deploy directory + uses: actions/upload-artifact@v4 + with: + path: ${{env.outputPath}}/deploy packageBuildArtifacts: runs-on: ubuntu-latest needs: [setup, buildAndUnitTest] @@ -136,7 +169,7 @@ jobs: uses: actions/download-artifact@v4 with: path: artifacts - - name: Package Web Build Artifacts + - name: Package and Publish Artifacts uses: ./.github/actions/package-web-build-artifacts with: fhirschemaversion: ${{ matrix.fhirSchemaVersion }} From d2c5afdc29d9689a8b3b1e999fa81195846fdbc4 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Tue, 2 Apr 2024 11:45:56 -0700 Subject: [PATCH 101/155] Fixed properties after moving --- .github/workflows/fhir-oss-ci-pipeline.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index e5b1e31272..9a5db76274 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -113,8 +113,8 @@ jobs: - name: Create Nuget packages shell: bash run: | - echo "Creating Nuget packages for FHIR schema version ${{inputs.fhirSchemaVersion}}" - dotnet pack ${{github.workspace}} --output ${{inputs.outputPath}}/nupkgs --no-build --configuration=${{inputs.buildConfiguration}} -p:PackageVersion=${{inputs.majorMinorPatch}} + echo "Creating Nuget packages" + dotnet pack ${{github.workspace}} --output ${{env.outputPath}}/nupkgs --no-build --configuration=${{env.buildConfiguration}} -p:PackageVersion=${{needs.setup.outputs.majorMinorPatch}} - name: Upload Nuget Packages uses: actions/upload-artifact@v4 with: @@ -123,22 
+123,22 @@ jobs: shell: bash run: | echo "Publishing testauthenvironment.json to deploy directory" - dotnet publish ${{github.workspace}}/testauthenvironment.json --output ${{inputs.outputPath}}/deploy --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -p:PackageVersion=${{inputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} --force + dotnet publish ${{github.workspace}}/testauthenvironment.json --output ${{env.outputPath}}/deploy --configuration ${{env.buildConfiguration}} --version-suffix ${{needs.setup.outputs.semVer}} --no-build -p:PackageVersion=${{needs.setup.outputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} --force - name: Publish global.json to deploy directory shell: bash run: | echo "Publishing global.json to deploy directory" - dotnet publish ${{github.workspace}}/global.json --output ${{inputs.outputPath}}/deploy --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -p:PackageVersion=${{inputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} --force + dotnet publish ${{github.workspace}}/global.json --output ${{env.outputPath}}/deploy --configuration ${{env.buildConfiguration}} --version-suffix ${{needs.setup.outputs.semVer}} --no-build -p:PackageVersion=${{needs.setup.outputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} --force - name: Publish test configuration jsons to deploy directory shell: bash run: | echo "Publishing test configuration jsons to deploy directory" - dotnet publish ${{github.workspace}}/test/Configuration --output ${{inputs.outputPath}}/deploy --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -p:PackageVersion=${{inputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} --force + dotnet publish ${{github.workspace}}/test/Configuration --output ${{env.outputPath}}/deploy --configuration ${{env.buildConfiguration}} --version-suffix ${{needs.setup.outputs.semVer}} --no-build 
-p:PackageVersion=${{needs.setup.outputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} --force - name: Publish release directory to deploy directory shell: bash run: | echo "Publishing release directory to deploy directory" - dotnet publish ${{github.workspace}}/release --output ${{inputs.outputPath}}/deploy --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -p:PackageVersion=${{inputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} --force + dotnet publish ${{github.workspace}}/release --output ${{env.outputPath}}/deploy --configuration ${{env.buildConfiguration}} --version-suffix ${{needs.setup.outputs.semVer}} --no-build -p:PackageVersion=${{needs.setup.outputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} --force - name: Upload deploy directory uses: actions/upload-artifact@v4 with: From 39ced0b380c5d7047033abcb7bf1f3ed54d66b4b Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Tue, 2 Apr 2024 12:07:07 -0700 Subject: [PATCH 102/155] Added name for each artifact upload as it was defaulting to artifact for all of them. 
--- .github/workflows/fhir-oss-ci-pipeline.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 9a5db76274..ea548098b4 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -118,6 +118,7 @@ jobs: - name: Upload Nuget Packages uses: actions/upload-artifact@v4 with: + name: nupkgs path: ${{env.outputPath}}/nupkgs - name: Publish testauthenvironment.json to deploy directory shell: bash @@ -142,6 +143,7 @@ jobs: - name: Upload deploy directory uses: actions/upload-artifact@v4 with: + name: deploy path: ${{env.outputPath}}/deploy packageBuildArtifacts: runs-on: ubuntu-latest From 7223fd563ab5cd4f2ce04a7280beece13c9cc872 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Tue, 2 Apr 2024 13:00:19 -0700 Subject: [PATCH 103/155] Updated to copy for deploy folder. --- .github/workflows/fhir-oss-ci-pipeline.yml | 47 +++++++++++++++------- 1 file changed, 33 insertions(+), 14 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index ea548098b4..36cd198037 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -109,42 +109,59 @@ jobs: - name: Upload a Build Artifact uses: actions/upload-artifact@v4 with: + name: build path: ${{env.outputPath}} - name: Create Nuget packages shell: bash run: | echo "Creating Nuget packages" dotnet pack ${{github.workspace}} --output ${{env.outputPath}}/nupkgs --no-build --configuration=${{env.buildConfiguration}} -p:PackageVersion=${{needs.setup.outputs.majorMinorPatch}} + - name: Upload Nuget Packages uses: actions/upload-artifact@v4 with: - name: nupkgs + name: nuget path: ${{env.outputPath}}/nupkgs - - name: Publish testauthenvironment.json to deploy directory + + - name: samples + shell: bash + run: | + echo "Copying samples to deploy directory" + cp -r ${{github.workspace}}/samples 
${{env.outputPath}}/deploy + - name: Copying testauthenvironment.json to deploy directory shell: bash run: | - echo "Publishing testauthenvironment.json to deploy directory" - dotnet publish ${{github.workspace}}/testauthenvironment.json --output ${{env.outputPath}}/deploy --configuration ${{env.buildConfiguration}} --version-suffix ${{needs.setup.outputs.semVer}} --no-build -p:PackageVersion=${{needs.setup.outputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} --force - - name: Publish global.json to deploy directory + echo "Copying testauthenvironment.json to deploy directory" + cp ${{github.workspace}}/testauthenvironment.json ${{env.outputPath}}/deploy/ + - name: Copying global.json to deploy directory shell: bash run: | - echo "Publishing global.json to deploy directory" - dotnet publish ${{github.workspace}}/global.json --output ${{env.outputPath}}/deploy --configuration ${{env.buildConfiguration}} --version-suffix ${{needs.setup.outputs.semVer}} --no-build -p:PackageVersion=${{needs.setup.outputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} --force - - name: Publish test configuration jsons to deploy directory + echo "Copying global.json to deploy directory" + cp ${{github.workspace}}/global.json ${{env.outputPath}}/deploy/ + + - name: Copying test configuration json to deploy directory shell: bash run: | - echo "Publishing test configuration jsons to deploy directory" - dotnet publish ${{github.workspace}}/test/Configuration --output ${{env.outputPath}}/deploy --configuration ${{env.buildConfiguration}} --version-suffix ${{needs.setup.outputs.semVer}} --no-build -p:PackageVersion=${{needs.setup.outputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} --force - - name: Publish release directory to deploy directory + echo "Copying test configuration json to deploy directory" + cp ${{github.workspace}}/test/Configuration/testconfiguration.json ${{env.outputPath}}/deploy/ + + - name: Copying release directory to deploy directory shell: bash run: | - 
echo "Publishing release directory to deploy directory" - dotnet publish ${{github.workspace}}/release --output ${{env.outputPath}}/deploy --configuration ${{env.buildConfiguration}} --version-suffix ${{needs.setup.outputs.semVer}} --no-build -p:PackageVersion=${{needs.setup.outputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} --force + echo "Copying release directory to deploy directory" + cp -r ${{github.workspace}}/release ${{env.outputPath}}/deploy + - name: Upload deploy directory uses: actions/upload-artifact@v4 with: name: deploy path: ${{env.outputPath}}/deploy + + # - name: Upload Symbols + # uses: actions/ipload-artifact@v4 + # with: + # name: symbols + # path: ${{env.outputPath}}/bin/${{env.buildConfiguration}}/net5.0/publish packageBuildArtifacts: runs-on: ubuntu-latest needs: [setup, buildAndUnitTest] @@ -167,10 +184,12 @@ jobs: working-directory: ${{github.workspace}} shell: bash run: dotnet restore --verbosity detailed + - name: Download Build Artifacts from Job Cache uses: actions/download-artifact@v4 with: - path: artifacts + path: artifacts/build + - name: Package and Publish Artifacts uses: ./.github/actions/package-web-build-artifacts with: From b4dc30aee8988860106ee077b6c261f6f20b3661 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Tue, 2 Apr 2024 16:42:56 -0700 Subject: [PATCH 104/155] Added maxcpu 1 to fix file lock issue during build. 
--- .github/actions/dotnet-build/action.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 4adc3f4bca..91251854cb 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -22,4 +22,5 @@ runs: steps: - name: Build shell: bash - run: dotnet build --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror + run: dotnet build -maxcpucount:1 --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror + # using max cpu count of 1 to avoid file usage conflicts From b972e599481dcc7977ab6735ff0aff65109535ef Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 3 Apr 2024 09:18:38 -0700 Subject: [PATCH 105/155] Removing package version --- .github/actions/package-web-build-artifacts/action.yml | 2 +- .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/actions/package-web-build-artifacts/action.yml b/.github/actions/package-web-build-artifacts/action.yml index 54b7325689..09039162b7 100644 --- a/.github/actions/package-web-build-artifacts/action.yml +++ b/.github/actions/package-web-build-artifacts/action.yml @@ -23,4 +23,4 @@ runs: shell: bash run: | echo "Publishing web artifacts for FHIR schema version ${{inputs.fhirSchemaVersion}}" - dotnet publish ${{github.workspace}}/src/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.csproj --output 
${{inputs.outputPath}}/deploy --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -p:PackageVersion=${{inputs.majorMinorPatch}} -f ${{env.defaultDotNetVersion}} + dotnet publish ${{github.workspace}}/src/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.csproj --output ${{inputs.outputPath}}/deploy --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -f ${{env.defaultDotNetVersion}} diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 36cd198037..61bce4d1e6 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -104,7 +104,7 @@ jobs: # run: | # curl -Lo $RUNNER_TEMP/sbom-tool https://github.com/microsoft/sbom-tool/releases/latest/download/sbom-tool-linux-x64 # chmod +x $RUNNER_TEMP/sbom-tool - # $RUNNER_TEMP/sbom-tool generate -b . -bc . -V Verbose -ps "Organization: Microsoft" -pv ${{needs.setup.outputs.majorMinorPatch}} -pn ${{needs.setup.outputs.informationalVersion}} + # $RUNNER_TEMP/sbom-tool generate -b ${{env.outputPath}} -bc . -V Verbose -ps "Organization: Microsoft" -pv ${{needs.setup.outputs.majorMinorPatch}} -pn ${{needs.setup.outputs.informationalVersion}} - name: Upload a Build Artifact uses: actions/upload-artifact@v4 From fb562b7d858a154995bfb77fe074562e31fc46ce Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 3 Apr 2024 14:39:01 -0700 Subject: [PATCH 106/155] Added in docker image create and symbols publishing. 
--- .github/actions/docker-build/action.yml | 5 +- .github/workflows/fhir-oss-ci-pipeline.yml | 110 ++++++++++++++------- 2 files changed, 73 insertions(+), 42 deletions(-) diff --git a/.github/actions/docker-build/action.yml b/.github/actions/docker-build/action.yml index 5e66230270..3d9f5ebf54 100644 --- a/.github/actions/docker-build/action.yml +++ b/.github/actions/docker-build/action.yml @@ -2,9 +2,6 @@ name: Docker Build description: 'Builds images for all supported FHIR versions' inputs: - tag: - description: 'The tag to apply to the images' - required: true fhirSchemaVersion: description: 'The FHIR schema version to package' required: true @@ -18,7 +15,7 @@ inputs: runs: using: 'composite' steps: - - name: Build and Push Docker Images + - name: Build Docker Image shell: bash run: | echo "Building and pushing Docker images for FHIR schema version ${{inputs.fhirSchemaVersion}}" diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 61bce4d1e6..c797352bc4 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -128,11 +128,13 @@ jobs: run: | echo "Copying samples to deploy directory" cp -r ${{github.workspace}}/samples ${{env.outputPath}}/deploy + - name: Copying testauthenvironment.json to deploy directory shell: bash run: | echo "Copying testauthenvironment.json to deploy directory" cp ${{github.workspace}}/testauthenvironment.json ${{env.outputPath}}/deploy/ + - name: Copying global.json to deploy directory shell: bash run: | @@ -151,57 +153,89 @@ jobs: echo "Copying release directory to deploy directory" cp -r ${{github.workspace}}/release ${{env.outputPath}}/deploy - - name: Upload deploy directory - uses: actions/upload-artifact@v4 - with: - name: deploy - path: ${{env.outputPath}}/deploy + - name: Copying pdb files to symbols directory + shell: bash + run: | + echo "Copying pdb files to deploy symbols" + shopt -s globstar + cp -r 
${{github.workspace}}/src/**/*.pdb ${{env.outputPath}}/symbols - # - name: Upload Symbols - # uses: actions/ipload-artifact@v4 - # with: - # name: symbols - # path: ${{env.outputPath}}/bin/${{env.buildConfiguration}}/net5.0/publish - packageBuildArtifacts: - runs-on: ubuntu-latest - needs: [setup, buildAndUnitTest] - strategy: - matrix: - fhirSchemaVersion: ["Stu3", "R4", "R4B", "R5"] - steps: - - name: Checkout - uses: actions/checkout@v4 + - name: Publish Stu3 Web Artifacts to deploy directory + uses: ./.github/actions/package-web-build-artifacts with: - fetch-depth: 0 - - name: Install Latest .Net SDK - uses: actions/setup-dotnet@v4 + fhirschemaversion: "Stu3" + majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} + outputPath: ${{env.outputPath}} + buildConfiguration: ${{env.buildConfiguration}} + semVer: ${{needs.setup.outputs.semVer}} + + - name: Publish R4 Web Artifacts to deploy directory + uses: ./.github/actions/package-web-build-artifacts with: - global-json-file: 'global.json' - dotnet-version: | - 6.x - 8.x - - name: Nuget Restore - working-directory: ${{github.workspace}} - shell: bash - run: dotnet restore --verbosity detailed + fhirschemaversion: "R4" + majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} + outputPath: ${{env.outputPath}} + buildConfiguration: ${{env.buildConfiguration}} + semVer: ${{needs.setup.outputs.semVer}} - - name: Download Build Artifacts from Job Cache - uses: actions/download-artifact@v4 + - name: Publish R4B Web Artifacts to deploy directory + uses: ./.github/actions/package-web-build-artifacts with: - path: artifacts/build + fhirschemaversion: "R4B" + majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} + outputPath: ${{env.outputPath}} + buildConfiguration: ${{env.buildConfiguration}} + semVer: ${{needs.setup.outputs.semVer}} - - name: Package and Publish Artifacts + - name: Publish R5 Web Artifacts to deploy directory uses: ./.github/actions/package-web-build-artifacts with: - fhirschemaversion: ${{ 
matrix.fhirSchemaVersion }} + fhirschemaversion: "R5" majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} outputPath: ${{env.outputPath}} buildConfiguration: ${{env.buildConfiguration}} semVer: ${{needs.setup.outputs.semVer}} - # - name: Docker Build - # uses: ./.github/actions/docker-build + + - name: Docker Build Stu3 Image + uses: ./.github/actions/docker-build + with: + assemblySemFileVer: ${{needs.setup.outputs.semVer}} + fhirSchemaVersion: "Stu3" + composeLocation: ${{env.composeLocation}} + + - name: Docker Build R4 Image + uses: ./.github/actions/docker-build + with: + assemblySemFileVer: ${{needs.setup.outputs.semVer}} + fhirSchemaVersion: "R4" + composeLocation: ${{env.composeLocation}} + + - name: Docker Build R4B Image + uses: ./.github/actions/docker-build + with: + assemblySemFileVer: ${{needs.setup.outputs.semVer}} + fhirSchemaVersion: "R4B" + composeLocation: ${{env.composeLocation}} + + - name: Docker Build R5 Image + uses: ./.github/actions/docker-build + with: + assemblySemFileVer: ${{needs.setup.outputs.semVer}} + fhirSchemaVersion: "R5" + composeLocation: ${{env.composeLocation}} + + - name: Upload deploy directory + uses: actions/upload-artifact@v4 + with: + name: deploy + path: ${{env.outputPath}}/deploy + + # - name: Upload Symbols + # uses: actions/upload-artifact@v4 # with: - # assemblySemFileVer: ${{needs.setup.outputs.semVer}} + # name: symbols + # path: ${{env.outputPath}}/bin/${{env.buildConfiguration}}/net5.0/publish + # runIntegrationTests: # runs-on: ubuntu-latest # needs : buildAndUnitTest From f3bbcc2e6f9b8f9f17cdf8edc08a6d603b5dcab8 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 3 Apr 2024 14:50:03 -0700 Subject: [PATCH 107/155] Commenting out symbols for now --- .github/workflows/fhir-oss-ci-pipeline.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index c797352bc4..f00b79a298 100644 --- 
a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -153,12 +153,12 @@ jobs: echo "Copying release directory to deploy directory" cp -r ${{github.workspace}}/release ${{env.outputPath}}/deploy - - name: Copying pdb files to symbols directory - shell: bash - run: | - echo "Copying pdb files to deploy symbols" - shopt -s globstar - cp -r ${{github.workspace}}/src/**/*.pdb ${{env.outputPath}}/symbols + # - name: Copying pdb files to symbols directory + # shell: bash + # run: | + # echo "Copying pdb files to deploy symbols" + # shopt -s globstar + # cp -r ${{github.workspace}}/src/**/*.pdb ${{env.outputPath}}/symbols - name: Publish Stu3 Web Artifacts to deploy directory uses: ./.github/actions/package-web-build-artifacts From 552102e4826840642e45c1fd378dbcbdbaa329cb Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 3 Apr 2024 17:05:15 -0700 Subject: [PATCH 108/155] Fixes --- .github/actions/package-web-build-artifacts/action.yml | 2 +- .github/workflows/fhir-oss-ci-pipeline.yml | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/actions/package-web-build-artifacts/action.yml b/.github/actions/package-web-build-artifacts/action.yml index 09039162b7..e1d33f694c 100644 --- a/.github/actions/package-web-build-artifacts/action.yml +++ b/.github/actions/package-web-build-artifacts/action.yml @@ -23,4 +23,4 @@ runs: shell: bash run: | echo "Publishing web artifacts for FHIR schema version ${{inputs.fhirSchemaVersion}}" - dotnet publish ${{github.workspace}}/src/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.csproj --output ${{inputs.outputPath}}/deploy --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -f ${{env.defaultDotNetVersion}} + # dotnet publish 
${{github.workspace}}/src/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.csproj --output ${{inputs.outputPath}}/deploy/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -f ${{env.defaultDotNetVersion}} diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index f00b79a298..4306af386f 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -199,28 +199,28 @@ jobs: - name: Docker Build Stu3 Image uses: ./.github/actions/docker-build with: - assemblySemFileVer: ${{needs.setup.outputs.semVer}} + assemblyVersion: ${{needs.setup.outputs.semVer}} fhirSchemaVersion: "Stu3" composeLocation: ${{env.composeLocation}} - name: Docker Build R4 Image uses: ./.github/actions/docker-build with: - assemblySemFileVer: ${{needs.setup.outputs.semVer}} + assemblyVersion: ${{needs.setup.outputs.semVer}} fhirSchemaVersion: "R4" composeLocation: ${{env.composeLocation}} - name: Docker Build R4B Image uses: ./.github/actions/docker-build with: - assemblySemFileVer: ${{needs.setup.outputs.semVer}} + assemblyVersion: ${{needs.setup.outputs.semVer}} fhirSchemaVersion: "R4B" composeLocation: ${{env.composeLocation}} - name: Docker Build R5 Image uses: ./.github/actions/docker-build with: - assemblySemFileVer: ${{needs.setup.outputs.semVer}} + assemblyVersion: ${{needs.setup.outputs.semVer}} fhirSchemaVersion: "R5" composeLocation: ${{env.composeLocation}} From 92cbae7165d5dea0035def5ca0d2056dddd94a59 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 3 Apr 2024 17:28:10 -0700 Subject: [PATCH 109/155] Adding zip command --- .github/actions/package-web-build-artifacts/action.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/actions/package-web-build-artifacts/action.yml 
b/.github/actions/package-web-build-artifacts/action.yml index e1d33f694c..62cbebe807 100644 --- a/.github/actions/package-web-build-artifacts/action.yml +++ b/.github/actions/package-web-build-artifacts/action.yml @@ -23,4 +23,5 @@ runs: shell: bash run: | echo "Publishing web artifacts for FHIR schema version ${{inputs.fhirSchemaVersion}}" - # dotnet publish ${{github.workspace}}/src/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.csproj --output ${{inputs.outputPath}}/deploy/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -f ${{env.defaultDotNetVersion}} + dotnet publish ${{github.workspace}}/src/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.csproj --output ${{inputs.outputPath}}/deploy/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -f ${{env.defaultDotNetVersion}} + zip -r Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.zip ${{inputs.outputPath}}/deploy/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web From a33d79f4b1dbc7838e020c7f5f5aa32f6f82cd86 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Wed, 3 Apr 2024 18:19:07 -0700 Subject: [PATCH 110/155] Copy root docker yaml file --- .github/workflows/fhir-oss-ci-pipeline.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 4306af386f..91159cc768 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -147,11 +147,11 @@ jobs: echo "Copying test configuration json to deploy directory" cp ${{github.workspace}}/test/Configuration/testconfiguration.json ${{env.outputPath}}/deploy/ - - name: Copying release 
directory to deploy directory + - name: Copying docker compose root file to deploy directory shell: bash run: | - echo "Copying release directory to deploy directory" - cp -r ${{github.workspace}}/release ${{env.outputPath}}/deploy + echo "Copying docker compose root file to deploy directory" + cp ${{github.workspace}}/docker-compose.yaml ${{env.outputPath}}/deploy # - name: Copying pdb files to symbols directory # shell: bash From f0c1e7703d7dcee8b3cc6859dd96b176760704ee Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 4 Apr 2024 09:30:27 -0700 Subject: [PATCH 111/155] Updated docker compose and web package --- .github/actions/docker-build/action.yml | 2 +- .github/actions/package-web-build-artifacts/action.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/actions/docker-build/action.yml b/.github/actions/docker-build/action.yml index 3d9f5ebf54..74a0468794 100644 --- a/.github/actions/docker-build/action.yml +++ b/.github/actions/docker-build/action.yml @@ -19,4 +19,4 @@ runs: shell: bash run: | echo "Building and pushing Docker images for FHIR schema version ${{inputs.fhirSchemaVersion}}" - docker-compose -f ${{inputs.composeLocation}} build -p ${{inputs.fhirSchemaVersion}} --build-arg FHIR_VERSION=${{inputs.fhirSchemaVersion}} --build-arg ASSEMBLY_VER=${{inputs.assemblyVersion}} + docker-compose build --build-arg FHIR_VERSION=${{inputs.fhirSchemaVersion}} --build-arg ASSEMBLY_VER=${{inputs.assemblyVersion}} diff --git a/.github/actions/package-web-build-artifacts/action.yml b/.github/actions/package-web-build-artifacts/action.yml index 62cbebe807..682417286f 100644 --- a/.github/actions/package-web-build-artifacts/action.yml +++ b/.github/actions/package-web-build-artifacts/action.yml @@ -24,4 +24,4 @@ runs: run: | echo "Publishing web artifacts for FHIR schema version ${{inputs.fhirSchemaVersion}}" dotnet publish 
${{github.workspace}}/src/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.csproj --output ${{inputs.outputPath}}/deploy/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web --configuration ${{inputs.buildConfiguration}} --version-suffix ${{inputs.semVer}} --no-build -f ${{env.defaultDotNetVersion}} - zip -r Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.zip ${{inputs.outputPath}}/deploy/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web + zip Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web.zip ${{inputs.outputPath}}/deploy/Microsoft.Health.Fhir.${{inputs.fhirSchemaVersion}}.Web From 19727dcb209e39a2ff4e5562c1b593b5996d89a1 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 4 Apr 2024 09:58:07 -0700 Subject: [PATCH 112/155] Updated created docker compose file from release --- .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 91159cc768..fba43cd34a 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -151,7 +151,7 @@ jobs: shell: bash run: | echo "Copying docker compose root file to deploy directory" - cp ${{github.workspace}}/docker-compose.yaml ${{env.outputPath}}/deploy + cp ${{github.workspace}}/release/docker-compose.yaml ${{env.outputPath}}/deploy # - name: Copying pdb files to symbols directory # shell: bash From fe1e3a0fa7eb6aa50131054a695de993ca857aed Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 4 Apr 2024 10:30:11 -0700 Subject: [PATCH 113/155] Updating working path for docker build --- .github/actions/docker-build/action.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/actions/docker-build/action.yml b/.github/actions/docker-build/action.yml index 74a0468794..f88b723d79 100644 --- a/.github/actions/docker-build/action.yml +++ 
b/.github/actions/docker-build/action.yml @@ -19,4 +19,5 @@ runs: shell: bash run: | echo "Building and pushing Docker images for FHIR schema version ${{inputs.fhirSchemaVersion}}" + cd ${{github.workspace}}/build docker-compose build --build-arg FHIR_VERSION=${{inputs.fhirSchemaVersion}} --build-arg ASSEMBLY_VER=${{inputs.assemblyVersion}} From deb4482a069af087b4c2773ebba027f72a1763b0 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 4 Apr 2024 10:49:00 -0700 Subject: [PATCH 114/155] Shortening root path --- .github/actions/docker-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/docker-build/action.yml b/.github/actions/docker-build/action.yml index f88b723d79..8dd80f5c20 100644 --- a/.github/actions/docker-build/action.yml +++ b/.github/actions/docker-build/action.yml @@ -19,5 +19,5 @@ runs: shell: bash run: | echo "Building and pushing Docker images for FHIR schema version ${{inputs.fhirSchemaVersion}}" - cd ${{github.workspace}}/build + cd ${{github.workspace}} docker-compose build --build-arg FHIR_VERSION=${{inputs.fhirSchemaVersion}} --build-arg ASSEMBLY_VER=${{inputs.assemblyVersion}} From 2cd31c568e9c33125a570bf96f31b26e90736cbe Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 4 Apr 2024 11:02:50 -0700 Subject: [PATCH 115/155] Trying to figure out current working path during docker creation --- .github/actions/docker-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/docker-build/action.yml b/.github/actions/docker-build/action.yml index 8dd80f5c20..8ded812923 100644 --- a/.github/actions/docker-build/action.yml +++ b/.github/actions/docker-build/action.yml @@ -19,5 +19,5 @@ runs: shell: bash run: | echo "Building and pushing Docker images for FHIR schema version ${{inputs.fhirSchemaVersion}}" - cd ${{github.workspace}} + pwd docker-compose build --build-arg FHIR_VERSION=${{inputs.fhirSchemaVersion}} --build-arg 
ASSEMBLY_VER=${{inputs.assemblyVersion}} From 7a1b427c88e65f8e52b0bd887cce838203bf661b Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 4 Apr 2024 11:20:19 -0700 Subject: [PATCH 116/155] Docker compose path update --- .github/actions/docker-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/docker-build/action.yml b/.github/actions/docker-build/action.yml index 8ded812923..1ce5a0ec13 100644 --- a/.github/actions/docker-build/action.yml +++ b/.github/actions/docker-build/action.yml @@ -19,5 +19,5 @@ runs: shell: bash run: | echo "Building and pushing Docker images for FHIR schema version ${{inputs.fhirSchemaVersion}}" - pwd + cd build/docker docker-compose build --build-arg FHIR_VERSION=${{inputs.fhirSchemaVersion}} --build-arg ASSEMBLY_VER=${{inputs.assemblyVersion}} From 885d2b397730a22b670b1776f59234730cc1b118 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 4 Apr 2024 11:38:51 -0700 Subject: [PATCH 117/155] Improper version format --- .github/workflows/fhir-oss-ci-pipeline.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index fba43cd34a..a4dee26c7f 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -199,28 +199,28 @@ jobs: - name: Docker Build Stu3 Image uses: ./.github/actions/docker-build with: - assemblyVersion: ${{needs.setup.outputs.semVer}} + assemblyVersion: ${{needs.setup.outputs.majorMinorPatch}} fhirSchemaVersion: "Stu3" composeLocation: ${{env.composeLocation}} - name: Docker Build R4 Image uses: ./.github/actions/docker-build with: - assemblyVersion: ${{needs.setup.outputs.semVer}} + assemblyVersion: ${{needs.setup.outputs.majorMinorPatch}} fhirSchemaVersion: "R4" composeLocation: ${{env.composeLocation}} - name: Docker Build R4B Image uses: ./.github/actions/docker-build with: - assemblyVersion: 
${{needs.setup.outputs.semVer}} + assemblyVersion: ${{needs.setup.outputs.majorMinorPatch}} fhirSchemaVersion: "R4B" composeLocation: ${{env.composeLocation}} - name: Docker Build R5 Image uses: ./.github/actions/docker-build with: - assemblyVersion: ${{needs.setup.outputs.semVer}} + assemblyVersion: ${{needs.setup.outputs.majorMinorPatch}} fhirSchemaVersion: "R5" composeLocation: ${{env.composeLocation}} From 9e51b099b52a208b30a01c983a0e676649a1222e Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 4 Apr 2024 12:08:20 -0700 Subject: [PATCH 118/155] Updated to use our self hosted runner --- .github/workflows/fhir-oss-ci-pipeline.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index a4dee26c7f..325148ed3f 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -25,7 +25,7 @@ env: jobs: setup: - runs-on: ubuntu-latest + runs-on: [self-hosted, 1ES.Pool=GithubRunPool] env: deploymentEnvironmentName: $vars.CIRESOURCEGROUPROOT appServicePlanName: $vars.CIRESOURCEGROUPROOT-linux @@ -71,7 +71,7 @@ jobs: # with: # environmentName: ${{vars.CIRESOURCEGROUPROOT}} buildAndUnitTest: - runs-on: ubuntu-latest + runs-on: [self-hosted, 1ES.Pool=GithubRunPool] needs: setup steps: - name: Checkout @@ -237,7 +237,7 @@ jobs: # path: ${{env.outputPath}}/bin/${{env.buildConfiguration}}/net5.0/publish # runIntegrationTests: - # runs-on: ubuntu-latest + # runs-on: [self-hosted, 1ES.Pool=GithubRunPool] # needs : buildAndUnitTest # steps: # - name: Checkout From 7d79fad2b5e455d1f467a5526b3070841241d5b9 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 4 Apr 2024 14:27:18 -0700 Subject: [PATCH 119/155] Uncommented unit tests to try again. 
Updated symbols copy --- .github/workflows/fhir-oss-ci-pipeline.yml | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 325148ed3f..cfaae47203 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -95,17 +95,17 @@ jobs: fileVersion: ${{needs.setup.outputs.fileVersion}} informationalVersion: ${{needs.setup.outputs.informationalVersion}} majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} - # - name: Test - # uses: ./.github/actions/dotnet-test - # with: - # buildConfiguration: ${{env.buildConfiguration}} + - name: Test + uses: ./.github/actions/dotnet-test + with: + buildConfiguration: ${{env.buildConfiguration}} # - name: Generate SBOM # run: | # curl -Lo $RUNNER_TEMP/sbom-tool https://github.com/microsoft/sbom-tool/releases/latest/download/sbom-tool-linux-x64 # chmod +x $RUNNER_TEMP/sbom-tool # $RUNNER_TEMP/sbom-tool generate -b ${{env.outputPath}} -bc . -V Verbose -ps "Organization: Microsoft" -pv ${{needs.setup.outputs.majorMinorPatch}} -pn ${{needs.setup.outputs.informationalVersion}} - + - name: Upload a Build Artifact uses: actions/upload-artifact@v4 with: @@ -153,12 +153,11 @@ jobs: echo "Copying docker compose root file to deploy directory" cp ${{github.workspace}}/release/docker-compose.yaml ${{env.outputPath}}/deploy - # - name: Copying pdb files to symbols directory - # shell: bash - # run: | - # echo "Copying pdb files to deploy symbols" - # shopt -s globstar - # cp -r ${{github.workspace}}/src/**/*.pdb ${{env.outputPath}}/symbols + - name: Copying pdb files to symbols directory + shell: bash + run: | + echo "Copying pdb files to deploy symbols" + find ${{github.workspace}}/src -type f -name "*.pdb" ! 
-name "*UnitTest*"-exec cp {} ${{env.outputPath}}/symbols \; - name: Publish Stu3 Web Artifacts to deploy directory uses: ./.github/actions/package-web-build-artifacts From a9103a4ff981fc4030fefd6f19e6a770b3ceeb44 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Thu, 4 Apr 2024 19:32:27 -0700 Subject: [PATCH 120/155] Added max cpu 1 to get around file in use issues. --- .github/actions/dotnet-test/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-test/action.yml b/.github/actions/dotnet-test/action.yml index e15a750bb4..0887ea3187 100644 --- a/.github/actions/dotnet-test/action.yml +++ b/.github/actions/dotnet-test/action.yml @@ -9,4 +9,4 @@ runs: steps: - name: Run Unit Tests shell: bash - run: dotnet test "Microsoft.Health.Fhir.sln" --filter "FullyQualifiedName~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal + run: dotnet test "Microsoft.Health.Fhir.sln" -maxcpucount:1 --filter "FullyQualifiedName~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal From 366e4777e1ba9cc9bb029154d94ea52a37bb8027 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 5 Apr 2024 10:09:40 -0700 Subject: [PATCH 121/155] Switched to windows runner and removed maxcpu as that was slowing things down too much. 
--- .github/actions/dotnet-build/action.yml | 2 +- .github/actions/dotnet-test/action.yml | 2 +- .github/workflows/fhir-oss-ci-pipeline.yml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 91251854cb..68ffa1fc71 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -22,5 +22,5 @@ runs: steps: - name: Build shell: bash - run: dotnet build -maxcpucount:1 --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror + run: dotnet build --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror # using max cpu count of 1 to avoid file usage conflicts diff --git a/.github/actions/dotnet-test/action.yml b/.github/actions/dotnet-test/action.yml index 0887ea3187..e15a750bb4 100644 --- a/.github/actions/dotnet-test/action.yml +++ b/.github/actions/dotnet-test/action.yml @@ -9,4 +9,4 @@ runs: steps: - name: Run Unit Tests shell: bash - run: dotnet test "Microsoft.Health.Fhir.sln" -maxcpucount:1 --filter "FullyQualifiedName~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal + run: dotnet test "Microsoft.Health.Fhir.sln" --filter "FullyQualifiedName~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index cfaae47203..6cc33abb1f 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ 
b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -71,7 +71,7 @@ jobs: # with: # environmentName: ${{vars.CIRESOURCEGROUPROOT}} buildAndUnitTest: - runs-on: [self-hosted, 1ES.Pool=GithubRunPool] + runs-on: [self-hosted, 1ES.Pool=GithubRunPool, Windows] needs: setup steps: - name: Checkout @@ -236,7 +236,7 @@ jobs: # path: ${{env.outputPath}}/bin/${{env.buildConfiguration}}/net5.0/publish # runIntegrationTests: - # runs-on: [self-hosted, 1ES.Pool=GithubRunPool] + # runs-on: [self-hosted, 1ES.Pool=GithubRunPool, Windows] # needs : buildAndUnitTest # steps: # - name: Checkout From f6ca76b784f55c9bd8100d71ad43d3546def52f0 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 5 Apr 2024 11:13:15 -0700 Subject: [PATCH 122/155] Fixing symbols publishing --- .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 6cc33abb1f..cf7e4514da 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -157,7 +157,7 @@ jobs: shell: bash run: | echo "Copying pdb files to deploy symbols" - find ${{github.workspace}}/src -type f -name "*.pdb" ! -name "*UnitTest*"-exec cp {} ${{env.outputPath}}/symbols \; + find ${{github.workspace}}/src -type f -name "*.pdb" ! 
-name "*UnitTest*" -exec cp {} ${{env.outputPath}}/symbols \; - name: Publish Stu3 Web Artifacts to deploy directory uses: ./.github/actions/package-web-build-artifacts From 9dfc420357dccc7738cad6ece591c804016738e7 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 5 Apr 2024 11:23:23 -0700 Subject: [PATCH 123/155] Switching build job to windows latest runner --- .github/workflows/fhir-oss-ci-pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index cf7e4514da..839276a537 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -71,7 +71,7 @@ jobs: # with: # environmentName: ${{vars.CIRESOURCEGROUPROOT}} buildAndUnitTest: - runs-on: [self-hosted, 1ES.Pool=GithubRunPool, Windows] + runs-on: windows-latest needs: setup steps: - name: Checkout From a90cd0cc2003861c8e3a57edf6f3f1e538c21c71 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 5 Apr 2024 12:19:28 -0700 Subject: [PATCH 124/155] Commenting out unit tests --- .github/workflows/fhir-oss-ci-pipeline.yml | 26 ++++++++-------------- 1 file changed, 9 insertions(+), 17 deletions(-) diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 839276a537..c1107d3ed2 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -79,14 +79,6 @@ jobs: with: fetch-depth: 0 - - name: Install Latest .Net SDK - uses: actions/setup-dotnet@v4 - with: - global-json-file: 'global.json' - dotnet-version: | - 6.x - 8.x - - name: Build uses: ./.github/actions/dotnet-build with: @@ -95,10 +87,10 @@ jobs: fileVersion: ${{needs.setup.outputs.fileVersion}} informationalVersion: ${{needs.setup.outputs.informationalVersion}} majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} - - name: Test - uses: ./.github/actions/dotnet-test - with: - buildConfiguration: 
${{env.buildConfiguration}} + # - name: Test + # uses: ./.github/actions/dotnet-test + # with: + # buildConfiguration: ${{env.buildConfiguration}} # - name: Generate SBOM # run: | @@ -106,11 +98,11 @@ jobs: # chmod +x $RUNNER_TEMP/sbom-tool # $RUNNER_TEMP/sbom-tool generate -b ${{env.outputPath}} -bc . -V Verbose -ps "Organization: Microsoft" -pv ${{needs.setup.outputs.majorMinorPatch}} -pn ${{needs.setup.outputs.informationalVersion}} - - name: Upload a Build Artifact - uses: actions/upload-artifact@v4 - with: - name: build - path: ${{env.outputPath}} + # - name: Upload a Build Artifact + # uses: actions/upload-artifact@v4 + # with: + # name: build + # path: ${{env.outputPath}} - name: Create Nuget packages shell: bash run: | From 0c35741702877b737d60cb07ba928cbc5364b856 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 5 Apr 2024 12:50:00 -0700 Subject: [PATCH 125/155] Adding no restore to test run --- .github/actions/dotnet-test/action.yml | 2 +- .github/workflows/fhir-oss-ci-pipeline.yml | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/actions/dotnet-test/action.yml b/.github/actions/dotnet-test/action.yml index e15a750bb4..543f0b6d6f 100644 --- a/.github/actions/dotnet-test/action.yml +++ b/.github/actions/dotnet-test/action.yml @@ -9,4 +9,4 @@ runs: steps: - name: Run Unit Tests shell: bash - run: dotnet test "Microsoft.Health.Fhir.sln" --filter "FullyQualifiedName~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal + run: dotnet test "Microsoft.Health.Fhir.sln" --no-restore --filter "FullyQualifiedName~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index c1107d3ed2..6c85a8fdc8 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -87,10 +87,10 @@ jobs: fileVersion: 
${{needs.setup.outputs.fileVersion}} informationalVersion: ${{needs.setup.outputs.informationalVersion}} majorMinorPatch: ${{needs.setup.outputs.majorMinorPatch}} - # - name: Test - # uses: ./.github/actions/dotnet-test - # with: - # buildConfiguration: ${{env.buildConfiguration}} + - name: Test + uses: ./.github/actions/dotnet-test + with: + buildConfiguration: ${{env.buildConfiguration}} # - name: Generate SBOM # run: | @@ -107,7 +107,7 @@ jobs: shell: bash run: | echo "Creating Nuget packages" - dotnet pack ${{github.workspace}} --output ${{env.outputPath}}/nupkgs --no-build --configuration=${{env.buildConfiguration}} -p:PackageVersion=${{needs.setup.outputs.majorMinorPatch}} + dotnet pack ${{github.workspace}}\Microsoft.Health.Fhir.sln --output ${{env.outputPath}}/nupkgs --no-build --configuration=${{env.buildConfiguration}} -p:PackageVersion=${{needs.setup.outputs.majorMinorPatch}} - name: Upload Nuget Packages uses: actions/upload-artifact@v4 From 56cda9c85b60791fc7a3ca6404d07696f3cc9dd5 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 5 Apr 2024 14:52:53 -0700 Subject: [PATCH 126/155] Minor tweaks to see if that resolves file in use issue --- .github/actions/dotnet-build/action.yml | 3 +-- .github/actions/dotnet-test/action.yml | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index 68ffa1fc71..b9127d813a 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -22,5 +22,4 @@ runs: steps: - name: Build shell: bash - run: dotnet build --configuration ${{inputs.buildConfiguration}} -p:ContinuousIntegrationBuild=true -p:AssemblyVersion="${{inputs.assemblyVersion}}" -p:FileVersion="${{inputs.fileVersion}}" -p:InformationalVersion="${{inputs.informationalVersion}}" -p:Version="${{inputs.majorMinorPatch}}" -warnaserror - # using max cpu count of 1 to avoid file usage conflicts + run: dotnet build 
"./Microsoft.Health.Fhir.sln" --configuration ${{inputs.buildConfiguration}} "-p:ContinuousIntegrationBuild=true;AssemblyVersion=${{inputs.assemblyVersion}};FileVersion=${{inputs.fileVersion}};InformationalVersion=${{inputs.informationalVersion}};Version=${{inputs.majorMinorPatch}}" -warnaserror diff --git a/.github/actions/dotnet-test/action.yml b/.github/actions/dotnet-test/action.yml index 543f0b6d6f..26f17de674 100644 --- a/.github/actions/dotnet-test/action.yml +++ b/.github/actions/dotnet-test/action.yml @@ -9,4 +9,4 @@ runs: steps: - name: Run Unit Tests shell: bash - run: dotnet test "Microsoft.Health.Fhir.sln" --no-restore --filter "FullyQualifiedName~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal + run: dotnet test "Microsoft.Health.Fhir.sln" -p:ContinuousIntegrationBuild=true; --filter "FullyQualifiedName~UnitTests" --configuration ${{inputs.buildConfiguration}} --no-build --verbosity normal From 13399c280770472e15dc0b33bd590b80cb7a94e2 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 5 Apr 2024 15:13:20 -0700 Subject: [PATCH 127/155] Adding maxcpucount back in. It exponentially increases build time. 
--- .github/actions/dotnet-build/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index b9127d813a..b87c47437a 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -22,4 +22,4 @@ runs: steps: - name: Build shell: bash - run: dotnet build "./Microsoft.Health.Fhir.sln" --configuration ${{inputs.buildConfiguration}} "-p:ContinuousIntegrationBuild=true;AssemblyVersion=${{inputs.assemblyVersion}};FileVersion=${{inputs.fileVersion}};InformationalVersion=${{inputs.informationalVersion}};Version=${{inputs.majorMinorPatch}}" -warnaserror + run: dotnet build "./Microsoft.Health.Fhir.sln" --maxcpucount:1 --configuration ${{inputs.buildConfiguration}} "-p:ContinuousIntegrationBuild=true;AssemblyVersion=${{inputs.assemblyVersion}};FileVersion=${{inputs.fileVersion}};InformationalVersion=${{inputs.informationalVersion}};Version=${{inputs.majorMinorPatch}}" -warnaserror From 00ae7424fef67ee64569ef66cd46d6ad89e6c367 Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Fri, 5 Apr 2024 15:54:53 -0700 Subject: [PATCH 128/155] Bundling unit tests into collection to run them sequentially to prevent file locks --- .../Features/Operations/Import/ImportOrchestratorJobTests.cs | 1 + .../Features/Operations/Import/ImportProcessingJobTests.cs | 1 + .../Features/Search/CustomQueriesUnitTests.cs | 1 + .../Features/Search/Expressions/FlatteningRewriterTests.cs | 1 + .../Expressions/LastUpdatedToResourceSurrogateIdRewriterTests.cs | 1 + .../Search/Expressions/SqlServerSortingValidatorTests.cs | 1 + 6 files changed, 6 insertions(+) diff --git a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportOrchestratorJobTests.cs b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportOrchestratorJobTests.cs index 2175b86a41..d6b3e0b399 100644 --- 
a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportOrchestratorJobTests.cs +++ b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportOrchestratorJobTests.cs @@ -32,6 +32,7 @@ namespace Microsoft.Health.Fhir.SqlServer.UnitTests.Features.Operations.Import { + [Collection("Sequential")] [Trait(Traits.OwningTeam, OwningTeam.FhirImport)] [Trait(Traits.Category, Categories.Import)] public class ImportOrchestratorJobTests diff --git a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportProcessingJobTests.cs b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportProcessingJobTests.cs index 1769b3aebb..38c1c0f0be 100644 --- a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportProcessingJobTests.cs +++ b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportProcessingJobTests.cs @@ -26,6 +26,7 @@ namespace Microsoft.Health.Fhir.SqlServer.UnitTests.Features.Operations.Import { + [Collection("Sequential")] [Trait(Traits.OwningTeam, OwningTeam.FhirImport)] [Trait(Traits.Category, Categories.Import)] public class ImportProcessingJobTests diff --git a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Search/CustomQueriesUnitTests.cs b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Search/CustomQueriesUnitTests.cs index 736493dd7b..09c328ae80 100644 --- a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Search/CustomQueriesUnitTests.cs +++ b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Search/CustomQueriesUnitTests.cs @@ -14,6 +14,7 @@ namespace Microsoft.Health.Fhir.SqlServer.UnitTests.Features.Search { + [Collection("Sequential")] [Trait(Traits.OwningTeam, OwningTeam.Fhir)] [Trait(Traits.Category, Categories.Search)] public class CustomQueriesUnitTests diff --git a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Search/Expressions/FlatteningRewriterTests.cs 
b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Search/Expressions/FlatteningRewriterTests.cs index da35abbc56..568095f336 100644 --- a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Search/Expressions/FlatteningRewriterTests.cs +++ b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Search/Expressions/FlatteningRewriterTests.cs @@ -11,6 +11,7 @@ namespace Microsoft.Health.Fhir.SqlServer.UnitTests.Features.Search.Expressions { + [Collection("Sequential")] [Trait(Traits.OwningTeam, OwningTeam.Fhir)] [Trait(Traits.Category, Categories.Search)] public class FlatteningRewriterTests diff --git a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Search/Expressions/LastUpdatedToResourceSurrogateIdRewriterTests.cs b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Search/Expressions/LastUpdatedToResourceSurrogateIdRewriterTests.cs index 058edaaac3..578f8fcecf 100644 --- a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Search/Expressions/LastUpdatedToResourceSurrogateIdRewriterTests.cs +++ b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Search/Expressions/LastUpdatedToResourceSurrogateIdRewriterTests.cs @@ -14,6 +14,7 @@ namespace Microsoft.Health.Fhir.SqlServer.UnitTests.Features.Search.Expressions { + [Collection("Sequential")] [Trait(Traits.OwningTeam, OwningTeam.Fhir)] [Trait(Traits.Category, Categories.Search)] public class LastUpdatedToResourceSurrogateIdRewriterTests diff --git a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Search/Expressions/SqlServerSortingValidatorTests.cs b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Search/Expressions/SqlServerSortingValidatorTests.cs index 9cabbd508a..3d5f664f87 100644 --- a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Search/Expressions/SqlServerSortingValidatorTests.cs +++ b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Search/Expressions/SqlServerSortingValidatorTests.cs @@ -17,6 +17,7 @@ namespace 
Microsoft.Health.Fhir.SqlServer.UnitTests.Features.Search.Expressions { + [Collection("Sequential")] [Trait(Traits.OwningTeam, OwningTeam.Fhir)] [Trait(Traits.Category, Categories.Search)] public class SqlServerSortingValidatorTests From 5da9f620a9539104eef326513dadce7f9ceffd1a Mon Sep 17 00:00:00 2001 From: Paul Taladay Date: Mon, 8 Apr 2024 11:13:31 -0700 Subject: [PATCH 129/155] Setting BuildTimeCodeGenerator to build time dependency to try and resolve file issue. --- .github/actions/dotnet-build/action.yml | 2 +- .../Microsoft.Health.Fhir.SqlServer.csproj | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index b87c47437a..b9127d813a 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -22,4 +22,4 @@ runs: steps: - name: Build shell: bash - run: dotnet build "./Microsoft.Health.Fhir.sln" --maxcpucount:1 --configuration ${{inputs.buildConfiguration}} "-p:ContinuousIntegrationBuild=true;AssemblyVersion=${{inputs.assemblyVersion}};FileVersion=${{inputs.fileVersion}};InformationalVersion=${{inputs.informationalVersion}};Version=${{inputs.majorMinorPatch}}" -warnaserror + run: dotnet build "./Microsoft.Health.Fhir.sln" --configuration ${{inputs.buildConfiguration}} "-p:ContinuousIntegrationBuild=true;AssemblyVersion=${{inputs.assemblyVersion}};FileVersion=${{inputs.fileVersion}};InformationalVersion=${{inputs.informationalVersion}};Version=${{inputs.majorMinorPatch}}" -warnaserror diff --git a/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj b/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj index 361ee5e9f9..3fe892074b 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj +++ b/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj @@ -10,7 +10,7 @@ - + From a4b3bc7c456147f6639249d5d19317a42c4a4ab4 Mon Sep 17 
00:00:00 2001 From: Paul Taladay Date: Mon, 8 Apr 2024 12:12:29 -0700 Subject: [PATCH 130/155] Build component change --- .../Microsoft.Health.Fhir.SqlServer.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj b/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj index 3fe892074b..d1b39787e7 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj +++ b/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj @@ -13,7 +13,7 @@ - + From 572e47477405127d80f6405ffac9638b4509d83b Mon Sep 17 00:00:00 2001 From: ketki <3861570+EXPEkesheth@users.noreply.github.com> Date: Wed, 27 Mar 2024 16:54:28 -0700 Subject: [PATCH 131/155] Update BulkExport.md (#3773) Adding information about max_count parameter --- docs/BulkExport.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/BulkExport.md b/docs/BulkExport.md index 84f7f9dbe9..2761bd5598 100644 --- a/docs/BulkExport.md +++ b/docs/BulkExport.md @@ -40,10 +40,12 @@ https://test-fhir-server/$export For more details on Bulk Export, see the [Azure API for FHIR Export Data page](https://docs.microsoft.com/en-us/azure/healthcare-apis/fhir/export-data). -In addition to the query parameters specified in the Azure API For FHIR documentation, users can also use the \_format in FHIR Server. \_format allows a user to select a format for the file structure that the export job creates. Different formats can be defined in the appSettings by combining constants, folder level breaks ('/'), and known tags. The tags will be replaced with data when the job is run. The three supported tags are: +Below are set of additional query parameters users can specify in addition to ones defined in the Azure API For FHIR documentation +1. \_format in FHIR Server: \_format allows a user to select a format for the file structure that the export job creates. 
Different formats can be defined in the appSettings by combining constants, folder level breaks ('/'), and known tags. The tags will be replaced with data when the job is run. The three supported tags are: * **resourcename**: replaces with the resource type being exported * **timestamp**: replaces with a timestamp of the job's queried time * **id**: replaces with the GUID of the export job +1. \_max_count: \_max_count allows to reduce the number of resources exported by a single job. Users can use the _maxCount=xxxx query parameter or set MaximumNumberOfResourcesPerQuery in the export configuration section. The default is 10,000. Export operation needs memory to serialize the data when it is writing to the lake. To reduce out of memory exceptions due to additional memory, user can choose to reduce the _max_count value by decrements of 1000. It would be beneficial for user to increase the compute memory on FHIR server as well. To use the format, you will need to set the following settings in the appSettings: From 2651981f2e53a4c61ff850141913c0025a64d5dc Mon Sep 17 00:00:00 2001 From: SergeyGaluzo <95932081+SergeyGaluzo@users.noreply.github.com> Date: Thu, 28 Mar 2024 12:59:55 -0700 Subject: [PATCH 132/155] Some cleanup (#3782) * Some cleanup * add user defined restriction * Removed bulk classes --- .../Features/Schema/Migrations/77.diff.sql | 23 + .../Features/Schema/Migrations/77.sql | 5156 +++++++++++++++++ .../Features/Schema/SchemaVersion.cs | 1 + .../Features/Schema/SchemaVersionConstants.cs | 2 +- .../TransactionCheckWithInitialiScript.sql | 2 +- .../Sql/Types/CompartmentAssignmentList.sql | 12 - .../Features/Schema/Sql/Types/Types.sql | 405 +- ...BulkCompartmentAssignmentV1RowGenerator.cs | 114 - ...ulkCompositeSearchParameterRowGenerator.cs | 83 - ...lkDateTimeSearchParameterV1RowGenerator.cs | 49 - ...lkDateTimeSearchParameterV2RowGenerator.cs | 48 - ...BulkNumberSearchParameterV1RowGenerator.cs | 32 - ...lkQuantitySearchParameterV1RowGenerator.cs | 34 - 
...kReferenceSearchParameterV1RowGenerator.cs | 31 - ...nCompositeSearchParameterV1RowGenerator.cs | 49 - ...nCompositeSearchParameterV2RowGenerator.cs | 50 - .../BulkReindexResourceV1RowGenerator.cs | 48 - .../BulkResourceWriteClaimV1RowGenerator.cs | 53 - .../BulkSearchParameterRowGenerator.cs | 95 - ...BulkStringSearchParameterV1RowGenerator.cs | 40 - ...BulkStringSearchParameterV2RowGenerator.cs | 40 - ...eCompositeSearchParameterV1RowGenerator.cs | 52 - ...eCompositeSearchParameterV2RowGenerator.cs | 53 - ...rCompositeSearchParameterV1RowGenerator.cs | 54 - ...rCompositeSearchParameterV2RowGenerator.cs | 55 - ...yCompositeSearchParameterV1RowGenerator.cs | 54 - ...yCompositeSearchParameterV2RowGenerator.cs | 55 - .../BulkTokenSearchParameterV1RowGenerator.cs | 45 - .../BulkTokenSearchParameterV2RowGenerator.cs | 61 - ...gCompositeSearchParameterV1RowGenerator.cs | 51 - ...gCompositeSearchParameterV2RowGenerator.cs | 52 - ...kTokenTextSearchParameterV1RowGenerator.cs | 30 - ...nCompositeSearchParameterV1RowGenerator.cs | 41 - ...nCompositeSearchParameterV2RowGenerator.cs | 43 - .../BulkUriSearchParameterV1RowGenerator.cs | 24 - .../SearchParameterStatusV1RowGenerator.cs | 25 - .../Microsoft.Health.Fhir.SqlServer.csproj | 2 +- 37 files changed, 5184 insertions(+), 1880 deletions(-) create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/77.diff.sql create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/77.sql delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Types/CompartmentAssignmentList.sql delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkCompartmentAssignmentV1RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkCompositeSearchParameterRowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkDateTimeSearchParameterV1RowGenerator.cs 
delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkDateTimeSearchParameterV2RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkNumberSearchParameterV1RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkQuantitySearchParameterV1RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkReferenceSearchParameterV1RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkReferenceTokenCompositeSearchParameterV1RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkReferenceTokenCompositeSearchParameterV2RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkReindexResourceV1RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkResourceWriteClaimV1RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkSearchParameterRowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkStringSearchParameterV1RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkStringSearchParameterV2RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenDateTimeCompositeSearchParameterV1RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenDateTimeCompositeSearchParameterV2RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenNumberNumberCompositeSearchParameterV1RowGenerator.cs delete mode 100644 
src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenNumberNumberCompositeSearchParameterV2RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenQuantityCompositeSearchParameterV1RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenQuantityCompositeSearchParameterV2RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenSearchParameterV1RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenSearchParameterV2RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenStringCompositeSearchParameterV1RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenStringCompositeSearchParameterV2RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenTextSearchParameterV1RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenTokenCompositeSearchParameterV1RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenTokenCompositeSearchParameterV2RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkUriSearchParameterV1RowGenerator.cs delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/SearchParameterStatusV1RowGenerator.cs diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/77.diff.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/77.diff.sql new file mode 100644 index 0000000000..2d6332e3c2 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/77.diff.sql @@ -0,0 +1,23 @@ 
+DECLARE @Names TABLE (Name varchar(100) PRIMARY KEY) +INSERT INTO @Names SELECT name FROM sys.objects WHERE type = 'p' AND name LIKE '%[0-9]' AND name NOT LIKE '%ResourceChanges%' +DECLARE @Name varchar(100) +WHILE EXISTS (SELECT * FROM @Names) +BEGIN + SET @Name = (SELECT TOP 1 Name FROM @Names) + EXECUTE('DROP PROCEDURE dbo.'+@Name) + DELETE FROM @Names WHERE Name = @Name +END +GO +DECLARE @Names TABLE (Name varchar(100) PRIMARY KEY) +INSERT INTO @Names SELECT name FROM sys.types WHERE is_user_defined = 1 AND name LIKE '%[0-9]' AND name NOT IN ('SearchParamTableType_2','BulkReindexResourceTableType_1') +DECLARE @Name varchar(100) +WHILE EXISTS (SELECT * FROM @Names) +BEGIN + SET @Name = (SELECT TOP 1 Name FROM @Names) + EXECUTE('DROP TYPE dbo.'+@Name) + DELETE FROM @Names WHERE Name = @Name +END +GO +IF EXISTS (SELECT * FROM sys.types WHERE name = 'CompartmentAssignmentList') + DROP TYPE dbo.CompartmentAssignmentList +GO diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/77.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/77.sql new file mode 100644 index 0000000000..ba0e5475e7 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/77.sql @@ -0,0 +1,5156 @@ + +/************************************************************************************************* + Auto-Generated from Sql build task. Do not manually edit it. 
+**************************************************************************************************/ +SET XACT_ABORT ON +BEGIN TRAN +IF EXISTS (SELECT * + FROM sys.tables + WHERE name = 'ClaimType') + BEGIN + ROLLBACK; + RETURN; + END + + +GO +INSERT INTO dbo.SchemaVersion +VALUES (77, 'started'); + +CREATE PARTITION FUNCTION PartitionFunction_ResourceTypeId(SMALLINT) + AS RANGE RIGHT + FOR VALUES (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150); + +CREATE PARTITION SCHEME PartitionScheme_ResourceTypeId + AS PARTITION PartitionFunction_ResourceTypeId + ALL TO ([PRIMARY]); + + +GO +CREATE PARTITION FUNCTION PartitionFunction_ResourceChangeData_Timestamp(DATETIME2 (7)) + AS RANGE RIGHT + FOR VALUES (N'1970-01-01T00:00:00.0000000'); + +CREATE PARTITION SCHEME PartitionScheme_ResourceChangeData_Timestamp + AS PARTITION PartitionFunction_ResourceChangeData_Timestamp + ALL TO ([PRIMARY]); + +DECLARE @numberOfHistoryPartitions AS INT = 48; + +DECLARE @numberOfFuturePartitions AS INT = 720; + +DECLARE @rightPartitionBoundary AS DATETIME2 (7); + +DECLARE @currentDateTime AS DATETIME2 (7) = sysutcdatetime(); + +WHILE @numberOfHistoryPartitions >= -@numberOfFuturePartitions + BEGIN + SET @rightPartitionBoundary = DATEADD(hour, DATEDIFF(hour, 0, @currentDateTime) - @numberOfHistoryPartitions, 0); + ALTER PARTITION SCHEME PartitionScheme_ResourceChangeData_Timestamp NEXT USED [Primary]; + ALTER 
PARTITION FUNCTION PartitionFunction_ResourceChangeData_Timestamp( ) + SPLIT RANGE (@rightPartitionBoundary); + SET @numberOfHistoryPartitions -= 1; + END + +CREATE SEQUENCE dbo.ResourceSurrogateIdUniquifierSequence + AS INT + START WITH 0 + INCREMENT BY 1 + MINVALUE 0 + MAXVALUE 79999 + CYCLE + CACHE 1000000; + +CREATE TYPE dbo.BigintList AS TABLE ( + Id BIGINT NOT NULL PRIMARY KEY); + +CREATE TYPE dbo.DateTimeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + StartDateTime DATETIMEOFFSET (7) NOT NULL, + EndDateTime DATETIMEOFFSET (7) NOT NULL, + IsLongerThanADay BIT NOT NULL, + IsMin BIT NOT NULL, + IsMax BIT NOT NULL UNIQUE (ResourceTypeId, ResourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, IsMin, IsMax)); + +CREATE TYPE dbo.NumberSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SingleValue DECIMAL (36, 18) NULL, + LowValue DECIMAL (36, 18) NULL, + HighValue DECIMAL (36, 18) NULL UNIQUE (ResourceTypeId, ResourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue)); + +CREATE TYPE dbo.QuantitySearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId INT NULL, + QuantityCodeId INT NULL, + SingleValue DECIMAL (36, 18) NULL, + LowValue DECIMAL (36, 18) NULL, + HighValue DECIMAL (36, 18) NULL UNIQUE (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue)); + +CREATE TYPE dbo.ReferenceSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + BaseUri VARCHAR (128) COLLATE Latin1_General_100_CS_AS NULL, + ReferenceResourceTypeId SMALLINT NULL, + ReferenceResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + 
ReferenceResourceVersion INT NULL UNIQUE (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId)); + +CREATE TYPE dbo.ReferenceTokenCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + BaseUri1 VARCHAR (128) COLLATE Latin1_General_100_CS_AS NULL, + ReferenceResourceTypeId1 SMALLINT NULL, + ReferenceResourceId1 VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ReferenceResourceVersion1 INT NULL, + SystemId2 INT NULL, + Code2 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow2 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL); + +CREATE TYPE dbo.ResourceDateKeyList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ResourceSurrogateId BIGINT NOT NULL PRIMARY KEY (ResourceTypeId, ResourceId, ResourceSurrogateId)); + +CREATE TYPE dbo.ResourceKeyList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Version INT NULL UNIQUE (ResourceTypeId, ResourceId, Version)); + +CREATE TYPE dbo.ResourceList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Version INT NOT NULL, + HasVersionToCompare BIT NOT NULL, + IsDeleted BIT NOT NULL, + IsHistory BIT NOT NULL, + KeepHistory BIT NOT NULL, + RawResource VARBINARY (MAX) NOT NULL, + IsRawResourceMetaSet BIT NOT NULL, + RequestMethod VARCHAR (10) NULL, + SearchParamHash VARCHAR (64) NULL PRIMARY KEY (ResourceTypeId, ResourceSurrogateId), + UNIQUE (ResourceTypeId, ResourceId, Version)); + +CREATE TYPE dbo.ResourceWriteClaimList AS TABLE ( + ResourceSurrogateId BIGINT NOT NULL, + ClaimTypeId TINYINT NOT NULL, + ClaimValue NVARCHAR (128) NOT NULL); + +CREATE TYPE dbo.StringList AS TABLE ( + String VARCHAR (MAX)); + +CREATE 
TYPE dbo.StringSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Text NVARCHAR (256) COLLATE Latin1_General_100_CI_AI_SC NOT NULL, + TextOverflow NVARCHAR (MAX) COLLATE Latin1_General_100_CI_AI_SC NULL, + IsMin BIT NOT NULL, + IsMax BIT NOT NULL); + +CREATE TYPE dbo.TokenDateTimeCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + StartDateTime2 DATETIMEOFFSET (7) NOT NULL, + EndDateTime2 DATETIMEOFFSET (7) NOT NULL, + IsLongerThanADay2 BIT NOT NULL); + +CREATE TYPE dbo.TokenNumberNumberCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + SingleValue2 DECIMAL (36, 18) NULL, + LowValue2 DECIMAL (36, 18) NULL, + HighValue2 DECIMAL (36, 18) NULL, + SingleValue3 DECIMAL (36, 18) NULL, + LowValue3 DECIMAL (36, 18) NULL, + HighValue3 DECIMAL (36, 18) NULL, + HasRange BIT NOT NULL); + +CREATE TYPE dbo.TokenQuantityCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + SystemId2 INT NULL, + QuantityCodeId2 INT NULL, + SingleValue2 DECIMAL (36, 18) NULL, + LowValue2 DECIMAL (36, 18) NULL, + HighValue2 DECIMAL (36, 18) NULL); + +CREATE TYPE dbo.TokenSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + 
SearchParamId SMALLINT NOT NULL, + SystemId INT NULL, + Code VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL); + +CREATE TYPE dbo.TokenStringCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + Text2 NVARCHAR (256) COLLATE Latin1_General_100_CI_AI_SC NOT NULL, + TextOverflow2 NVARCHAR (MAX) COLLATE Latin1_General_100_CI_AI_SC NULL); + +CREATE TYPE dbo.TokenTextList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Text NVARCHAR (400) COLLATE Latin1_General_CI_AI NOT NULL); + +CREATE TYPE dbo.TokenTokenCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + SystemId2 INT NULL, + Code2 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow2 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL); + +CREATE TYPE dbo.SearchParamTableType_2 AS TABLE ( + Uri VARCHAR (128) COLLATE Latin1_General_100_CS_AS NOT NULL, + Status VARCHAR (20) NOT NULL, + IsPartiallySupported BIT NOT NULL); + +CREATE TYPE dbo.BulkReindexResourceTableType_1 AS TABLE ( + Offset INT NOT NULL, + ResourceTypeId SMALLINT NOT NULL, + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ETag INT NULL, + SearchParamHash VARCHAR (64) NOT NULL); + +CREATE TYPE dbo.UriSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Uri VARCHAR (256) COLLATE 
Latin1_General_100_CS_AS NOT NULL PRIMARY KEY (ResourceTypeId, ResourceSurrogateId, SearchParamId, Uri)); + +CREATE TABLE dbo.ClaimType ( + ClaimTypeId TINYINT IDENTITY (1, 1) NOT NULL, + Name VARCHAR (128) COLLATE Latin1_General_100_CS_AS NOT NULL, + CONSTRAINT UQ_ClaimType_ClaimTypeId UNIQUE (ClaimTypeId), + CONSTRAINT PKC_ClaimType PRIMARY KEY CLUSTERED (Name) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE dbo.CompartmentAssignment ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + CompartmentTypeId TINYINT NOT NULL, + ReferenceResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + IsHistory BIT NOT NULL, + CONSTRAINT PKC_CompartmentAssignment PRIMARY KEY CLUSTERED (ResourceTypeId, ResourceSurrogateId, CompartmentTypeId, ReferenceResourceId) WITH (DATA_COMPRESSION = PAGE) ON PartitionScheme_ResourceTypeId (ResourceTypeId) +); + + +GO +ALTER TABLE dbo.CompartmentAssignment + ADD CONSTRAINT DF_CompartmentAssignment_IsHistory DEFAULT 0 FOR IsHistory; + + +GO +ALTER TABLE dbo.CompartmentAssignment SET (LOCK_ESCALATION = AUTO); + + +GO +CREATE NONCLUSTERED INDEX IX_CompartmentAssignment_CompartmentTypeId_ReferenceResourceId + ON dbo.CompartmentAssignment(ResourceTypeId, CompartmentTypeId, ReferenceResourceId, ResourceSurrogateId) WHERE IsHistory = 0 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.CompartmentType ( + CompartmentTypeId TINYINT IDENTITY (1, 1) NOT NULL, + Name VARCHAR (128) COLLATE Latin1_General_100_CS_AS NOT NULL, + CONSTRAINT UQ_CompartmentType_CompartmentTypeId UNIQUE (CompartmentTypeId), + CONSTRAINT PKC_CompartmentType PRIMARY KEY CLUSTERED (Name) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE dbo.DateTimeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + StartDateTime DATETIME2 (7) NOT NULL, + EndDateTime DATETIME2 (7) NOT NULL, + IsLongerThanADay BIT NOT NULL, + 
IsMin BIT CONSTRAINT date_IsMin_Constraint DEFAULT 0 NOT NULL, + IsMax BIT CONSTRAINT date_IsMax_Constraint DEFAULT 0 NOT NULL +); + +ALTER TABLE dbo.DateTimeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_DateTimeSearchParam + ON dbo.DateTimeSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_StartDateTime_EndDateTime_INCLUDE_IsLongerThanADay_IsMin_IsMax + ON dbo.DateTimeSearchParam(SearchParamId, StartDateTime, EndDateTime) + INCLUDE(IsLongerThanADay, IsMin, IsMax) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_EndDateTime_StartDateTime_INCLUDE_IsLongerThanADay_IsMin_IsMax + ON dbo.DateTimeSearchParam(SearchParamId, EndDateTime, StartDateTime) + INCLUDE(IsLongerThanADay, IsMin, IsMax) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_StartDateTime_EndDateTime_INCLUDE_IsMin_IsMax_WHERE_IsLongerThanADay_1 + ON dbo.DateTimeSearchParam(SearchParamId, StartDateTime, EndDateTime) + INCLUDE(IsMin, IsMax) WHERE IsLongerThanADay = 1 + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_EndDateTime_StartDateTime_INCLUDE_IsMin_IsMax_WHERE_IsLongerThanADay_1 + ON dbo.DateTimeSearchParam(SearchParamId, EndDateTime, StartDateTime) + INCLUDE(IsMin, IsMax) WHERE IsLongerThanADay = 1 + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +IF NOT EXISTS (SELECT 1 + FROM sys.tables + WHERE name = 'EventAgentCheckpoint') + BEGIN + CREATE TABLE dbo.EventAgentCheckpoint ( + CheckpointId VARCHAR (64) NOT NULL, + LastProcessedDateTime DATETIMEOFFSET (7), + LastProcessedIdentifier VARCHAR (64) , + UpdatedOn DATETIME2 (7) DEFAULT sysutcdatetime() NOT NULL, + CONSTRAINT PK_EventAgentCheckpoint PRIMARY KEY CLUSTERED (CheckpointId) + ) ON [PRIMARY]; + END + +CREATE PARTITION FUNCTION EventLogPartitionFunction(TINYINT) + AS RANGE RIGHT + FOR VALUES (0, 1, 2, 3, 
4, 5, 6, 7); + + +GO +CREATE PARTITION SCHEME EventLogPartitionScheme + AS PARTITION EventLogPartitionFunction + ALL TO ([PRIMARY]); + + +GO +CREATE TABLE dbo.EventLog ( + PartitionId AS isnull(CONVERT (TINYINT, EventId % 8), 0) PERSISTED, + EventId BIGINT IDENTITY (1, 1) NOT NULL, + EventDate DATETIME NOT NULL, + Process VARCHAR (100) NOT NULL, + Status VARCHAR (10) NOT NULL, + Mode VARCHAR (200) NULL, + Action VARCHAR (20) NULL, + Target VARCHAR (100) NULL, + Rows BIGINT NULL, + Milliseconds INT NULL, + EventText NVARCHAR (3500) NULL, + SPID SMALLINT NOT NULL, + HostName VARCHAR (64) NOT NULL CONSTRAINT PKC_EventLog_EventDate_EventId_PartitionId PRIMARY KEY CLUSTERED (EventDate, EventId, PartitionId) ON EventLogPartitionScheme (PartitionId) +); + +CREATE TABLE dbo.ExportJob ( + Id VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Hash VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Status VARCHAR (10) NOT NULL, + HeartbeatDateTime DATETIME2 (7) NULL, + RawJobRecord VARCHAR (MAX) NOT NULL, + JobVersion ROWVERSION NOT NULL, + CONSTRAINT PKC_ExportJob PRIMARY KEY CLUSTERED (Id) +); + +CREATE UNIQUE NONCLUSTERED INDEX IX_ExportJob_Hash_Status_HeartbeatDateTime + ON dbo.ExportJob(Hash, Status, HeartbeatDateTime); + +CREATE TABLE dbo.IndexProperties ( + TableName VARCHAR (100) NOT NULL, + IndexName VARCHAR (200) NOT NULL, + PropertyName VARCHAR (100) NOT NULL, + PropertyValue VARCHAR (100) NOT NULL, + CreateDate DATETIME CONSTRAINT DF_IndexProperties_CreateDate DEFAULT getUTCdate() NOT NULL CONSTRAINT PKC_IndexProperties_TableName_IndexName_PropertyName PRIMARY KEY CLUSTERED (TableName, IndexName, PropertyName) +); + +CREATE PARTITION FUNCTION TinyintPartitionFunction(TINYINT) + AS RANGE RIGHT + FOR VALUES (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 
64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255); + + +GO +CREATE PARTITION SCHEME TinyintPartitionScheme + AS PARTITION TinyintPartitionFunction + ALL TO ([PRIMARY]); + + +GO +CREATE TABLE dbo.JobQueue ( + QueueType TINYINT NOT NULL, + GroupId BIGINT NOT NULL, + JobId BIGINT NOT NULL, + PartitionId AS CONVERT (TINYINT, JobId % 16) PERSISTED, + Definition VARCHAR (MAX) NOT NULL, + DefinitionHash VARBINARY (20) NOT NULL, + Version BIGINT CONSTRAINT DF_JobQueue_Version DEFAULT datediff_big(millisecond, '0001-01-01', getUTCdate()) NOT NULL, + Status TINYINT CONSTRAINT DF_JobQueue_Status DEFAULT 0 NOT NULL, + Priority TINYINT CONSTRAINT DF_JobQueue_Priority DEFAULT 100 NOT NULL, + Data BIGINT NULL, + Result VARCHAR (MAX) NULL, + CreateDate DATETIME CONSTRAINT DF_JobQueue_CreateDate DEFAULT getUTCdate() NOT NULL, + StartDate DATETIME NULL, + EndDate DATETIME NULL, + HeartbeatDate DATETIME CONSTRAINT DF_JobQueue_HeartbeatDate DEFAULT getUTCdate() NOT NULL, + Worker VARCHAR (100) NULL, + Info VARCHAR (1000) NULL, + CancelRequested BIT CONSTRAINT DF_JobQueue_CancelRequested DEFAULT 0 NOT NULL CONSTRAINT 
PKC_JobQueue_QueueType_PartitionId_JobId PRIMARY KEY CLUSTERED (QueueType, PartitionId, JobId) ON TinyintPartitionScheme (QueueType), + CONSTRAINT U_JobQueue_QueueType_JobId UNIQUE (QueueType, JobId) +); + + +GO +CREATE INDEX IX_QueueType_PartitionId_Status_Priority + ON dbo.JobQueue(PartitionId, Status, Priority) + ON TinyintPartitionScheme (QueueType); + + +GO +CREATE INDEX IX_QueueType_GroupId + ON dbo.JobQueue(QueueType, GroupId) + ON TinyintPartitionScheme (QueueType); + + +GO +CREATE INDEX IX_QueueType_DefinitionHash + ON dbo.JobQueue(QueueType, DefinitionHash) + ON TinyintPartitionScheme (QueueType); + +CREATE TABLE dbo.NumberSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SingleValue DECIMAL (36, 18) NULL, + LowValue DECIMAL (36, 18) NOT NULL, + HighValue DECIMAL (36, 18) NOT NULL +); + +ALTER TABLE dbo.NumberSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_NumberSearchParam + ON dbo.NumberSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_SingleValue_WHERE_SingleValue_NOT_NULL + ON dbo.NumberSearchParam(SearchParamId, SingleValue) WHERE SingleValue IS NOT NULL + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_LowValue_HighValue + ON dbo.NumberSearchParam(SearchParamId, LowValue, HighValue) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_HighValue_LowValue + ON dbo.NumberSearchParam(SearchParamId, HighValue, LowValue) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.Parameters ( + Id VARCHAR (100) NOT NULL, + Date DATETIME NULL, + Number FLOAT NULL, + Bigint BIGINT NULL, + Char VARCHAR (4000) NULL, + Binary VARBINARY (MAX) NULL, + UpdatedDate DATETIME NULL, + UpdatedBy NVARCHAR (255) NULL CONSTRAINT PKC_Parameters_Id PRIMARY KEY CLUSTERED (Id) WITH 
(IGNORE_DUP_KEY = ON) +); + + +GO +CREATE TABLE dbo.ParametersHistory ( + ChangeId INT IDENTITY (1, 1) NOT NULL, + Id VARCHAR (100) NOT NULL, + Date DATETIME NULL, + Number FLOAT NULL, + Bigint BIGINT NULL, + Char VARCHAR (4000) NULL, + Binary VARBINARY (MAX) NULL, + UpdatedDate DATETIME NULL, + UpdatedBy NVARCHAR (255) NULL +); + +CREATE TABLE dbo.QuantityCode ( + QuantityCodeId INT IDENTITY (1, 1) NOT NULL, + Value NVARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CONSTRAINT UQ_QuantityCode_QuantityCodeId UNIQUE (QuantityCodeId), + CONSTRAINT PKC_QuantityCode PRIMARY KEY CLUSTERED (Value) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE dbo.QuantitySearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId INT NULL, + QuantityCodeId INT NULL, + SingleValue DECIMAL (36, 18) NULL, + LowValue DECIMAL (36, 18) NOT NULL, + HighValue DECIMAL (36, 18) NOT NULL +); + +ALTER TABLE dbo.QuantitySearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_QuantitySearchParam + ON dbo.QuantitySearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_QuantityCodeId_SingleValue_INCLUDE_SystemId_WHERE_SingleValue_NOT_NULL + ON dbo.QuantitySearchParam(SearchParamId, QuantityCodeId, SingleValue) + INCLUDE(SystemId) WHERE SingleValue IS NOT NULL + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_QuantityCodeId_LowValue_HighValue_INCLUDE_SystemId + ON dbo.QuantitySearchParam(SearchParamId, QuantityCodeId, LowValue, HighValue) + INCLUDE(SystemId) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_QuantityCodeId_HighValue_LowValue_INCLUDE_SystemId + ON dbo.QuantitySearchParam(SearchParamId, QuantityCodeId, HighValue, LowValue) + INCLUDE(SystemId) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE 
dbo.ReferenceSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + BaseUri VARCHAR (128) COLLATE Latin1_General_100_CS_AS NULL, + ReferenceResourceTypeId SMALLINT NULL, + ReferenceResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ReferenceResourceVersion INT NULL +); + +ALTER TABLE dbo.ReferenceSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_ReferenceSearchParam + ON dbo.ReferenceSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE UNIQUE INDEX IXU_ReferenceResourceId_ReferenceResourceTypeId_SearchParamId_BaseUri_ResourceSurrogateId_ResourceTypeId + ON dbo.ReferenceSearchParam(ReferenceResourceId, ReferenceResourceTypeId, SearchParamId, BaseUri, ResourceSurrogateId, ResourceTypeId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.ReferenceTokenCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + BaseUri1 VARCHAR (128) COLLATE Latin1_General_100_CS_AS NULL, + ReferenceResourceTypeId1 SMALLINT NULL, + ReferenceResourceId1 VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ReferenceResourceVersion1 INT NULL, + SystemId2 INT NULL, + Code2 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow2 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.ReferenceTokenCompositeSearchParam + ADD CONSTRAINT CHK_ReferenceTokenCompositeSearchParam_CodeOverflow2 CHECK (LEN(Code2) = 256 + OR CodeOverflow2 IS NULL); + +ALTER TABLE dbo.ReferenceTokenCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_ReferenceTokenCompositeSearchParam + ON dbo.ReferenceTokenCompositeSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) 
+ ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_ReferenceResourceId1_Code2_INCLUDE_ReferenceResourceTypeId1_BaseUri1_SystemId2 + ON dbo.ReferenceTokenCompositeSearchParam(SearchParamId, ReferenceResourceId1, Code2) + INCLUDE(ReferenceResourceTypeId1, BaseUri1, SystemId2) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.ReindexJob ( + Id VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Status VARCHAR (10) NOT NULL, + HeartbeatDateTime DATETIME2 (7) NULL, + RawJobRecord VARCHAR (MAX) NOT NULL, + JobVersion ROWVERSION NOT NULL, + CONSTRAINT PKC_ReindexJob PRIMARY KEY CLUSTERED (Id) +); + +CREATE TABLE dbo.Resource ( + ResourceTypeId SMALLINT NOT NULL, + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Version INT NOT NULL, + IsHistory BIT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + IsDeleted BIT NOT NULL, + RequestMethod VARCHAR (10) NULL, + RawResource VARBINARY (MAX) NOT NULL, + IsRawResourceMetaSet BIT DEFAULT 0 NOT NULL, + SearchParamHash VARCHAR (64) NULL, + TransactionId BIGINT NULL, + HistoryTransactionId BIGINT NULL CONSTRAINT PKC_Resource PRIMARY KEY CLUSTERED (ResourceTypeId, ResourceSurrogateId) WITH (DATA_COMPRESSION = PAGE) ON PartitionScheme_ResourceTypeId (ResourceTypeId), + CONSTRAINT CH_Resource_RawResource_Length CHECK (RawResource > 0x0) +); + +ALTER TABLE dbo.Resource SET (LOCK_ESCALATION = AUTO); + +CREATE INDEX IX_ResourceTypeId_TransactionId + ON dbo.Resource(ResourceTypeId, TransactionId) WHERE TransactionId IS NOT NULL + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_ResourceTypeId_HistoryTransactionId + ON dbo.Resource(ResourceTypeId, HistoryTransactionId) WHERE HistoryTransactionId IS NOT NULL + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE UNIQUE NONCLUSTERED INDEX IX_Resource_ResourceTypeId_ResourceId_Version + ON dbo.Resource(ResourceTypeId, ResourceId, Version) + ON 
PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE UNIQUE NONCLUSTERED INDEX IX_Resource_ResourceTypeId_ResourceId + ON dbo.Resource(ResourceTypeId, ResourceId) + INCLUDE(Version, IsDeleted) WHERE IsHistory = 0 + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE UNIQUE NONCLUSTERED INDEX IX_Resource_ResourceTypeId_ResourceSurrgateId + ON dbo.Resource(ResourceTypeId, ResourceSurrogateId) WHERE IsHistory = 0 + AND IsDeleted = 0 + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.ResourceChangeData ( + Id BIGINT IDENTITY (1, 1) NOT NULL, + Timestamp DATETIME2 (7) CONSTRAINT DF_ResourceChangeData_Timestamp DEFAULT sysutcdatetime() NOT NULL, + ResourceId VARCHAR (64) NOT NULL, + ResourceTypeId SMALLINT NOT NULL, + ResourceVersion INT NOT NULL, + ResourceChangeTypeId TINYINT NOT NULL +) ON PartitionScheme_ResourceChangeData_Timestamp (Timestamp); + +CREATE CLUSTERED INDEX IXC_ResourceChangeData + ON dbo.ResourceChangeData(Id ASC) WITH (ONLINE = ON) + ON PartitionScheme_ResourceChangeData_Timestamp (Timestamp); + +CREATE TABLE dbo.ResourceChangeDataStaging ( + Id BIGINT IDENTITY (1, 1) NOT NULL, + Timestamp DATETIME2 (7) CONSTRAINT DF_ResourceChangeDataStaging_Timestamp DEFAULT sysutcdatetime() NOT NULL, + ResourceId VARCHAR (64) NOT NULL, + ResourceTypeId SMALLINT NOT NULL, + ResourceVersion INT NOT NULL, + ResourceChangeTypeId TINYINT NOT NULL +) ON [PRIMARY]; + +CREATE CLUSTERED INDEX IXC_ResourceChangeDataStaging + ON dbo.ResourceChangeDataStaging(Id ASC, Timestamp ASC) WITH (ONLINE = ON) + ON [PRIMARY]; + +ALTER TABLE dbo.ResourceChangeDataStaging WITH CHECK + ADD CONSTRAINT CHK_ResourceChangeDataStaging_partition CHECK (Timestamp < CONVERT (DATETIME2 (7), N'9999-12-31 23:59:59.9999999')); + +ALTER TABLE dbo.ResourceChangeDataStaging CHECK CONSTRAINT CHK_ResourceChangeDataStaging_partition; + +CREATE TABLE dbo.ResourceChangeType ( + ResourceChangeTypeId TINYINT NOT NULL, + Name NVARCHAR (50) NOT NULL, + CONSTRAINT 
PK_ResourceChangeType PRIMARY KEY CLUSTERED (ResourceChangeTypeId), + CONSTRAINT UQ_ResourceChangeType_Name UNIQUE NONCLUSTERED (Name) +) ON [PRIMARY]; + + +GO +INSERT dbo.ResourceChangeType (ResourceChangeTypeId, Name) +VALUES (0, N'Creation'); + +INSERT dbo.ResourceChangeType (ResourceChangeTypeId, Name) +VALUES (1, N'Update'); + +INSERT dbo.ResourceChangeType (ResourceChangeTypeId, Name) +VALUES (2, N'Deletion'); + +CREATE TABLE dbo.ResourceType ( + ResourceTypeId SMALLINT IDENTITY (1, 1) NOT NULL, + Name NVARCHAR (50) COLLATE Latin1_General_100_CS_AS NOT NULL, + CONSTRAINT UQ_ResourceType_ResourceTypeId UNIQUE (ResourceTypeId), + CONSTRAINT PKC_ResourceType PRIMARY KEY CLUSTERED (Name) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE dbo.ResourceWriteClaim ( + ResourceSurrogateId BIGINT NOT NULL, + ClaimTypeId TINYINT NOT NULL, + ClaimValue NVARCHAR (128) NOT NULL +) +WITH (DATA_COMPRESSION = PAGE); + +CREATE CLUSTERED INDEX IXC_ResourceWriteClaim + ON dbo.ResourceWriteClaim(ResourceSurrogateId, ClaimTypeId); + +CREATE TABLE dbo.SchemaMigrationProgress ( + Timestamp DATETIME2 (3) DEFAULT CURRENT_TIMESTAMP, + Message NVARCHAR (MAX) +); + +CREATE TABLE dbo.SearchParam ( + SearchParamId SMALLINT IDENTITY (1, 1) NOT NULL, + Uri VARCHAR (128) COLLATE Latin1_General_100_CS_AS NOT NULL, + Status VARCHAR (20) NULL, + LastUpdated DATETIMEOFFSET (7) NULL, + IsPartiallySupported BIT NULL, + CONSTRAINT UQ_SearchParam_SearchParamId UNIQUE (SearchParamId), + CONSTRAINT PKC_SearchParam PRIMARY KEY CLUSTERED (Uri) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE dbo.StringSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Text NVARCHAR (256) COLLATE Latin1_General_100_CI_AI_SC NOT NULL, + TextOverflow NVARCHAR (MAX) COLLATE Latin1_General_100_CI_AI_SC NULL, + IsMin BIT CONSTRAINT string_IsMin_Constraint DEFAULT 0 NOT NULL, + IsMax BIT CONSTRAINT string_IsMax_Constraint DEFAULT 0 NOT NULL +); + 
+ALTER TABLE dbo.StringSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_StringSearchParam + ON dbo.StringSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Text_INCLUDE_TextOverflow_IsMin_IsMax + ON dbo.StringSearchParam(SearchParamId, Text) + INCLUDE(TextOverflow, IsMin, IsMax) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Text_INCLUDE_IsMin_IsMax_WHERE_TextOverflow_NOT_NULL + ON dbo.StringSearchParam(SearchParamId, Text) + INCLUDE(IsMin, IsMax) WHERE TextOverflow IS NOT NULL WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.System ( + SystemId INT IDENTITY (1, 1) NOT NULL, + Value NVARCHAR (256) NOT NULL, + CONSTRAINT UQ_System_SystemId UNIQUE (SystemId), + CONSTRAINT PKC_System PRIMARY KEY CLUSTERED (Value) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE [dbo].[TaskInfo] ( + [TaskId] VARCHAR (64) NOT NULL, + [QueueId] VARCHAR (64) NOT NULL, + [Status] SMALLINT NOT NULL, + [TaskTypeId] SMALLINT NOT NULL, + [RunId] VARCHAR (50) NULL, + [IsCanceled] BIT NOT NULL, + [RetryCount] SMALLINT NOT NULL, + [MaxRetryCount] SMALLINT NOT NULL, + [HeartbeatDateTime] DATETIME2 (7) NULL, + [InputData] VARCHAR (MAX) NOT NULL, + [TaskContext] VARCHAR (MAX) NULL, + [Result] VARCHAR (MAX) NULL, + [CreateDateTime] DATETIME2 (7) CONSTRAINT DF_TaskInfo_CreateDate DEFAULT SYSUTCDATETIME() NOT NULL, + [StartDateTime] DATETIME2 (7) NULL, + [EndDateTime] DATETIME2 (7) NULL, + [Worker] VARCHAR (100) NULL, + [RestartInfo] VARCHAR (MAX) NULL, + [ParentTaskId] VARCHAR (64) NULL, + CONSTRAINT PKC_TaskInfo PRIMARY KEY CLUSTERED (TaskId) WITH (DATA_COMPRESSION = PAGE) +) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]; + + +GO +CREATE NONCLUSTERED INDEX IX_QueueId_Status + ON dbo.TaskInfo(QueueId, Status); + + +GO +CREATE 
NONCLUSTERED INDEX IX_QueueId_ParentTaskId + ON dbo.TaskInfo(QueueId, ParentTaskId); + +CREATE TABLE dbo.TokenDateTimeCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + StartDateTime2 DATETIME2 (7) NOT NULL, + EndDateTime2 DATETIME2 (7) NOT NULL, + IsLongerThanADay2 BIT NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenDateTimeCompositeSearchParam + ADD CONSTRAINT CHK_TokenDateTimeCompositeSearchParam_CodeOverflow1 CHECK (LEN(Code1) = 256 + OR CodeOverflow1 IS NULL); + +ALTER TABLE dbo.TokenDateTimeCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenDateTimeCompositeSearchParam + ON dbo.TokenDateTimeCompositeSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_StartDateTime2_EndDateTime2_INCLUDE_SystemId1_IsLongerThanADay2 + ON dbo.TokenDateTimeCompositeSearchParam(SearchParamId, Code1, StartDateTime2, EndDateTime2) + INCLUDE(SystemId1, IsLongerThanADay2) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_EndDateTime2_StartDateTime2_INCLUDE_SystemId1_IsLongerThanADay2 + ON dbo.TokenDateTimeCompositeSearchParam(SearchParamId, Code1, EndDateTime2, StartDateTime2) + INCLUDE(SystemId1, IsLongerThanADay2) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_StartDateTime2_EndDateTime2_INCLUDE_SystemId1_WHERE_IsLongerThanADay2_1 + ON dbo.TokenDateTimeCompositeSearchParam(SearchParamId, Code1, StartDateTime2, EndDateTime2) + INCLUDE(SystemId1) WHERE IsLongerThanADay2 = 1 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId 
(ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_EndDateTime2_StartDateTime2_INCLUDE_SystemId1_WHERE_IsLongerThanADay2_1 + ON dbo.TokenDateTimeCompositeSearchParam(SearchParamId, Code1, EndDateTime2, StartDateTime2) + INCLUDE(SystemId1) WHERE IsLongerThanADay2 = 1 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenNumberNumberCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + SingleValue2 DECIMAL (36, 18) NULL, + LowValue2 DECIMAL (36, 18) NULL, + HighValue2 DECIMAL (36, 18) NULL, + SingleValue3 DECIMAL (36, 18) NULL, + LowValue3 DECIMAL (36, 18) NULL, + HighValue3 DECIMAL (36, 18) NULL, + HasRange BIT NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenNumberNumberCompositeSearchParam + ADD CONSTRAINT CHK_TokenNumberNumberCompositeSearchParam_CodeOverflow1 CHECK (LEN(Code1) = 256 + OR CodeOverflow1 IS NULL); + +ALTER TABLE dbo.TokenNumberNumberCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenNumberNumberCompositeSearchParam + ON dbo.TokenNumberNumberCompositeSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_SingleValue2_SingleValue3_INCLUDE_SystemId1_WHERE_HasRange_0 + ON dbo.TokenNumberNumberCompositeSearchParam(SearchParamId, Code1, SingleValue2, SingleValue3) + INCLUDE(SystemId1) WHERE HasRange = 0 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_LowValue2_HighValue2_LowValue3_HighValue3_INCLUDE_SystemId1_WHERE_HasRange_1 + ON dbo.TokenNumberNumberCompositeSearchParam(SearchParamId, Code1, LowValue2, HighValue2, LowValue3, 
HighValue3) + INCLUDE(SystemId1) WHERE HasRange = 1 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenQuantityCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + SystemId2 INT NULL, + QuantityCodeId2 INT NULL, + SingleValue2 DECIMAL (36, 18) NULL, + LowValue2 DECIMAL (36, 18) NULL, + HighValue2 DECIMAL (36, 18) NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenQuantityCompositeSearchParam + ADD CONSTRAINT CHK_TokenQuantityCompositeSearchParam_CodeOverflow1 CHECK (LEN(Code1) = 256 + OR CodeOverflow1 IS NULL); + +ALTER TABLE dbo.TokenQuantityCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenQuantityCompositeSearchParam + ON dbo.TokenQuantityCompositeSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_SingleValue2_INCLUDE_QuantityCodeId2_SystemId1_SystemId2_WHERE_SingleValue2_NOT_NULL + ON dbo.TokenQuantityCompositeSearchParam(SearchParamId, Code1, SingleValue2) + INCLUDE(QuantityCodeId2, SystemId1, SystemId2) WHERE SingleValue2 IS NOT NULL WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_LowValue2_HighValue2_INCLUDE_QuantityCodeId2_SystemId1_SystemId2_WHERE_LowValue2_NOT_NULL + ON dbo.TokenQuantityCompositeSearchParam(SearchParamId, Code1, LowValue2, HighValue2) + INCLUDE(QuantityCodeId2, SystemId1, SystemId2) WHERE LowValue2 IS NOT NULL WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_HighValue2_LowValue2_INCLUDE_QuantityCodeId2_SystemId1_SystemId2_WHERE_LowValue2_NOT_NULL 
+ ON dbo.TokenQuantityCompositeSearchParam(SearchParamId, Code1, HighValue2, LowValue2) + INCLUDE(QuantityCodeId2, SystemId1, SystemId2) WHERE LowValue2 IS NOT NULL WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId INT NULL, + Code VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenSearchParam + ADD CONSTRAINT CHK_TokenSearchParam_CodeOverflow CHECK (LEN(Code) = 256 + OR CodeOverflow IS NULL); + +ALTER TABLE dbo.TokenSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenSearchParam + ON dbo.TokenSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code_INCLUDE_SystemId + ON dbo.TokenSearchParam(SearchParamId, Code) + INCLUDE(SystemId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenStringCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + Text2 NVARCHAR (256) COLLATE Latin1_General_CI_AI NOT NULL, + TextOverflow2 NVARCHAR (MAX) COLLATE Latin1_General_CI_AI NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenStringCompositeSearchParam + ADD CONSTRAINT CHK_TokenStringCompositeSearchParam_CodeOverflow1 CHECK (LEN(Code1) = 256 + OR CodeOverflow1 IS NULL); + +ALTER TABLE dbo.TokenStringCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenStringCompositeSearchParam + ON 
dbo.TokenStringCompositeSearchParam(ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_Text2_INCLUDE_SystemId1_TextOverflow2 + ON dbo.TokenStringCompositeSearchParam(SearchParamId, Code1, Text2) + INCLUDE(SystemId1, TextOverflow2) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_Text2_INCLUDE_SystemId1_WHERE_TextOverflow2_NOT_NULL + ON dbo.TokenStringCompositeSearchParam(SearchParamId, Code1, Text2) + INCLUDE(SystemId1) WHERE TextOverflow2 IS NOT NULL WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenText ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Text NVARCHAR (400) COLLATE Latin1_General_CI_AI NOT NULL, + IsHistory BIT NOT NULL +); + +ALTER TABLE dbo.TokenText + ADD CONSTRAINT DF_TokenText_IsHistory DEFAULT 0 FOR IsHistory; + +ALTER TABLE dbo.TokenText SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenText + ON dbo.TokenText(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE NONCLUSTERED INDEX IX_TokenText_SearchParamId_Text + ON dbo.TokenText(ResourceTypeId, SearchParamId, Text, ResourceSurrogateId) WHERE IsHistory = 0 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenTokenCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + SystemId2 INT NULL, + Code2 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + CodeOverflow2 VARCHAR (MAX) COLLATE 
Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenTokenCompositeSearchParam + ADD CONSTRAINT CHK_TokenTokenCompositeSearchParam_CodeOverflow1 CHECK (LEN(Code1) = 256 + OR CodeOverflow1 IS NULL); + +ALTER TABLE dbo.TokenTokenCompositeSearchParam + ADD CONSTRAINT CHK_TokenTokenCompositeSearchParam_CodeOverflow2 CHECK (LEN(Code2) = 256 + OR CodeOverflow2 IS NULL); + +ALTER TABLE dbo.TokenTokenCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenTokenCompositeSearchParam + ON dbo.TokenTokenCompositeSearchParam(ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_Code2_INCLUDE_SystemId1_SystemId2 + ON dbo.TokenTokenCompositeSearchParam(SearchParamId, Code1, Code2) + INCLUDE(SystemId1, SystemId2) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.Transactions ( + SurrogateIdRangeFirstValue BIGINT NOT NULL, + SurrogateIdRangeLastValue BIGINT NOT NULL, + Definition VARCHAR (2000) NULL, + IsCompleted BIT CONSTRAINT DF_Transactions_IsCompleted DEFAULT 0 NOT NULL, + IsSuccess BIT CONSTRAINT DF_Transactions_IsSuccess DEFAULT 0 NOT NULL, + IsVisible BIT CONSTRAINT DF_Transactions_IsVisible DEFAULT 0 NOT NULL, + IsHistoryMoved BIT CONSTRAINT DF_Transactions_IsHistoryMoved DEFAULT 0 NOT NULL, + CreateDate DATETIME CONSTRAINT DF_Transactions_CreateDate DEFAULT getUTCdate() NOT NULL, + EndDate DATETIME NULL, + VisibleDate DATETIME NULL, + HistoryMovedDate DATETIME NULL, + HeartbeatDate DATETIME CONSTRAINT DF_Transactions_HeartbeatDate DEFAULT getUTCdate() NOT NULL, + FailureReason VARCHAR (MAX) NULL, + IsControlledByClient BIT CONSTRAINT DF_Transactions_IsControlledByClient DEFAULT 1 NOT NULL, + InvisibleHistoryRemovedDate DATETIME NULL CONSTRAINT PKC_Transactions_SurrogateIdRangeFirstValue PRIMARY KEY CLUSTERED (SurrogateIdRangeFirstValue) +); + +CREATE INDEX IX_IsVisible + ON 
dbo.Transactions(IsVisible); -- lookup by visibility flag + +CREATE TABLE dbo.UriSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Uri VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL -- case-sensitive collation: URI comparisons are case-sensitive +); + +ALTER TABLE dbo.UriSearchParam SET (LOCK_ESCALATION = AUTO); -- AUTO = partition-level lock escalation on partitioned tables + +CREATE CLUSTERED INDEX IXC_UriSearchParam + ON dbo.UriSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Uri + ON dbo.UriSearchParam(SearchParamId, Uri) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.WatchdogLeases ( + Watchdog VARCHAR (100) NOT NULL, + LeaseHolder VARCHAR (100) CONSTRAINT DF_WatchdogLeases_LeaseHolder DEFAULT '' NOT NULL, + LeaseEndTime DATETIME CONSTRAINT DF_WatchdogLeases_LeaseEndTime DEFAULT 0 NOT NULL, + RemainingLeaseTimeSec AS datediff(second, getUTCdate(), LeaseEndTime), -- computed: seconds of lease left (negative once expired) + LeaseRequestor VARCHAR (100) CONSTRAINT DF_WatchdogLeases_LeaseRequestor DEFAULT '' NOT NULL, + LeaseRequestTime DATETIME CONSTRAINT DF_WatchdogLeases_LeaseRequestTime DEFAULT 0 NOT NULL CONSTRAINT PKC_WatchdogLeases_Watchdog PRIMARY KEY CLUSTERED (Watchdog) +); + +COMMIT -- ends a transaction begun earlier in this script (outside this excerpt) +GO +CREATE PROCEDURE dbo.AcquireReindexJobs -- claims up to a concurrency-limited number of queued or stale-heartbeat reindex jobs +@jobHeartbeatTimeoutThresholdInSeconds BIGINT, @maximumNumberOfConcurrentJobsAllowed INT +AS +SET NOCOUNT ON; +SET XACT_ABORT ON; +SET TRANSACTION ISOLATION LEVEL SERIALIZABLE; -- combined with TABLOCKX below: one acquirer at a time +BEGIN TRANSACTION; +DECLARE @expirationDateTime AS DATETIME2 (7); +SELECT @expirationDateTime = DATEADD(second, -@jobHeartbeatTimeoutThresholdInSeconds, SYSUTCDATETIME()); -- heartbeats older than this are considered stale +DECLARE @numberOfRunningJobs AS INT; +SELECT @numberOfRunningJobs = COUNT(*) +FROM dbo.ReindexJob WITH (TABLOCKX) -- exclusive table lock serializes concurrent acquisitions +WHERE Status = 'Running' + AND HeartbeatDateTime > @expirationDateTime; +DECLARE @limit AS INT = @maximumNumberOfConcurrentJobsAllowed - @numberOfRunningJobs; -- remaining concurrency budget +IF (@limit > 0) + BEGIN + DECLARE @availableJobs 
TABLE ( + Id VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + JobVersion BINARY (8) NOT NULL); + INSERT INTO @availableJobs + SELECT TOP (@limit) Id, + JobVersion + FROM dbo.ReindexJob + WHERE (Status = 'Queued' + OR (Status = 'Running' + AND HeartbeatDateTime <= @expirationDateTime)) + ORDER BY HeartbeatDateTime; + DECLARE @heartbeatDateTime AS DATETIME2 (7) = SYSUTCDATETIME(); + UPDATE dbo.ReindexJob + SET Status = 'Running', + HeartbeatDateTime = @heartbeatDateTime, + RawJobRecord = JSON_MODIFY(RawJobRecord, '$.status', 'Running') + OUTPUT inserted.RawJobRecord, inserted.JobVersion + FROM dbo.ReindexJob AS job + INNER JOIN + @availableJobs AS availableJob + ON job.Id = availableJob.Id + AND job.JobVersion = availableJob.JobVersion; + END +COMMIT TRANSACTION; + +GO +CREATE PROCEDURE dbo.AcquireWatchdogLease +@Watchdog VARCHAR (100), @Worker VARCHAR (100), @AllowRebalance BIT=1, @ForceAcquire BIT=0, @LeasePeriodSec FLOAT, @WorkerIsRunning BIT=0, @LeaseEndTime DATETIME OUTPUT, @IsAcquired BIT OUTPUT, @CurrentLeaseHolder VARCHAR (100)=NULL OUTPUT +AS +SET NOCOUNT ON; +SET XACT_ABORT ON; +DECLARE @SP AS VARCHAR (100) = 'AcquireWatchdogLease', @Mode AS VARCHAR (100), @msg AS VARCHAR (1000), @MyLeasesNumber AS INT, @OtherValidRequestsOrLeasesNumber AS INT, @MyValidRequestsOrLeasesNumber AS INT, @DesiredLeasesNumber AS INT, @NotLeasedWatchdogNumber AS INT, @WatchdogNumber AS INT, @Now AS DATETIME, @MyLastChangeTime AS DATETIME, @PreviousLeaseHolder AS VARCHAR (100), @Rows AS INT = 0, @NumberOfWorkers AS INT, @st AS DATETIME = getUTCdate(), @RowsInt AS INT, @Pattern AS VARCHAR (100); +BEGIN TRY + SET @Mode = 'R=' + isnull(@Watchdog, 'NULL') + ' W=' + isnull(@Worker, 'NULL') + ' F=' + isnull(CONVERT (VARCHAR, @ForceAcquire), 'NULL') + ' LP=' + isnull(CONVERT (VARCHAR, @LeasePeriodSec), 'NULL'); + SET @CurrentLeaseHolder = ''; + SET @IsAcquired = 0; + SET @Now = getUTCdate(); + SET @LeaseEndTime = @Now; + SET @Pattern = NULLIF ((SELECT Char + FROM dbo.Parameters + 
WHERE Id = 'WatchdogLeaseHolderIncludePatternFor' + @Watchdog), ''); + IF @Pattern IS NULL + SET @Pattern = NULLIF ((SELECT Char + FROM dbo.Parameters + WHERE Id = 'WatchdogLeaseHolderIncludePattern'), ''); + IF @Pattern IS NOT NULL + AND @Worker NOT LIKE @Pattern + BEGIN + SET @msg = 'Worker does not match include pattern=' + @Pattern; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows, @Text = @msg; + SET @CurrentLeaseHolder = isnull((SELECT LeaseHolder + FROM dbo.WatchdogLeases + WHERE Watchdog = @Watchdog), ''); + RETURN; + END + SET @Pattern = NULLIF ((SELECT Char + FROM dbo.Parameters + WHERE Id = 'WatchdogLeaseHolderExcludePatternFor' + @Watchdog), ''); + IF @Pattern IS NULL + SET @Pattern = NULLIF ((SELECT Char + FROM dbo.Parameters + WHERE Id = 'WatchdogLeaseHolderExcludePattern'), ''); + IF @Pattern IS NOT NULL + AND @Worker LIKE @Pattern + BEGIN + SET @msg = 'Worker matches exclude pattern=' + @Pattern; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows, @Text = @msg; + SET @CurrentLeaseHolder = isnull((SELECT LeaseHolder + FROM dbo.WatchdogLeases + WHERE Watchdog = @Watchdog), ''); + RETURN; + END + DECLARE @Watchdogs TABLE ( + Watchdog VARCHAR (100) PRIMARY KEY); + INSERT INTO @Watchdogs + SELECT Watchdog + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE RemainingLeaseTimeSec * (-1) > 10 * @LeasePeriodSec + OR @ForceAcquire = 1 + AND Watchdog = @Watchdog + AND LeaseHolder <> @Worker; + IF @@rowcount > 0 + BEGIN + DELETE dbo.WatchdogLeases + WHERE Watchdog IN (SELECT Watchdog + FROM @Watchdogs); + SET @Rows += @@rowcount; + IF @Rows > 0 + BEGIN + SET @msg = ''; + SELECT @msg = CONVERT (VARCHAR (1000), @msg + CASE WHEN @msg = '' THEN '' ELSE ',' END + Watchdog) + FROM @Watchdogs; + SET @msg = CONVERT (VARCHAR (1000), 'Remove old/forced leases:' + @msg); + EXECUTE dbo.LogEvent @Process = 'AcquireWatchdogLease', @Status = 'Info', @Mode = @Mode, @Target = 
'WatchdogLeases', @Action = 'Delete', @Rows = @Rows, @Text = @msg; + END + END + SET @NumberOfWorkers = 1 + (SELECT count(*) + FROM (SELECT LeaseHolder + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseHolder <> @Worker + UNION + SELECT LeaseRequestor + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseRequestor <> @Worker + AND LeaseRequestor <> '') AS A); + SET @Mode = CONVERT (VARCHAR (100), @Mode + ' N=' + CONVERT (VARCHAR (10), @NumberOfWorkers)); + IF NOT EXISTS (SELECT * + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE Watchdog = @Watchdog) + INSERT INTO dbo.WatchdogLeases (Watchdog, LeaseEndTime, LeaseRequestTime) + SELECT @Watchdog, + dateadd(day, -10, @Now), + dateadd(day, -10, @Now) + WHERE NOT EXISTS (SELECT * + FROM dbo.WatchdogLeases WITH (TABLOCKX) + WHERE Watchdog = @Watchdog); + SET @LeaseEndTime = dateadd(second, @LeasePeriodSec, @Now); + SET @WatchdogNumber = (SELECT count(*) + FROM dbo.WatchdogLeases WITH (NOLOCK)); + SET @NotLeasedWatchdogNumber = (SELECT count(*) + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseHolder = '' + OR LeaseEndTime < @Now); + SET @MyLeasesNumber = (SELECT count(*) + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseHolder = @Worker + AND LeaseEndTime > @Now); + SET @OtherValidRequestsOrLeasesNumber = (SELECT count(*) + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseHolder <> @Worker + AND LeaseEndTime > @Now + OR LeaseRequestor <> @Worker + AND datediff(second, LeaseRequestTime, @Now) < @LeasePeriodSec); + SET @MyValidRequestsOrLeasesNumber = (SELECT count(*) + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseHolder = @Worker + AND LeaseEndTime > @Now + OR LeaseRequestor = @Worker + AND datediff(second, LeaseRequestTime, @Now) < @LeasePeriodSec); + SET @DesiredLeasesNumber = ceiling(1.0 * @WatchdogNumber / @NumberOfWorkers); + IF @DesiredLeasesNumber = 0 + SET @DesiredLeasesNumber = 1; + IF @DesiredLeasesNumber = 1 + AND @OtherValidRequestsOrLeasesNumber = 1 + AND @WatchdogNumber = 1 + SET 
@DesiredLeasesNumber = 0; + IF @MyValidRequestsOrLeasesNumber = floor(1.0 * @WatchdogNumber / @NumberOfWorkers) + AND @OtherValidRequestsOrLeasesNumber + @MyValidRequestsOrLeasesNumber = @WatchdogNumber + SET @DesiredLeasesNumber = @DesiredLeasesNumber - 1; + UPDATE dbo.WatchdogLeases + SET LeaseHolder = @Worker, + LeaseEndTime = @LeaseEndTime, + LeaseRequestor = '', + @PreviousLeaseHolder = LeaseHolder + WHERE Watchdog = @Watchdog + AND NOT (LeaseRequestor <> @Worker + AND datediff(second, LeaseRequestTime, @Now) < @LeasePeriodSec) + AND (LeaseHolder = @Worker + AND (LeaseEndTime > @Now + OR @WorkerIsRunning = 1) + OR LeaseEndTime < @Now + AND (@DesiredLeasesNumber > @MyLeasesNumber + OR @OtherValidRequestsOrLeasesNumber < @WatchdogNumber)); + IF @@rowcount > 0 + BEGIN + SET @IsAcquired = 1; + SET @msg = 'Lease holder changed from [' + isnull(@PreviousLeaseHolder, '') + '] to [' + @Worker + ']'; + IF @PreviousLeaseHolder <> @Worker + EXECUTE dbo.LogEvent @Process = 'AcquireWatchdogLease', @Status = 'Info', @Mode = @Mode, @Text = @msg; + END + ELSE + IF @AllowRebalance = 1 + BEGIN + SET @CurrentLeaseHolder = (SELECT LeaseHolder + FROM dbo.WatchdogLeases + WHERE Watchdog = @Watchdog); + UPDATE dbo.WatchdogLeases + SET LeaseRequestTime = @Now + WHERE Watchdog = @Watchdog + AND LeaseRequestor = @Worker + AND datediff(second, LeaseRequestTime, @Now) < @LeasePeriodSec; + IF @DesiredLeasesNumber > @MyValidRequestsOrLeasesNumber + BEGIN + UPDATE A + SET LeaseRequestor = @Worker, + LeaseRequestTime = @Now + FROM dbo.WatchdogLeases AS A + WHERE Watchdog = @Watchdog + AND NOT (LeaseRequestor <> @Worker + AND datediff(second, LeaseRequestTime, @Now) < @LeasePeriodSec) + AND @NotLeasedWatchdogNumber = 0 + AND (SELECT count(*) + FROM dbo.WatchdogLeases AS B + WHERE B.LeaseHolder = A.LeaseHolder + AND datediff(second, B.LeaseEndTime, @Now) < @LeasePeriodSec) > @DesiredLeasesNumber; + SET @RowsInt = @@rowcount; + SET @msg = '@DesiredLeasesNumber=[' + CONVERT (VARCHAR (10), 
@DesiredLeasesNumber) + '] > @MyValidRequestsOrLeasesNumber=[' + CONVERT (VARCHAR (10), @MyValidRequestsOrLeasesNumber) + ']'; + EXECUTE dbo.LogEvent @Process = 'AcquireWatchdogLease', @Status = 'Info', @Mode = @Mode, @Rows = @RowsInt, @Text = @msg; + END + END + SET @Mode = CONVERT (VARCHAR (100), @Mode + ' A=' + CONVERT (VARCHAR (1), @IsAcquired)); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = 'AcquireWatchdogLease', @Status = 'Error', @Mode = @Mode; + THROW; +END CATCH + +GO +CREATE OR ALTER PROCEDURE dbo.AddPartitionOnResourceChanges +@partitionBoundary DATETIME2 (7) OUTPUT +AS +BEGIN + SET XACT_ABORT ON; + BEGIN TRANSACTION; + DECLARE @rightPartitionBoundary AS DATETIME2 (7) = CAST ((SELECT TOP (1) value + FROM sys.partition_range_values AS prv + INNER JOIN + sys.partition_functions AS pf + ON pf.function_id = prv.function_id + WHERE pf.name = N'PartitionFunction_ResourceChangeData_Timestamp' + ORDER BY prv.boundary_id DESC) AS DATETIME2 (7)); + DECLARE @timestamp AS DATETIME2 (7) = DATEADD(hour, DATEDIFF(hour, 0, sysutcdatetime()), 0); + IF (@rightPartitionBoundary < @timestamp) + BEGIN + SET @rightPartitionBoundary = @timestamp; + END + SET @rightPartitionBoundary = DATEADD(hour, 1, @rightPartitionBoundary); + ALTER PARTITION SCHEME PartitionScheme_ResourceChangeData_Timestamp NEXT USED [Primary]; + ALTER PARTITION FUNCTION PartitionFunction_ResourceChangeData_Timestamp( ) + SPLIT RANGE (@rightPartitionBoundary); + SET @partitionBoundary = @rightPartitionBoundary; + COMMIT TRANSACTION; +END + +GO +CREATE PROCEDURE dbo.ArchiveJobs +@QueueType TINYINT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'ArchiveJobs', @Mode AS VARCHAR (100) = '', @st AS DATETIME = getUTCdate(), @Rows AS INT = 0, @PartitionId AS TINYINT, @MaxPartitions AS TINYINT = 16, @LookedAtPartitions AS TINYINT = 
0, @InflightRows AS INT = 0, @Lock AS VARCHAR (100) = 'DequeueJob_' + CONVERT (VARCHAR, @QueueType); -- applock name scoped to the queue type +BEGIN TRY + SET @PartitionId = @MaxPartitions * rand(); -- random starting partition + BEGIN TRANSACTION; + EXECUTE sp_getapplock @Lock, 'Exclusive'; -- serialize with other archivers of this queue type + WHILE @LookedAtPartitions <= @MaxPartitions -- NOTE(review): <= makes 17 passes over 16 partitions, so one partition is scanned twice; harmless here, but likely intended < + BEGIN + SET @InflightRows += (SELECT count(*) + FROM dbo.JobQueue + WHERE PartitionId = @PartitionId + AND QueueType = @QueueType + AND Status IN (0, 1)); -- in-flight statuses; archive only when none remain + SET @PartitionId = CASE WHEN @PartitionId = 15 THEN 0 ELSE @PartitionId + 1 END; + SET @LookedAtPartitions = @LookedAtPartitions + 1; + END + IF @InflightRows = 0 + BEGIN + SET @LookedAtPartitions = 0; + WHILE @LookedAtPartitions <= @MaxPartitions + BEGIN + UPDATE dbo.JobQueue + SET Status = 5 -- NOTE(review): 5 appears to mean 'archived' -- confirm against the status enum + WHERE PartitionId = @PartitionId + AND QueueType = @QueueType + AND Status IN (2, 3, 4); + SET @Rows += @@rowcount; + SET @PartitionId = CASE WHEN @PartitionId = 15 THEN 0 ELSE @PartitionId + 1 END; + SET @LookedAtPartitions = @LookedAtPartitions + 1; + END + END + COMMIT TRANSACTION; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.CaptureResourceChanges -- records a create/update/delete change event for a single resource +@isDeleted BIT, @version INT, @resourceId VARCHAR (64), @resourceTypeId SMALLINT +AS +BEGIN + DECLARE @changeType AS SMALLINT; -- 0 = create, 1 = update, 2 = delete (derived below) + IF (@isDeleted = 1) + BEGIN + SET @changeType = 2; + END + ELSE + BEGIN + IF (@version = 1) + BEGIN + SET @changeType = 0; + END + ELSE + BEGIN + SET @changeType = 1; + END + END + INSERT INTO dbo.ResourceChangeData (ResourceId, ResourceTypeId, ResourceVersion, ResourceChangeTypeId) + VALUES (@resourceId, @resourceTypeId, @version, @changeType); +END + +GO +CREATE PROCEDURE dbo.CaptureResourceIdsForChanges -- set-based change capture over a resource TVP +@Resources dbo.ResourceList READONLY +AS +SET NOCOUNT ON; +INSERT INTO dbo.ResourceChangeData (ResourceId, 
ResourceTypeId, ResourceVersion, ResourceChangeTypeId) +SELECT ResourceId, + ResourceTypeId, + Version, + CASE WHEN IsDeleted = 1 THEN 2 WHEN Version > 1 THEN 1 ELSE 0 END +FROM @Resources +WHERE IsHistory = 0; + +GO +CREATE PROCEDURE dbo.CheckActiveReindexJobs +AS +SET NOCOUNT ON; +SELECT Id +FROM dbo.ReindexJob +WHERE Status = 'Running' + OR Status = 'Queued' + OR Status = 'Paused'; + +GO +CREATE PROCEDURE dbo.CleanupEventLog +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'CleanupEventLog', @Mode AS VARCHAR (100) = '', @MaxDeleteRows AS INT, @MaxAllowedRows AS BIGINT, @RetentionPeriodSecond AS INT, @DeletedRows AS INT, @TotalDeletedRows AS INT = 0, @TotalRows AS INT, @Now AS DATETIME = getUTCdate(); +EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; +BEGIN TRY + SET @MaxDeleteRows = (SELECT Number + FROM dbo.Parameters + WHERE Id = 'CleanupEventLog.DeleteBatchSize'); + IF @MaxDeleteRows IS NULL + RAISERROR ('Cannot get Parameter.CleanupEventLog.DeleteBatchSize', 18, 127); + SET @MaxAllowedRows = (SELECT Number + FROM dbo.Parameters + WHERE Id = 'CleanupEventLog.AllowedRows'); + IF @MaxAllowedRows IS NULL + RAISERROR ('Cannot get Parameter.CleanupEventLog.AllowedRows', 18, 127); + SET @RetentionPeriodSecond = (SELECT Number * 24 * 60 * 60 + FROM dbo.Parameters + WHERE Id = 'CleanupEventLog.RetentionPeriodDay'); + IF @RetentionPeriodSecond IS NULL + RAISERROR ('Cannot get Parameter.CleanupEventLog.RetentionPeriodDay', 18, 127); + SET @TotalRows = (SELECT sum(row_count) + FROM sys.dm_db_partition_stats + WHERE object_id = object_id('EventLog') + AND index_id IN (0, 1)); + SET @DeletedRows = 1; + WHILE @DeletedRows > 0 + AND EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = 'CleanupEventLog.IsEnabled' + AND Number = 1) + BEGIN + SET @DeletedRows = 0; + IF @TotalRows - @TotalDeletedRows > @MaxAllowedRows + BEGIN + DELETE TOP (@MaxDeleteRows) + dbo.EventLog WITH (PAGLOCK) + WHERE EventDate <= dateadd(second, 
-@RetentionPeriodSecond, @Now); -- delete one batch of rows older than the retention window + SET @DeletedRows = @@rowcount; + SET @TotalDeletedRows += @DeletedRows; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = 'EventLog', @Action = 'Delete', @Rows = @DeletedRows, @Text = @TotalDeletedRows; + END + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @Now; +END TRY +BEGIN CATCH + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE OR ALTER PROCEDURE dbo.ConfigurePartitionOnResourceChanges -- pre-creates hourly partitions for resource-change data +@numberOfFuturePartitionsToAdd INT +AS +BEGIN + SET XACT_ABORT ON; + BEGIN TRANSACTION; + DECLARE @partitionBoundary AS DATETIME2 (7) = DATEADD(hour, DATEDIFF(hour, 0, sysutcdatetime()), 0); -- current UTC time truncated to the hour + DECLARE @startingRightPartitionBoundary AS DATETIME2 (7) = CAST ((SELECT TOP (1) value + FROM sys.partition_range_values AS prv + INNER JOIN + sys.partition_functions AS pf + ON pf.function_id = prv.function_id + WHERE pf.name = N'PartitionFunction_ResourceChangeData_Timestamp' + ORDER BY prv.boundary_id DESC) AS DATETIME2 (7)); + DECLARE @numberOfPartitionsToAdd AS INT = @numberOfFuturePartitionsToAdd + 1; -- +1 includes the current hour's partition + WHILE @numberOfPartitionsToAdd > 0 + BEGIN + IF (@startingRightPartitionBoundary < @partitionBoundary) -- skip boundaries that already exist + BEGIN + ALTER PARTITION SCHEME PartitionScheme_ResourceChangeData_Timestamp NEXT USED [PRIMARY]; + ALTER PARTITION FUNCTION PartitionFunction_ResourceChangeData_Timestamp( ) + SPLIT RANGE (@partitionBoundary); + END + SET @partitionBoundary = DATEADD(hour, 1, @partitionBoundary); -- advance to the next hourly boundary + SET @numberOfPartitionsToAdd -= 1; + END + COMMIT TRANSACTION; +END + +GO +CREATE PROCEDURE dbo.CreateReindexJob -- inserts a new reindex job row and returns a version token +@id VARCHAR (64), @status VARCHAR (10), @rawJobRecord VARCHAR (MAX) +AS +SET NOCOUNT ON; +SET XACT_ABORT ON; +BEGIN TRANSACTION; +DECLARE @heartbeatDateTime AS DATETIME2 (7) = SYSUTCDATETIME(); -- initial heartbeat = creation time +INSERT INTO dbo.ReindexJob (Id, Status, HeartbeatDateTime, RawJobRecord) +VALUES (@id, @status, @heartbeatDateTime, @rawJobRecord); +SELECT 
CAST (MIN_ACTIVE_ROWVERSION() AS INT); +COMMIT TRANSACTION; + +GO +CREATE PROCEDURE dbo.CreateResourceSearchParamStats +@Table VARCHAR (100), @Column VARCHAR (100), @ResourceTypeId SMALLINT, @SearchParamId SMALLINT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (200) = 'T=' + isnull(@Table, 'NULL') + ' C=' + isnull(@Column, 'NULL') + ' RT=' + isnull(CONVERT (VARCHAR, @ResourceTypeId), 'NULL') + ' SP=' + isnull(CONVERT (VARCHAR, @SearchParamId), 'NULL'), @st AS DATETIME = getUTCdate(); +BEGIN TRY + IF @Table IS NULL + OR @Column IS NULL + OR @ResourceTypeId IS NULL + OR @SearchParamId IS NULL + RAISERROR ('@TableName IS NULL OR @KeyColumn IS NULL OR @ResourceTypeId IS NULL OR @SearchParamId IS NULL', 18, 127); + EXECUTE ('CREATE STATISTICS ST_' + @Column + '_WHERE_ResourceTypeId_' + @ResourceTypeId + '_SearchParamId_' + @SearchParamId + ' ON dbo.' + @Table + ' (' + @Column + ') WHERE ResourceTypeId = ' + @ResourceTypeId + ' AND SearchParamId = ' + @SearchParamId); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Text = 'Stats created'; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + IF error_number() = 1927 + BEGIN + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; + RETURN; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.Defrag +@TableName VARCHAR (100), @IndexName VARCHAR (200), @PartitionNumber INT, @IsPartitioned BIT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'Defrag', @Mode AS VARCHAR (200) = @TableName + '.' + @IndexName + '.' + CONVERT (VARCHAR, @PartitionNumber) + '.' 
+ CONVERT (VARCHAR, @IsPartitioned), @st AS DATETIME = getUTCdate(), @SQL AS VARCHAR (3500), @msg AS VARCHAR (1000), @SizeBefore AS FLOAT, @SizeAfter AS FLOAT, @IndexId AS INT; +BEGIN TRY + SET @IndexId = (SELECT index_id + FROM sys.indexes + WHERE object_id = object_id(@TableName) + AND name = @IndexName); + SET @SizeBefore = (SELECT sum(reserved_page_count) + FROM sys.dm_db_partition_stats + WHERE object_id = object_id(@TableName) + AND index_id = @IndexId) * 8.0 / 1024 / 1024; + SET @msg = 'Size[GB] before=' + CONVERT (VARCHAR, @SizeBefore); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start', @Text = @msg; + SET @Sql = 'ALTER INDEX ' + quotename(@IndexName) + ' ON dbo.' + quotename(@TableName) + ' REORGANIZE' + CASE WHEN @IsPartitioned = 1 THEN ' PARTITION = ' + CONVERT (VARCHAR, @PartitionNumber) ELSE '' END; + BEGIN TRY + EXECUTE (@Sql); + SET @SizeAfter = (SELECT sum(reserved_page_count) + FROM sys.dm_db_partition_stats + WHERE object_id = object_id(@TableName) + AND index_id = @IndexId) * 8.0 / 1024 / 1024; + SET @msg = 'Size[GB] before=' + CONVERT (VARCHAR, @SizeBefore) + ', after=' + CONVERT (VARCHAR, @SizeAfter) + ', reduced by=' + CONVERT (VARCHAR, @SizeBefore - @SizeAfter); + EXECUTE dbo.LogEvent @Process = @SP, @Status = 'End', @Mode = @Mode, @Action = 'Reorganize', @Start = @st, @Text = @msg; + END TRY + BEGIN CATCH + EXECUTE dbo.LogEvent @Process = @SP, @Status = 'Error', @Mode = @Mode, @Action = 'Reorganize', @Start = @st, @ReRaisError = 0; + END CATCH +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.DefragChangeDatabaseSettings +@IsOn BIT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'DefragChangeDatabaseSettings', @Mode AS VARCHAR (200) = 'On=' + CONVERT (VARCHAR, @IsOn), @st AS DATETIME = getUTCdate(), @SQL AS VARCHAR (3500); +BEGIN TRY + EXECUTE dbo.LogEvent @Process 
= @SP, @Status = 'Start', @Mode = @Mode; + SET @SQL = 'ALTER DATABASE CURRENT SET AUTO_UPDATE_STATISTICS ' + CASE WHEN @IsOn = 1 THEN 'ON' ELSE 'OFF' END; + EXECUTE (@SQL); + EXECUTE dbo.LogEvent @Process = @SP, @Status = 'Run', @Mode = @Mode, @Text = @SQL; + SET @SQL = 'ALTER DATABASE CURRENT SET AUTO_CREATE_STATISTICS ' + CASE WHEN @IsOn = 1 THEN 'ON' ELSE 'OFF' END; + EXECUTE (@SQL); + EXECUTE dbo.LogEvent @Process = @SP, @Status = 'End', @Mode = @Mode, @Start = @st, @Text = @SQL; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.DeleteHistory +@DeleteResources BIT=0, @Reset BIT=0, @DisableLogEvent BIT=0 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'DeleteHistory', @Mode AS VARCHAR (100) = 'D=' + isnull(CONVERT (VARCHAR, @DeleteResources), 'NULL') + ' R=' + isnull(CONVERT (VARCHAR, @Reset), 'NULL'), @st AS DATETIME = getUTCdate(), @Id AS VARCHAR (100) = 'DeleteHistory.LastProcessed.TypeId.SurrogateId', @ResourceTypeId AS SMALLINT, @SurrogateId AS BIGINT, @RowsToProcess AS INT, @ProcessedResources AS INT = 0, @DeletedResources AS INT = 0, @DeletedSearchParams AS INT = 0, @ReportDate AS DATETIME = getUTCdate(); +BEGIN TRY + IF @DisableLogEvent = 0 + INSERT INTO dbo.Parameters (Id, Char) + SELECT @SP, + 'LogEvent'; + ELSE + DELETE dbo.Parameters + WHERE Id = @SP; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + INSERT INTO dbo.Parameters (Id, Char) + SELECT @Id, + '0.0' + WHERE NOT EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = @Id); + DECLARE @LastProcessed AS VARCHAR (100) = CASE WHEN @Reset = 0 THEN (SELECT Char + FROM dbo.Parameters + WHERE Id = @Id) ELSE '0.0' END; + DECLARE @Types TABLE ( + ResourceTypeId SMALLINT PRIMARY KEY, + Name VARCHAR (100)); + DECLARE @SurrogateIds TABLE ( + ResourceSurrogateId BIGINT PRIMARY KEY, + IsHistory BIT ); + INSERT INTO @Types + EXECUTE 
dbo.GetUsedResourceTypes ; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = '@Types', @Action = 'Insert', @Rows = @@rowcount; + SET @ResourceTypeId = substring(@LastProcessed, 1, charindex('.', @LastProcessed) - 1); + SET @SurrogateId = substring(@LastProcessed, charindex('.', @LastProcessed) + 1, 255); + DELETE @Types + WHERE ResourceTypeId < @ResourceTypeId; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = '@Types', @Action = 'Delete', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @Types) + BEGIN + SET @ResourceTypeId = (SELECT TOP 1 ResourceTypeId + FROM @Types + ORDER BY ResourceTypeId); + SET @ProcessedResources = 0; + SET @DeletedResources = 0; + SET @DeletedSearchParams = 0; + SET @RowsToProcess = 1; + WHILE @RowsToProcess > 0 + BEGIN + DELETE @SurrogateIds; + INSERT INTO @SurrogateIds + SELECT TOP 10000 ResourceSurrogateId, + IsHistory + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId > @SurrogateId + ORDER BY ResourceSurrogateId; + SET @RowsToProcess = @@rowcount; + SET @ProcessedResources += @RowsToProcess; + IF @RowsToProcess > 0 + SET @SurrogateId = (SELECT max(ResourceSurrogateId) + FROM @SurrogateIds); + SET @LastProcessed = CONVERT (VARCHAR, @ResourceTypeId) + '.' 
+ CONVERT (VARCHAR, @SurrogateId); + DELETE @SurrogateIds + WHERE IsHistory = 0; + IF EXISTS (SELECT * + FROM @SurrogateIds) + BEGIN + DELETE dbo.ResourceWriteClaim + WHERE ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.CompartmentAssignment + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.ReferenceSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenText + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.StringSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.UriSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.NumberSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.QuantitySearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.DateTimeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET 
@DeletedSearchParams += @@rowcount; + DELETE dbo.ReferenceTokenCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenTokenCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenDateTimeCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenQuantityCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenStringCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenNumberNumberCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + IF @DeleteResources = 1 + BEGIN + DELETE dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedResources += @@rowcount; + END + END + UPDATE dbo.Parameters + SET Char = @LastProcessed + WHERE Id = @Id; + IF datediff(second, @ReportDate, getUTCdate()) > 60 + BEGIN + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = 'Resource', @Action = 'Select', @Rows = @ProcessedResources, @Text = @LastProcessed; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = '*SearchParam', @Action = 'Delete', @Rows = @DeletedSearchParams, @Text = @LastProcessed; + IF 
@DeleteResources = 1 + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = 'Resource', @Action = 'Delete', @Rows = @DeletedResources, @Text = @LastProcessed; + SET @ReportDate = getUTCdate(); + SET @ProcessedResources = 0; + SET @DeletedSearchParams = 0; + SET @DeletedResources = 0; + END + END + DELETE @Types + WHERE ResourceTypeId = @ResourceTypeId; + SET @SurrogateId = 0; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = 'Resource', @Action = 'Select', @Rows = @ProcessedResources, @Text = @LastProcessed; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = '*SearchParam', @Action = 'Delete', @Rows = @DeletedSearchParams, @Text = @LastProcessed; + IF @DeleteResources = 1 + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = 'Resource', @Action = 'Delete', @Rows = @DeletedResources, @Text = @LastProcessed; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.DequeueJob +@QueueType TINYINT, @Worker VARCHAR (100), @HeartbeatTimeoutSec INT, @InputJobId BIGINT=NULL, @CheckTimeoutJobs BIT=0 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'DequeueJob', @Mode AS VARCHAR (100) = 'Q=' + isnull(CONVERT (VARCHAR, @QueueType), 'NULL') + ' H=' + isnull(CONVERT (VARCHAR, @HeartbeatTimeoutSec), 'NULL') + ' W=' + isnull(@Worker, 'NULL') + ' IJ=' + isnull(CONVERT (VARCHAR, @InputJobId), 'NULL') + ' T=' + isnull(CONVERT (VARCHAR, @CheckTimeoutJobs), 'NULL'), @Rows AS INT = 0, @st AS DATETIME = getUTCdate(), @JobId AS BIGINT, @msg AS VARCHAR (100), @Lock AS VARCHAR (100), @PartitionId AS TINYINT, @MaxPartitions AS TINYINT = 16, @LookedAtPartitions AS TINYINT = 0; +BEGIN TRY + IF EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = 'DequeueJobStop' + AND 
Number = 1) + BEGIN + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = 0, @Text = 'Skipped'; + RETURN; + END + IF @InputJobId IS NULL + SET @PartitionId = @MaxPartitions * rand(); + ELSE + SET @PartitionId = @InputJobId % 16; + SET TRANSACTION ISOLATION LEVEL READ COMMITTED; + WHILE @InputJobId IS NULL + AND @JobId IS NULL + AND @LookedAtPartitions < @MaxPartitions + AND @CheckTimeoutJobs = 0 + BEGIN + SET @Lock = 'DequeueJob_' + CONVERT (VARCHAR, @QueueType) + '_' + CONVERT (VARCHAR, @PartitionId); + BEGIN TRANSACTION; + EXECUTE sp_getapplock @Lock, 'Exclusive'; + UPDATE T + SET StartDate = getUTCdate(), + HeartbeatDate = getUTCdate(), + Worker = @Worker, + Status = 1, + Version = datediff_big(millisecond, '0001-01-01', getUTCdate()), + @JobId = T.JobId + FROM dbo.JobQueue AS T WITH (PAGLOCK) + INNER JOIN + (SELECT TOP 1 JobId + FROM dbo.JobQueue WITH (INDEX (IX_QueueType_PartitionId_Status_Priority)) + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND Status = 0 + ORDER BY Priority, JobId) AS S + ON QueueType = @QueueType + AND PartitionId = @PartitionId + AND T.JobId = S.JobId; + SET @Rows += @@rowcount; + COMMIT TRANSACTION; + IF @JobId IS NULL + BEGIN + SET @PartitionId = CASE WHEN @PartitionId = 15 THEN 0 ELSE @PartitionId + 1 END; + SET @LookedAtPartitions = @LookedAtPartitions + 1; + END + END + SET @LookedAtPartitions = 0; + WHILE @InputJobId IS NULL + AND @JobId IS NULL + AND @LookedAtPartitions < @MaxPartitions + BEGIN + SET @Lock = 'DequeueStoreCopyWorkUnit_' + CONVERT (VARCHAR, @PartitionId); + BEGIN TRANSACTION; + EXECUTE sp_getapplock @Lock, 'Exclusive'; + UPDATE T + SET StartDate = getUTCdate(), + HeartbeatDate = getUTCdate(), + Worker = @Worker, + Status = CASE WHEN CancelRequested = 0 THEN 1 ELSE 4 END, + Version = datediff_big(millisecond, '0001-01-01', getUTCdate()), + @JobId = CASE WHEN CancelRequested = 0 THEN T.JobId END, + Info = CONVERT (VARCHAR (1000), isnull(Info, '') + ' Prev: 
Worker=' + Worker + ' Start=' + CONVERT (VARCHAR, StartDate, 121)) + FROM dbo.JobQueue AS T WITH (PAGLOCK) + INNER JOIN + (SELECT TOP 1 JobId + FROM dbo.JobQueue WITH (INDEX (IX_QueueType_PartitionId_Status_Priority)) + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND Status = 1 + AND datediff(second, HeartbeatDate, getUTCdate()) > @HeartbeatTimeoutSec + ORDER BY Priority, JobId) AS S + ON QueueType = @QueueType + AND PartitionId = @PartitionId + AND T.JobId = S.JobId; + SET @Rows += @@rowcount; + COMMIT TRANSACTION; + IF @JobId IS NULL + BEGIN + SET @PartitionId = CASE WHEN @PartitionId = 15 THEN 0 ELSE @PartitionId + 1 END; + SET @LookedAtPartitions = @LookedAtPartitions + 1; + END + END + IF @InputJobId IS NOT NULL + BEGIN + UPDATE dbo.JobQueue WITH (PAGLOCK) + SET StartDate = getUTCdate(), + HeartbeatDate = getUTCdate(), + Worker = @Worker, + Status = 1, + Version = datediff_big(millisecond, '0001-01-01', getUTCdate()), + @JobId = JobId + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND Status = 0 + AND JobId = @InputJobId; + SET @Rows += @@rowcount; + IF @JobId IS NULL + BEGIN + UPDATE dbo.JobQueue WITH (PAGLOCK) + SET StartDate = getUTCdate(), + HeartbeatDate = getUTCdate(), + Worker = @Worker, + Status = 1, + Version = datediff_big(millisecond, '0001-01-01', getUTCdate()), + @JobId = JobId + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND Status = 1 + AND JobId = @InputJobId + AND datediff(second, HeartbeatDate, getUTCdate()) > @HeartbeatTimeoutSec; + SET @Rows += @@rowcount; + END + END + IF @JobId IS NOT NULL + EXECUTE dbo.GetJobs @QueueType = @QueueType, @JobId = @JobId; + SET @msg = 'J=' + isnull(CONVERT (VARCHAR, @JobId), 'NULL') + ' P=' + CONVERT (VARCHAR, @PartitionId); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows, @Text = @msg; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent 
@Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.DisableIndex +@tableName NVARCHAR (128), @indexName NVARCHAR (128) +WITH EXECUTE AS 'dbo' +AS +DECLARE @errorTxt AS VARCHAR (1000), @sql AS NVARCHAR (1000), @isDisabled AS BIT; +IF object_id(@tableName) IS NULL + BEGIN + SET @errorTxt = @tableName + ' does not exist or you don''t have permissions.'; + RAISERROR (@errorTxt, 18, 127); + END +SET @isDisabled = (SELECT is_disabled + FROM sys.indexes + WHERE object_id = object_id(@tableName) + AND name = @indexName); +IF @isDisabled IS NULL + BEGIN + SET @errorTxt = @indexName + ' does not exist or you don''t have permissions.'; + RAISERROR (@errorTxt, 18, 127); + END +IF @isDisabled = 0 + BEGIN + SET @sql = N'ALTER INDEX ' + QUOTENAME(@indexName) + N' on ' + @tableName + ' Disable'; + EXECUTE sp_executesql @sql; + END + +GO +CREATE PROCEDURE dbo.DisableIndexes +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'DisableIndexes', @Mode AS VARCHAR (200) = '', @st AS DATETIME = getUTCdate(), @Tbl AS VARCHAR (100), @Ind AS VARCHAR (200), @Txt AS VARCHAR (4000); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + DECLARE @Tables TABLE ( + Tbl VARCHAR (100) PRIMARY KEY, + Supported BIT ); + INSERT INTO @Tables + EXECUTE dbo.GetPartitionedTables @IncludeNotDisabled = 1, @IncludeNotSupported = 0; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Tables', @Action = 'Insert', @Rows = @@rowcount; + DECLARE @Indexes TABLE ( + Tbl VARCHAR (100), + Ind VARCHAR (200), + TblId INT , + IndId INT PRIMARY KEY (Tbl, Ind)); + INSERT INTO @Indexes + SELECT Tbl, + I.Name, + TblId, + I.index_id + FROM (SELECT object_id(Tbl) AS TblId, + Tbl + FROM @Tables) AS O + INNER JOIN + sys.indexes AS I + ON I.object_id = TblId; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Indexes', @Action = 'Insert', @Rows = @@rowcount; + 
INSERT INTO dbo.IndexProperties (TableName, IndexName, PropertyName, PropertyValue) + SELECT Tbl, + Ind, + 'DATA_COMPRESSION', + data_comp + FROM (SELECT Tbl, + Ind, + isnull((SELECT TOP 1 CASE WHEN data_compression_desc = 'PAGE' THEN 'PAGE' END + FROM sys.partitions + WHERE object_id = TblId + AND index_id = IndId), 'NONE') AS data_comp + FROM @Indexes) AS A + WHERE NOT EXISTS (SELECT * + FROM dbo.IndexProperties + WHERE TableName = Tbl + AND IndexName = Ind); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = 'IndexProperties', @Action = 'Insert', @Rows = @@rowcount; + DELETE @Indexes + WHERE Tbl = 'Resource' + OR IndId = 1; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Indexes', @Action = 'Delete', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @Indexes) + BEGIN + SELECT TOP 1 @Tbl = Tbl, + @Ind = Ind + FROM @Indexes; + SET @Txt = 'ALTER INDEX ' + @Ind + ' ON dbo.' + @Tbl + ' DISABLE'; + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Ind, @Action = 'Disable', @Text = @Txt; + DELETE @Indexes + WHERE Tbl = @Tbl + AND Ind = @Ind; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.EnqueueJobs +@QueueType TINYINT, @Definitions StringList READONLY, @GroupId BIGINT=NULL, @ForceOneActiveJobGroup BIT=1, @IsCompleted BIT=NULL, @ReturnJobs BIT=1 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'EnqueueJobs', @Mode AS VARCHAR (100) = 'Q=' + isnull(CONVERT (VARCHAR, @QueueType), 'NULL') + ' D=' + CONVERT (VARCHAR, (SELECT count(*) + FROM @Definitions)) + ' G=' + isnull(CONVERT (VARCHAR, @GroupId), 'NULL') + ' F=' + isnull(CONVERT (VARCHAR, @ForceOneActiveJobGroup), 'NULL') + ' C=' + isnull(CONVERT (VARCHAR, 
@IsCompleted), 'NULL'), @st AS DATETIME = getUTCdate(), @Lock AS VARCHAR (100) = 'EnqueueJobs_' + CONVERT (VARCHAR, @QueueType), @MaxJobId AS BIGINT, @Rows AS INT, @msg AS VARCHAR (1000), @JobIds AS BigintList, @InputRows AS INT; +BEGIN TRY + DECLARE @Input TABLE ( + DefinitionHash VARBINARY (20) PRIMARY KEY, + Definition VARCHAR (MAX) ); + INSERT INTO @Input + SELECT hashbytes('SHA1', String) AS DefinitionHash, + String AS Definition + FROM @Definitions; + SET @InputRows = @@rowcount; + INSERT INTO @JobIds + SELECT JobId + FROM @Input AS A + INNER JOIN + dbo.JobQueue AS B + ON B.QueueType = @QueueType + AND B.DefinitionHash = A.DefinitionHash + AND B.Status <> 5; + IF @@rowcount < @InputRows + BEGIN + BEGIN TRANSACTION; + EXECUTE sp_getapplock @Lock, 'Exclusive'; + IF @ForceOneActiveJobGroup = 1 + AND EXISTS (SELECT * + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND Status IN (0, 1) + AND (@GroupId IS NULL + OR GroupId <> @GroupId)) + RAISERROR ('There are other active job groups', 18, 127); + SET @MaxJobId = isnull((SELECT TOP 1 JobId + FROM dbo.JobQueue + WHERE QueueType = @QueueType + ORDER BY JobId DESC), 0); + INSERT INTO dbo.JobQueue (QueueType, GroupId, JobId, Definition, DefinitionHash, Status) + OUTPUT inserted.JobId INTO @JobIds + SELECT @QueueType, + isnull(@GroupId, @MaxJobId + 1) AS GroupId, + JobId, + Definition, + DefinitionHash, + CASE WHEN @IsCompleted = 1 THEN 2 ELSE 0 END AS Status + FROM (SELECT @MaxJobId + row_number() OVER (ORDER BY Dummy) AS JobId, + * + FROM (SELECT *, + 0 AS Dummy + FROM @Input) AS A) AS A + WHERE NOT EXISTS (SELECT * + FROM dbo.JobQueue AS B WITH (INDEX (IX_QueueType_DefinitionHash)) + WHERE B.QueueType = @QueueType + AND B.DefinitionHash = A.DefinitionHash + AND B.Status <> 5); + SET @Rows = @@rowcount; + COMMIT TRANSACTION; + END + IF @ReturnJobs = 1 + EXECUTE dbo.GetJobs @QueueType = @QueueType, @JobIds = @JobIds; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = 
@Rows; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.ExecuteCommandForRebuildIndexes +@Tbl VARCHAR (100), @Ind VARCHAR (1000), @Cmd VARCHAR (MAX) +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'ExecuteCommandForRebuildIndexes', @Mode AS VARCHAR (200) = 'Tbl=' + isnull(@Tbl, 'NULL'), @st AS DATETIME, @Retries AS INT = 0, @Action AS VARCHAR (100), @msg AS VARCHAR (1000); +RetryOnTempdbError: +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start', @Text = @Cmd; + SET @st = getUTCdate(); + IF @Tbl IS NULL + RAISERROR ('@Tbl IS NULL', 18, 127); + IF @Cmd IS NULL + RAISERROR ('@Cmd IS NULL', 18, 127); + SET @Action = CASE WHEN @Cmd LIKE 'UPDATE STAT%' THEN 'Update statistics' WHEN @Cmd LIKE 'CREATE%INDEX%' THEN 'Create Index' WHEN @Cmd LIKE 'ALTER%INDEX%REBUILD%' THEN 'Rebuild Index' WHEN @Cmd LIKE 'ALTER%TABLE%ADD%' THEN 'Add Constraint' END; + IF @Action IS NULL + BEGIN + SET @msg = 'Not supported command = ' + CONVERT (VARCHAR (900), @Cmd); + RAISERROR (@msg, 18, 127); + END + IF @Action = 'Create Index' + WAITFOR DELAY '00:00:05'; + EXECUTE (@Cmd); + SELECT @Ind; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Action = @Action, @Status = 'End', @Start = @st, @Text = @Cmd; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + IF error_number() = 40544 + BEGIN + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st, @Retry = @Retries; + SET @Retries = @Retries + 1; + IF @Tbl = 'TokenText_96' + WAITFOR DELAY '01:00:00'; + ELSE + WAITFOR DELAY '00:10:00'; + GOTO RetryOnTempdbError; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE OR ALTER PROCEDURE dbo.FetchEventAgentCheckpoint +@CheckpointId VARCHAR (64) +AS +BEGIN + SELECT TOP (1) 
CheckpointId, + LastProcessedDateTime, + LastProcessedIdentifier + FROM dbo.EventAgentCheckpoint + WHERE CheckpointId = @CheckpointId; +END + +GO +CREATE PROCEDURE dbo.FetchResourceChanges_3 +@startId BIGINT, @lastProcessedUtcDateTime DATETIME2 (7), @pageSize SMALLINT +AS +BEGIN + SET NOCOUNT ON; + DECLARE @precedingPartitionBoundary AS DATETIME2 (7) = (SELECT TOP (1) CAST (prv.value AS DATETIME2 (7)) AS value + FROM sys.partition_range_values AS prv WITH (NOLOCK) + INNER JOIN + sys.partition_functions AS pf WITH (NOLOCK) + ON pf.function_id = prv.function_id + WHERE pf.name = N'PartitionFunction_ResourceChangeData_Timestamp' + AND SQL_VARIANT_PROPERTY(prv.Value, 'BaseType') = 'datetime2' + AND CAST (prv.value AS DATETIME2 (7)) < DATEADD(HOUR, DATEDIFF(HOUR, 0, @lastProcessedUtcDateTime), 0) + ORDER BY prv.boundary_id DESC); + IF (@precedingPartitionBoundary IS NULL) + BEGIN + SET @precedingPartitionBoundary = CONVERT (DATETIME2 (7), N'1970-01-01T00:00:00.0000000'); + END + DECLARE @endDateTimeToFilter AS DATETIME2 (7) = DATEADD(HOUR, 1, SYSUTCDATETIME()); + WITH PartitionBoundaries + AS (SELECT CAST (prv.value AS DATETIME2 (7)) AS PartitionBoundary + FROM sys.partition_range_values AS prv WITH (NOLOCK) + INNER JOIN + sys.partition_functions AS pf WITH (NOLOCK) + ON pf.function_id = prv.function_id + WHERE pf.name = N'PartitionFunction_ResourceChangeData_Timestamp' + AND SQL_VARIANT_PROPERTY(prv.Value, 'BaseType') = 'datetime2' + AND CAST (prv.value AS DATETIME2 (7)) BETWEEN @precedingPartitionBoundary AND @endDateTimeToFilter) + SELECT TOP (@pageSize) Id, + Timestamp, + ResourceId, + ResourceTypeId, + ResourceVersion, + ResourceChangeTypeId + FROM PartitionBoundaries AS p CROSS APPLY (SELECT TOP (@pageSize) Id, + Timestamp, + ResourceId, + ResourceTypeId, + ResourceVersion, + ResourceChangeTypeId + FROM dbo.ResourceChangeData WITH (TABLOCK, HOLDLOCK) + WHERE Id >= @startId + AND $PARTITION.PartitionFunction_ResourceChangeData_Timestamp (Timestamp) = 
$PARTITION.PartitionFunction_ResourceChangeData_Timestamp (p.PartitionBoundary) + ORDER BY Id ASC) AS rcd + ORDER BY rcd.Id ASC; +END + +GO +CREATE PROCEDURE dbo.GetActiveJobs +@QueueType TINYINT, @GroupId BIGINT=NULL +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetActiveJobs', @Mode AS VARCHAR (100) = 'Q=' + isnull(CONVERT (VARCHAR, @QueueType), 'NULL') + ' G=' + isnull(CONVERT (VARCHAR, @GroupId), 'NULL'), @st AS DATETIME = getUTCdate(), @JobIds AS BigintList, @PartitionId AS TINYINT, @MaxPartitions AS TINYINT = 16, @LookedAtPartitions AS TINYINT = 0, @Rows AS INT = 0; +BEGIN TRY + SET @PartitionId = @MaxPartitions * rand(); + WHILE @LookedAtPartitions < @MaxPartitions + BEGIN + IF @GroupId IS NULL + INSERT INTO @JobIds + SELECT JobId + FROM dbo.JobQueue + WHERE PartitionId = @PartitionId + AND QueueType = @QueueType + AND Status IN (0, 1); + ELSE + INSERT INTO @JobIds + SELECT JobId + FROM dbo.JobQueue + WHERE PartitionId = @PartitionId + AND QueueType = @QueueType + AND GroupId = @GroupId + AND Status IN (0, 1); + SET @Rows += @@rowcount; + SET @PartitionId = CASE WHEN @PartitionId = 15 THEN 0 ELSE @PartitionId + 1 END; + SET @LookedAtPartitions += 1; + END + IF @Rows > 0 + EXECUTE dbo.GetJobs @QueueType = @QueueType, @JobIds = @JobIds; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetCommandsForRebuildIndexes +@RebuildClustered BIT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetCommandsForRebuildIndexes', @Mode AS VARCHAR (200) = 'PS=PartitionScheme_ResourceTypeId RC=' + isnull(CONVERT (VARCHAR, @RebuildClustered), 'NULL'), @st AS DATETIME = getUTCdate(), @Tbl AS VARCHAR (100), @TblInt AS VARCHAR (100), @Ind AS VARCHAR (200), @IndId AS INT, @Supported AS BIT, @Txt AS VARCHAR (MAX), @Rows AS 
BIGINT, @Pages AS BIGINT, @ResourceTypeId AS SMALLINT, @IndexesCnt AS INT, @DataComp AS VARCHAR (100); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + DECLARE @Commands TABLE ( + Tbl VARCHAR (100), + Ind VARCHAR (200), + Txt VARCHAR (MAX), + Pages BIGINT ); + DECLARE @ResourceTypes TABLE ( + ResourceTypeId SMALLINT PRIMARY KEY); + DECLARE @Indexes TABLE ( + Ind VARCHAR (200) PRIMARY KEY, + IndId INT ); + DECLARE @Tables TABLE ( + name VARCHAR (100) PRIMARY KEY, + Supported BIT ); + INSERT INTO @Tables + EXECUTE dbo.GetPartitionedTables @IncludeNotDisabled = 1, @IncludeNotSupported = 1; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Tables', @Action = 'Insert', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @Tables) + BEGIN + SELECT TOP 1 @Tbl = name, + @Supported = Supported + FROM @Tables + ORDER BY name; + IF @Supported = 0 + BEGIN + INSERT INTO @Commands + SELECT @Tbl, + name, + 'ALTER INDEX ' + name + ' ON dbo.' 
+ @Tbl + ' REBUILD' + CASE WHEN (SELECT PropertyValue + FROM dbo.IndexProperties + WHERE TableName = @Tbl + AND IndexName = name) = 'PAGE' THEN ' PARTITION = ALL WITH (DATA_COMPRESSION = PAGE)' ELSE '' END, + CONVERT (BIGINT, 9e18) + FROM sys.indexes + WHERE object_id = object_id(@Tbl) + AND (is_disabled = 1 + AND index_id > 1 + AND @RebuildClustered = 0 + OR index_id = 1 + AND @RebuildClustered = 1); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Commands', @Action = 'Insert', @Rows = @@rowcount, @Text = 'Not supported tables with disabled indexes'; + END + ELSE + BEGIN + DELETE @ResourceTypes; + INSERT INTO @ResourceTypes + SELECT CONVERT (SMALLINT, substring(name, charindex('_', name) + 1, 6)) AS ResourceTypeId + FROM sys.sysobjects + WHERE name LIKE @Tbl + '[_]%'; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@ResourceTypes', @Action = 'Insert', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @ResourceTypes) + BEGIN + SET @ResourceTypeId = (SELECT TOP 1 ResourceTypeId + FROM @ResourceTypes + ORDER BY ResourceTypeId); + SET @TblInt = @Tbl + '_' + CONVERT (VARCHAR, @ResourceTypeId); + SET @Pages = (SELECT dpages + FROM sysindexes + WHERE id = object_id(@TblInt) + AND indid IN (0, 1)); + DELETE @Indexes; + INSERT INTO @Indexes + SELECT name, + index_id + FROM sys.indexes + WHERE object_id = object_id(@Tbl) + AND (index_id > 1 + AND @RebuildClustered = 0 + OR index_id = 1 + AND @RebuildClustered = 1); + SET @IndexesCnt = 0; + WHILE EXISTS (SELECT * + FROM @Indexes) + BEGIN + SELECT TOP 1 @Ind = Ind, + @IndId = IndId + FROM @Indexes + ORDER BY Ind; + IF @IndId = 1 + BEGIN + SET @Txt = 'ALTER INDEX ' + @Ind + ' ON dbo.' 
+ @TblInt + ' REBUILD' + CASE WHEN (SELECT PropertyValue + FROM dbo.IndexProperties + WHERE TableName = @Tbl + AND IndexName = @Ind) = 'PAGE' THEN ' PARTITION = ALL WITH (DATA_COMPRESSION = PAGE)' ELSE '' END; + INSERT INTO @Commands + SELECT @TblInt, + @Ind, + @Txt, + @Pages; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Add command', @Rows = @@rowcount, @Text = @Txt; + END + ELSE + IF NOT EXISTS (SELECT * + FROM sys.indexes + WHERE object_id = object_id(@TblInt) + AND name = @Ind) + BEGIN + EXECUTE dbo.GetIndexCommands @Tbl = @Tbl, @Ind = @Ind, @AddPartClause = 0, @IncludeClustered = 0, @Txt = @Txt OUTPUT; + SET @Txt = replace(@Txt, '[' + @Tbl + ']', @TblInt); + IF @Txt IS NOT NULL + BEGIN + SET @IndexesCnt = @IndexesCnt + 1; + INSERT INTO @Commands + SELECT @TblInt, + @Ind, + @Txt, + @Pages; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Add command', @Rows = @@rowcount, @Text = @Txt; + END + END + DELETE @Indexes + WHERE Ind = @Ind; + END + IF @IndexesCnt > 1 + BEGIN + INSERT INTO @Commands + SELECT @TblInt, + 'UPDATE STAT', + 'UPDATE STATISTICS dbo.' 
+ @TblInt, + @Pages; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Add command', @Rows = @@rowcount, @Text = 'Add stats update'; + END + DELETE @ResourceTypes + WHERE ResourceTypeId = @ResourceTypeId; + END + END + DELETE @Tables + WHERE name = @Tbl; + END + SELECT Tbl, + Ind, + Txt + FROM @Commands + ORDER BY Pages DESC, Tbl, CASE WHEN Txt LIKE 'UPDATE STAT%' THEN 0 ELSE 1 END; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Commands', @Action = 'Select', @Rows = @@rowcount; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetIndexCommands +@Tbl VARCHAR (100), @Ind VARCHAR (200), @AddPartClause BIT, @IncludeClustered BIT, @Txt VARCHAR (MAX)=NULL OUTPUT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetIndexCommands', @Mode AS VARCHAR (200) = 'Tbl=' + isnull(@Tbl, 'NULL') + ' Ind=' + isnull(@Ind, 'NULL'), @st AS DATETIME = getUTCdate(); +DECLARE @Indexes TABLE ( + Ind VARCHAR (200) PRIMARY KEY, + Txt VARCHAR (MAX)); +BEGIN TRY + IF @Tbl IS NULL + RAISERROR ('@Tbl IS NULL', 18, 127); + INSERT INTO @Indexes + SELECT Ind, + CASE WHEN is_primary_key = 1 THEN 'ALTER TABLE dbo.[' + Tbl + '] ADD PRIMARY KEY ' + CASE WHEN type = 1 THEN ' CLUSTERED' ELSE '' END ELSE 'CREATE' + CASE WHEN is_unique = 1 THEN ' UNIQUE' ELSE '' END + CASE WHEN type = 1 THEN ' CLUSTERED' ELSE '' END + ' INDEX ' + Ind + ' ON dbo.[' + Tbl + ']' END + ' (' + KeyCols + ')' + IncClause + CASE WHEN filter_def IS NOT NULL THEN ' WHERE ' + filter_def ELSE '' END + CASE WHEN data_comp IS NOT NULL THEN ' WITH (DATA_COMPRESSION = ' + data_comp + ')' ELSE '' END + CASE WHEN @AddPartClause = 1 THEN PartClause ELSE '' END + FROM (SELECT O.Name AS Tbl, + I.Name AS 
Ind, + isnull((SELECT TOP 1 CASE WHEN data_compression_desc = 'PAGE' THEN 'PAGE' END + FROM sys.partitions AS P + WHERE P.object_id = I.object_id + AND I.index_id = P.index_id), (SELECT NULLIF (PropertyValue, 'NONE') + FROM dbo.IndexProperties + WHERE TableName = O.Name + AND IndexName = I.Name + AND PropertyName = 'DATA_COMPRESSION')) AS data_comp, + replace(replace(replace(replace(I.filter_definition, '[', ''), ']', ''), '(', ''), ')', '') AS filter_def, + I.is_unique, + I.is_primary_key, + I.type, + KeyCols, + CASE WHEN IncCols IS NOT NULL THEN ' INCLUDE (' + IncCols + ')' ELSE '' END AS IncClause, + CASE WHEN EXISTS (SELECT * + FROM sys.partition_schemes AS S + WHERE S.data_space_id = I.data_space_id + AND name = 'PartitionScheme_ResourceTypeId') THEN ' ON PartitionScheme_ResourceTypeId (ResourceTypeId)' ELSE '' END AS PartClause + FROM sys.indexes AS I + INNER JOIN + sys.objects AS O + ON O.object_id = I.object_id CROSS APPLY (SELECT string_agg(CASE WHEN IC.key_ordinal > 0 + AND IC.is_included_column = 0 THEN C.name END, ',') WITHIN GROUP (ORDER BY key_ordinal) AS KeyCols, + string_agg(CASE WHEN IC.is_included_column = 1 THEN C.name END, ',') WITHIN GROUP (ORDER BY key_ordinal) AS IncCols + FROM sys.index_columns AS IC + INNER JOIN + sys.columns AS C + ON C.object_id = IC.object_id + AND C.column_id = IC.column_id + WHERE IC.object_id = I.object_id + AND IC.index_id = I.index_id + GROUP BY IC.object_id, IC.index_id) AS IC + WHERE O.name = @Tbl + AND (@Ind IS NULL + OR I.name = @Ind) + AND (@IncludeClustered = 1 + OR index_id > 1)) AS A; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Indexes', @Action = 'Insert', @Rows = @@rowcount; + IF @Ind IS NULL + SELECT Ind, + Txt + FROM @Indexes; + ELSE + SET @Txt = (SELECT Txt + FROM @Indexes); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Text = @Txt; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = 
@SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetJobs +@QueueType TINYINT, @JobId BIGINT=NULL, @JobIds BigintList READONLY, @GroupId BIGINT=NULL, @ReturnDefinition BIT=1 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetJobs', @Mode AS VARCHAR (100) = 'Q=' + isnull(CONVERT (VARCHAR, @QueueType), 'NULL') + ' J=' + isnull(CONVERT (VARCHAR, @JobId), 'NULL') + ' G=' + isnull(CONVERT (VARCHAR, @GroupId), 'NULL'), @st AS DATETIME = getUTCdate(), @PartitionId AS TINYINT = @JobId % 16; +BEGIN TRY + IF @JobId IS NULL + AND @GroupId IS NULL + AND NOT EXISTS (SELECT * + FROM @JobIds) + RAISERROR ('@JobId = NULL and @GroupId = NULL and @JobIds is empty', 18, 127); + IF @JobId IS NOT NULL + SELECT GroupId, + JobId, + CASE WHEN @ReturnDefinition = 1 THEN Definition ELSE NULL END AS Definition, + Version, + Status, + Priority, + Data, + Result, + CreateDate, + StartDate, + EndDate, + HeartbeatDate, + CancelRequested + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = isnull(@JobId, -1) + AND Status <> 5; + ELSE + IF @GroupId IS NOT NULL + SELECT GroupId, + JobId, + CASE WHEN @ReturnDefinition = 1 THEN Definition ELSE NULL END AS Definition, + Version, + Status, + Priority, + Data, + Result, + CreateDate, + StartDate, + EndDate, + HeartbeatDate, + CancelRequested + FROM dbo.JobQueue WITH (INDEX (IX_QueueType_GroupId)) + WHERE QueueType = @QueueType + AND GroupId = isnull(@GroupId, -1) + AND Status <> 5; + ELSE + SELECT GroupId, + JobId, + CASE WHEN @ReturnDefinition = 1 THEN Definition ELSE NULL END AS Definition, + Version, + Status, + Priority, + Data, + Result, + CreateDate, + StartDate, + EndDate, + HeartbeatDate, + CancelRequested + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND JobId IN (SELECT Id + FROM @JobIds) + AND PartitionId = JobId % 16 + AND Status <> 5; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = 
@@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE OR ALTER PROCEDURE dbo.GetNonCompletedJobCountOfSpecificQueueType +@queueType TINYINT +AS +BEGIN + SET NOCOUNT ON; + SELECT COUNT(*) + FROM dbo.JobQueue + WHERE QueueType = @queueType + AND (Status = 0 + OR Status = 1); +END + +GO +CREATE PROCEDURE dbo.GetPartitionedTables +@IncludeNotDisabled BIT, @IncludeNotSupported BIT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetPartitionedTables', @Mode AS VARCHAR (200) = 'PS=PartitionScheme_ResourceTypeId D=' + isnull(CONVERT (VARCHAR, @IncludeNotDisabled), 'NULL') + ' S=' + isnull(CONVERT (VARCHAR, @IncludeNotSupported), 'NULL'), @st AS DATETIME = getUTCdate(); +DECLARE @NotSupportedTables TABLE ( + id INT PRIMARY KEY); +BEGIN TRY + INSERT INTO @NotSupportedTables + SELECT DISTINCT O.object_id + FROM sys.indexes AS I + INNER JOIN + sys.objects AS O + ON O.object_id = I.object_id + WHERE O.type = 'u' + AND EXISTS (SELECT * + FROM sys.partition_schemes AS PS + WHERE PS.data_space_id = I.data_space_id + AND name = 'PartitionScheme_ResourceTypeId') + AND (NOT EXISTS (SELECT * + FROM sys.index_columns AS IC + INNER JOIN + sys.columns AS C + ON C.object_id = IC.object_id + AND C.column_id = IC.column_id + WHERE IC.object_id = I.object_id + AND IC.index_id = I.index_id + AND IC.key_ordinal > 0 + AND IC.is_included_column = 0 + AND C.name = 'ResourceTypeId') + OR EXISTS (SELECT * + FROM sys.indexes AS NSI + WHERE NSI.object_id = O.object_id + AND NOT EXISTS (SELECT * + FROM sys.partition_schemes AS PS + WHERE PS.data_space_id = NSI.data_space_id + AND name = 'PartitionScheme_ResourceTypeId'))); + SELECT CONVERT (VARCHAR (100), O.name), + CONVERT (BIT, CASE WHEN EXISTS (SELECT * + FROM @NotSupportedTables AS NSI + WHERE NSI.id = O.object_id) THEN 0 ELSE 1 END) + FROM sys.indexes AS I + INNER JOIN + sys.objects AS O + ON 
O.object_id = I.object_id + WHERE O.type = 'u' + AND I.index_id IN (0, 1) + AND EXISTS (SELECT * + FROM sys.partition_schemes AS PS + WHERE PS.data_space_id = I.data_space_id + AND name = 'PartitionScheme_ResourceTypeId') + AND EXISTS (SELECT * + FROM sys.index_columns AS IC + INNER JOIN + sys.columns AS C + ON C.object_id = I.object_id + AND C.column_id = IC.column_id + AND IC.is_included_column = 0 + AND C.name = 'ResourceTypeId') + AND (@IncludeNotSupported = 1 + OR NOT EXISTS (SELECT * + FROM @NotSupportedTables AS NSI + WHERE NSI.id = O.object_id)) + AND (@IncludeNotDisabled = 1 + OR EXISTS (SELECT * + FROM sys.indexes AS D + WHERE D.object_id = O.object_id + AND D.is_disabled = 1)) + ORDER BY 1; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetReindexJobById +@id VARCHAR (64) +AS +SET NOCOUNT ON; +SELECT RawJobRecord, + JobVersion +FROM dbo.ReindexJob +WHERE Id = @id; + +GO +CREATE PROCEDURE dbo.GetResources +@ResourceKeys dbo.ResourceKeyList READONLY +AS +SET NOCOUNT ON; +DECLARE @st AS DATETIME = getUTCdate(), @SP AS VARCHAR (100) = 'GetResources', @InputRows AS INT, @DummyTop AS BIGINT = 9223372036854775807, @NotNullVersionExists AS BIT, @NullVersionExists AS BIT, @MinRT AS SMALLINT, @MaxRT AS SMALLINT; +SELECT @MinRT = min(ResourceTypeId), + @MaxRT = max(ResourceTypeId), + @InputRows = count(*), + @NotNullVersionExists = max(CASE WHEN Version IS NOT NULL THEN 1 ELSE 0 END), + @NullVersionExists = max(CASE WHEN Version IS NULL THEN 1 ELSE 0 END) +FROM @ResourceKeys; +DECLARE @Mode AS VARCHAR (100) = 'RT=[' + CONVERT (VARCHAR, @MinRT) + ',' + CONVERT (VARCHAR, @MaxRT) + '] Cnt=' + CONVERT (VARCHAR, @InputRows) + ' NNVE=' + CONVERT (VARCHAR, @NotNullVersionExists) + ' NVE=' + CONVERT (VARCHAR, 
@NullVersionExists); +BEGIN TRY + IF @NotNullVersionExists = 1 + IF @NullVersionExists = 0 + SELECT B.ResourceTypeId, + B.ResourceId, + ResourceSurrogateId, + B.Version, + IsDeleted, + IsHistory, + RawResource, + IsRawResourceMetaSet, + SearchParamHash + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceKeys) AS A + INNER JOIN + dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId_Version)) + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + AND B.Version = A.Version + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + ELSE + SELECT * + FROM (SELECT B.ResourceTypeId, + B.ResourceId, + ResourceSurrogateId, + B.Version, + IsDeleted, + IsHistory, + RawResource, + IsRawResourceMetaSet, + SearchParamHash + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceKeys + WHERE Version IS NOT NULL) AS A + INNER JOIN + dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId_Version)) + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + AND B.Version = A.Version + UNION ALL + SELECT B.ResourceTypeId, + B.ResourceId, + ResourceSurrogateId, + B.Version, + IsDeleted, + IsHistory, + RawResource, + IsRawResourceMetaSet, + SearchParamHash + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceKeys + WHERE Version IS NULL) AS A + INNER JOIN + dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId)) + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + WHERE IsHistory = 0) AS A + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + ELSE + SELECT B.ResourceTypeId, + B.ResourceId, + ResourceSurrogateId, + B.Version, + IsDeleted, + IsHistory, + RawResource, + IsRawResourceMetaSet, + SearchParamHash + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceKeys) AS A + INNER JOIN + dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId)) + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + WHERE IsHistory = 0 + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + 
EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetResourcesByTransactionId +@TransactionId BIGINT, @IncludeHistory BIT=0, @ReturnResourceKeysOnly BIT=0 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = 'T=' + CONVERT (VARCHAR, @TransactionId) + ' H=' + CONVERT (VARCHAR, @IncludeHistory), @st AS DATETIME = getUTCdate(), @DummyTop AS BIGINT = 9223372036854775807, @TypeId AS SMALLINT; +BEGIN TRY + DECLARE @Types TABLE ( + TypeId SMALLINT PRIMARY KEY, + Name VARCHAR (100)); + INSERT INTO @Types + EXECUTE dbo.GetUsedResourceTypes ; + DECLARE @Keys TABLE ( + TypeId SMALLINT, + SurrogateId BIGINT PRIMARY KEY (TypeId, SurrogateId)); + WHILE EXISTS (SELECT * + FROM @Types) + BEGIN + SET @TypeId = (SELECT TOP 1 TypeId + FROM @Types + ORDER BY TypeId); + INSERT INTO @Keys + SELECT @TypeId, + ResourceSurrogateId + FROM dbo.Resource + WHERE ResourceTypeId = @TypeId + AND TransactionId = @TransactionId; + DELETE @Types + WHERE TypeId = @TypeId; + END + IF @ReturnResourceKeysOnly = 0 + SELECT ResourceTypeId, + ResourceId, + ResourceSurrogateId, + Version, + IsDeleted, + IsHistory, + RawResource, + IsRawResourceMetaSet, + SearchParamHash, + RequestMethod + FROM (SELECT TOP (@DummyTop) * + FROM @Keys) AS A + INNER JOIN + dbo.Resource AS B + ON ResourceTypeId = TypeId + AND ResourceSurrogateId = SurrogateId + WHERE IsHistory = 0 + OR @IncludeHistory = 1 + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + ELSE + SELECT ResourceTypeId, + ResourceId, + ResourceSurrogateId, + Version, + IsDeleted + FROM (SELECT TOP (@DummyTop) * + FROM @Keys) AS A + INNER JOIN + dbo.Resource AS B + ON ResourceTypeId = TypeId + AND ResourceSurrogateId = SurrogateId + WHERE IsHistory = 0 + OR @IncludeHistory = 
1 + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetResourcesByTypeAndSurrogateIdRange +@ResourceTypeId SMALLINT, @StartId BIGINT, @EndId BIGINT, @GlobalStartId BIGINT=NULL, @GlobalEndId BIGINT=NULL, @IncludeHistory BIT=0, @IncludeDeleted BIT=0 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetResourcesByTypeAndSurrogateIdRange', @Mode AS VARCHAR (100) = 'RT=' + isnull(CONVERT (VARCHAR, @ResourceTypeId), 'NULL') + ' S=' + isnull(CONVERT (VARCHAR, @StartId), 'NULL') + ' E=' + isnull(CONVERT (VARCHAR, @EndId), 'NULL') + ' GE=' + isnull(CONVERT (VARCHAR, @GlobalEndId), 'NULL') + ' HI=' + isnull(CONVERT (VARCHAR, @IncludeHistory), 'NULL') + ' DE' + isnull(CONVERT (VARCHAR, @IncludeDeleted), 'NULL'), @st AS DATETIME = getUTCdate(), @DummyTop AS BIGINT = 9223372036854775807; +BEGIN TRY + DECLARE @ResourceIds TABLE ( + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS PRIMARY KEY); + DECLARE @SurrogateIds TABLE ( + MaxSurrogateId BIGINT PRIMARY KEY); + IF @GlobalEndId IS NOT NULL + AND @IncludeHistory = 0 + BEGIN + INSERT INTO @ResourceIds + SELECT DISTINCT ResourceId + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId BETWEEN @StartId AND @EndId + AND IsHistory = 1 + AND (IsDeleted = 0 + OR @IncludeDeleted = 1) + OPTION (MAXDOP 1); + IF @@rowcount > 0 + INSERT INTO @SurrogateIds + SELECT ResourceSurrogateId + FROM (SELECT ResourceId, + ResourceSurrogateId, + row_number() OVER (PARTITION BY ResourceId ORDER BY ResourceSurrogateId DESC) AS RowId + FROM dbo.Resource WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId_Version)) + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceId IN (SELECT TOP (@DummyTop) ResourceId + FROM @ResourceIds) + 
AND ResourceSurrogateId BETWEEN @StartId AND @GlobalEndId) AS A + WHERE RowId = 1 + AND ResourceSurrogateId BETWEEN @StartId AND @EndId + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + END + SELECT ResourceTypeId, + ResourceId, + Version, + IsDeleted, + ResourceSurrogateId, + RequestMethod, + CONVERT (BIT, 1) AS IsMatch, + CONVERT (BIT, 0) AS IsPartial, + IsRawResourceMetaSet, + SearchParamHash, + RawResource + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId BETWEEN @StartId AND @EndId + AND (IsHistory = 0 + OR @IncludeHistory = 1) + AND (IsDeleted = 0 + OR @IncludeDeleted = 1) + UNION ALL + SELECT ResourceTypeId, + ResourceId, + Version, + IsDeleted, + ResourceSurrogateId, + RequestMethod, + CONVERT (BIT, 1) AS IsMatch, + CONVERT (BIT, 0) AS IsPartial, + IsRawResourceMetaSet, + SearchParamHash, + RawResource + FROM @SurrogateIds + INNER JOIN + dbo.Resource + ON ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId = MaxSurrogateId + WHERE IsHistory = 1 + AND (IsDeleted = 0 + OR @IncludeDeleted = 1) + OPTION (MAXDOP 1); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetResourceSearchParamStats +@Table VARCHAR (100)=NULL, @ResourceTypeId SMALLINT=NULL, @SearchParamId SMALLINT=NULL +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (200) = 'T=' + isnull(@Table, 'NULL') + ' RT=' + isnull(CONVERT (VARCHAR, @ResourceTypeId), 'NULL') + ' SP=' + isnull(CONVERT (VARCHAR, @SearchParamId), 'NULL'), @st AS DATETIME = getUTCdate(); +BEGIN TRY + SELECT T.name AS TableName, + S.name AS StatsName, + db_name() AS DatabaseName + FROM sys.stats AS S + INNER JOIN + sys.tables AS T + ON T.object_id = S.object_id + WHERE T.name LIKE 
'%SearchParam' + AND T.name <> 'SearchParam' + AND S.name LIKE 'ST[_]%' + AND (T.name LIKE @Table + OR @Table IS NULL) + AND (S.name LIKE '%ResourceTypeId[_]' + CONVERT (VARCHAR, @ResourceTypeId) + '[_]%' + OR @ResourceTypeId IS NULL) + AND (S.name LIKE '%SearchParamId[_]' + CONVERT (VARCHAR, @SearchParamId) + OR @SearchParamId IS NULL); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Rows = @@rowcount, @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetResourceSurrogateIdRanges +@ResourceTypeId SMALLINT, @StartId BIGINT, @EndId BIGINT, @RangeSize INT, @NumberOfRanges INT=100, @Up BIT=1 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetResourceSurrogateIdRanges', @Mode AS VARCHAR (100) = 'RT=' + isnull(CONVERT (VARCHAR, @ResourceTypeId), 'NULL') + ' S=' + isnull(CONVERT (VARCHAR, @StartId), 'NULL') + ' E=' + isnull(CONVERT (VARCHAR, @EndId), 'NULL') + ' R=' + isnull(CONVERT (VARCHAR, @RangeSize), 'NULL') + ' UP=' + isnull(CONVERT (VARCHAR, @Up), 'NULL'), @st AS DATETIME = getUTCdate(); +BEGIN TRY + IF @Up = 1 + SELECT RangeId, + min(ResourceSurrogateId), + max(ResourceSurrogateId), + count(*) + FROM (SELECT isnull(CONVERT (INT, (row_number() OVER (ORDER BY ResourceSurrogateId) - 1) / @RangeSize), 0) AS RangeId, + ResourceSurrogateId + FROM (SELECT TOP (@RangeSize * @NumberOfRanges) ResourceSurrogateId + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId >= @StartId + AND ResourceSurrogateId <= @EndId + ORDER BY ResourceSurrogateId) AS A) AS A + GROUP BY RangeId + OPTION (MAXDOP 1); + ELSE + SELECT RangeId, + min(ResourceSurrogateId), + max(ResourceSurrogateId), + count(*) + FROM (SELECT isnull(CONVERT (INT, (row_number() OVER (ORDER BY ResourceSurrogateId) - 1) / @RangeSize), 0) AS RangeId, + ResourceSurrogateId + FROM (SELECT TOP (@RangeSize * 
@NumberOfRanges) ResourceSurrogateId + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId >= @StartId + AND ResourceSurrogateId <= @EndId + ORDER BY ResourceSurrogateId DESC) AS A) AS A + GROUP BY RangeId + OPTION (MAXDOP 1); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetResourceVersions +@ResourceDateKeys dbo.ResourceDateKeyList READONLY +AS +SET NOCOUNT ON; +DECLARE @st AS DATETIME = getUTCdate(), @SP AS VARCHAR (100) = 'GetResourceVersions', @Mode AS VARCHAR (100) = 'Rows=' + CONVERT (VARCHAR, (SELECT count(*) + FROM @ResourceDateKeys)), @DummyTop AS BIGINT = 9223372036854775807; +BEGIN TRY + SELECT A.ResourceTypeId, + A.ResourceId, + A.ResourceSurrogateId, + CASE WHEN EXISTS (SELECT * + FROM dbo.Resource AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId) THEN 0 WHEN isnull(U.Version, 1) - isnull(L.Version, 0) > 1 THEN isnull(U.Version, 1) - 1 ELSE 0 END AS Version + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceDateKeys) AS A OUTER APPLY (SELECT TOP 1 * + FROM dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId_Version)) + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + AND B.ResourceSurrogateId < A.ResourceSurrogateId + ORDER BY B.ResourceSurrogateId DESC) AS L OUTER APPLY (SELECT TOP 1 * + FROM dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId_Version)) + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + AND B.ResourceSurrogateId > A.ResourceSurrogateId + ORDER BY B.ResourceSurrogateId) AS U + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; 
+END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetSearchParamStatuses +AS +SET NOCOUNT ON; +SELECT SearchParamId, + Uri, + Status, + LastUpdated, + IsPartiallySupported +FROM dbo.SearchParam; + +GO +CREATE PROCEDURE dbo.GetTransactions +@StartNotInclusiveTranId BIGINT, @EndInclusiveTranId BIGINT, @EndDate DATETIME=NULL +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = 'ST=' + CONVERT (VARCHAR, @StartNotInclusiveTranId) + ' ET=' + CONVERT (VARCHAR, @EndInclusiveTranId) + ' ED=' + isnull(CONVERT (VARCHAR, @EndDate, 121), 'NULL'), @st AS DATETIME = getUTCdate(); +IF @EndDate IS NULL + SET @EndDate = getUTCdate(); +SELECT SurrogateIdRangeFirstValue, + VisibleDate, + InvisibleHistoryRemovedDate +FROM dbo.Transactions +WHERE SurrogateIdRangeFirstValue > @StartNotInclusiveTranId + AND SurrogateIdRangeFirstValue <= @EndInclusiveTranId + AND EndDate <= @EndDate +ORDER BY SurrogateIdRangeFirstValue; +EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; + +GO +CREATE PROCEDURE dbo.GetUsedResourceTypes +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetUsedResourceTypes', @Mode AS VARCHAR (100) = '', @st AS DATETIME = getUTCdate(); +BEGIN TRY + SELECT ResourceTypeId, + Name + FROM dbo.ResourceType AS A + WHERE EXISTS (SELECT * + FROM dbo.Resource AS B + WHERE B.ResourceTypeId = A.ResourceTypeId); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.HardDeleteResource +@ResourceTypeId SMALLINT, @ResourceId VARCHAR (64), @KeepCurrentVersion BIT, @IsResourceChangeCaptureEnabled BIT +AS +SET NOCOUNT ON; 
+DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (200) = 'RT=' + CONVERT (VARCHAR, @ResourceTypeId) + ' R=' + @ResourceId + ' V=' + CONVERT (VARCHAR, @KeepCurrentVersion) + ' CC=' + CONVERT (VARCHAR, @IsResourceChangeCaptureEnabled), @st AS DATETIME = getUTCdate(), @TransactionId AS BIGINT; +BEGIN TRY + IF @IsResourceChangeCaptureEnabled = 1 + EXECUTE dbo.MergeResourcesBeginTransaction @Count = 1, @TransactionId = @TransactionId OUTPUT; + IF @KeepCurrentVersion = 0 + BEGIN TRANSACTION; + DECLARE @SurrogateIds TABLE ( + ResourceSurrogateId BIGINT NOT NULL); + IF @IsResourceChangeCaptureEnabled = 1 + AND NOT EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = 'InvisibleHistory.IsEnabled' + AND Number = 0) + UPDATE dbo.Resource + SET IsDeleted = 1, + RawResource = 0xF, + SearchParamHash = NULL, + HistoryTransactionId = @TransactionId + OUTPUT deleted.ResourceSurrogateId INTO @SurrogateIds + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceId = @ResourceId + AND (@KeepCurrentVersion = 0 + OR IsHistory = 1) + AND RawResource <> 0xF; + ELSE + DELETE dbo.Resource + OUTPUT deleted.ResourceSurrogateId INTO @SurrogateIds + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceId = @ResourceId + AND (@KeepCurrentVersion = 0 + OR IsHistory = 1) + AND RawResource <> 0xF; + IF @KeepCurrentVersion = 0 + BEGIN + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.ResourceWriteClaim AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.ReferenceSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION 
(MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenText AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.StringSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.UriSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.NumberSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.QuantitySearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.DateTimeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.ReferenceTokenCompositeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenTokenCompositeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + 
dbo.TokenDateTimeCompositeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenQuantityCompositeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenStringCompositeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenNumberNumberCompositeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + END + IF @@trancount > 0 + COMMIT TRANSACTION; + IF @IsResourceChangeCaptureEnabled = 1 + EXECUTE dbo.MergeResourcesCommitTransaction @TransactionId; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.InitDefrag +@QueueType TINYINT, @GroupId BIGINT, @DefragItems INT=NULL OUTPUT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'InitDefrag', @st AS DATETIME = getUTCdate(), @ObjectId AS INT, @msg AS VARCHAR (1000), @Rows AS INT, @MinFragPct AS INT = isnull((SELECT Number + FROM dbo.Parameters + WHERE Id = 'Defrag.MinFragPct'), 10), @MinSizeGB AS FLOAT = isnull((SELECT Number + FROM dbo.Parameters + WHERE Id = 'Defrag.MinSizeGB'), 0.1), @DefinitionsSorted AS StringList; +DECLARE @Mode AS VARCHAR (200) = 'G=' + CONVERT (VARCHAR, @GroupId) + ' MF=' + CONVERT (VARCHAR, @MinFragPct) + ' MS=' + 
CONVERT (VARCHAR, @MinSizeGB); +DECLARE @Definitions AS TABLE ( + Def VARCHAR (900) PRIMARY KEY, + FragGB FLOAT ); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + SELECT * + INTO #filter + FROM (SELECT object_id, + sum(reserved_page_count * 8.0 / 1024 / 1024) AS ReservedGB + FROM sys.dm_db_partition_stats AS A + WHERE object_id IN (SELECT object_id + FROM sys.objects + WHERE type = 'U' + AND name NOT IN ('EventLog')) + GROUP BY object_id) AS A + WHERE ReservedGB > @MinSizeGB; + WHILE EXISTS (SELECT * + FROM #filter) + BEGIN + SET @ObjectId = (SELECT TOP 1 object_id + FROM #filter + ORDER BY ReservedGB DESC); + INSERT INTO @Definitions + SELECT object_name(@ObjectId) + ';' + I.name + ';' + CONVERT (VARCHAR, partition_number) + ';' + CONVERT (VARCHAR, CASE WHEN EXISTS (SELECT * + FROM sys.partition_schemes AS PS + WHERE PS.data_space_id = I.data_space_id) THEN 1 ELSE 0 END) + ';' + CONVERT (VARCHAR, (SELECT sum(reserved_page_count) + FROM sys.dm_db_partition_stats AS S + WHERE S.object_id = A.object_id + AND S.index_id = A.index_id + AND S.partition_number = A.partition_number) * 8.0 / 1024 / 1024), + FragGB + FROM (SELECT object_id, + index_id, + partition_number, + A.avg_fragmentation_in_percent * A.page_count * 8.0 / 1024 / 1024 / 100 AS FragGB + FROM sys.dm_db_index_physical_stats(db_id(), @ObjectId, NULL, NULL, 'LIMITED') AS A + WHERE index_id > 0 + AND avg_fragmentation_in_percent >= @MinFragPct + AND A.page_count > 500) AS A + INNER JOIN + sys.indexes AS I + ON I.object_id = A.object_id + AND I.index_id = A.index_id; + SET @Rows = @@rowcount; + SET @msg = object_name(@ObjectId); + EXECUTE dbo.LogEvent @Process = @SP, @Status = 'Run', @Mode = @Mode, @Target = '@Definitions', @Action = 'Insert', @Rows = @Rows, @Text = @msg; + DELETE #filter + WHERE object_id = @ObjectId; + END + INSERT INTO @DefinitionsSorted + SELECT Def + ';' + CONVERT (VARCHAR, FragGB) + FROM @Definitions + ORDER BY FragGB DESC; + SET @DefragItems = 
@@rowcount; + IF @DefragItems > 0 + EXECUTE dbo.EnqueueJobs @QueueType = @QueueType, @Definitions = @DefinitionsSorted, @GroupId = @GroupId, @ForceOneActiveJobGroup = 1; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.InitializeIndexProperties +AS +SET NOCOUNT ON; +INSERT INTO dbo.IndexProperties (TableName, IndexName, PropertyName, PropertyValue) +SELECT Tbl, + Ind, + 'DATA_COMPRESSION', + isnull(data_comp, 'NONE') +FROM (SELECT O.Name AS Tbl, + I.Name AS Ind, + (SELECT TOP 1 CASE WHEN data_compression_desc = 'PAGE' THEN 'PAGE' END + FROM sys.partitions AS P + WHERE P.object_id = I.object_id + AND I.index_id = P.index_id) AS data_comp + FROM sys.indexes AS I + INNER JOIN + sys.objects AS O + ON O.object_id = I.object_id + WHERE O.type = 'u' + AND EXISTS (SELECT * + FROM sys.partition_schemes AS PS + WHERE PS.data_space_id = I.data_space_id + AND name = 'PartitionScheme_ResourceTypeId')) AS A +WHERE NOT EXISTS (SELECT * + FROM dbo.IndexProperties + WHERE TableName = Tbl + AND IndexName = Ind); + +GO +CREATE PROCEDURE dbo.LogEvent +@Process VARCHAR (100), @Status VARCHAR (10), @Mode VARCHAR (200)=NULL, @Action VARCHAR (20)=NULL, @Target VARCHAR (100)=NULL, @Rows BIGINT=NULL, @Start DATETIME=NULL, @Text NVARCHAR (3500)=NULL, @EventId BIGINT=NULL OUTPUT, @Retry INT=NULL +AS +SET NOCOUNT ON; +DECLARE @ErrorNumber AS INT = error_number(), @ErrorMessage AS VARCHAR (1000) = '', @TranCount AS INT = @@trancount, @DoWork AS BIT = 0, @NumberAdded AS BIT; +IF @ErrorNumber IS NOT NULL + OR @Status IN ('Warn', 'Error') + SET @DoWork = 1; +IF @DoWork = 0 + SET @DoWork = CASE WHEN EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = isnull(@Process, '') + AND Char = 'LogEvent') THEN 1 ELSE 0 END; +IF @DoWork = 0 + RETURN; +IF @ErrorNumber IS NOT NULL + SET @ErrorMessage 
= CASE WHEN @Retry IS NOT NULL THEN 'Retry ' + CONVERT (VARCHAR, @Retry) + ', ' ELSE '' END + 'Error ' + CONVERT (VARCHAR, error_number()) + ': ' + CONVERT (VARCHAR (1000), error_message()) + ', Level ' + CONVERT (VARCHAR, error_severity()) + ', State ' + CONVERT (VARCHAR, error_state()) + CASE WHEN error_procedure() IS NOT NULL THEN ', Procedure ' + error_procedure() ELSE '' END + ', Line ' + CONVERT (VARCHAR, error_line()); +IF @TranCount > 0 + AND @ErrorNumber IS NOT NULL + ROLLBACK; +IF databasepropertyex(db_name(), 'UpdateAbility') = 'READ_WRITE' + BEGIN + INSERT INTO dbo.EventLog (Process, Status, Mode, Action, Target, Rows, Milliseconds, EventDate, EventText, SPID, HostName) + SELECT @Process, + @Status, + @Mode, + @Action, + @Target, + @Rows, + datediff(millisecond, @Start, getUTCdate()), + getUTCdate() AS EventDate, + CASE WHEN @ErrorNumber IS NULL THEN @Text ELSE @ErrorMessage + CASE WHEN isnull(@Text, '') <> '' THEN '. ' + @Text ELSE '' END END AS Text, + @@SPID, + host_name() AS HostName; + SET @EventId = scope_identity(); + END +IF @TranCount > 0 + AND @ErrorNumber IS NOT NULL + BEGIN TRANSACTION; + +GO +CREATE PROCEDURE dbo.LogSchemaMigrationProgress +@message VARCHAR (MAX) +AS +INSERT INTO dbo.SchemaMigrationProgress (Message) +VALUES (@message); + +GO +CREATE PROCEDURE dbo.MergeResources +@AffectedRows INT=0 OUTPUT, @RaiseExceptionOnConflict BIT=1, @IsResourceChangeCaptureEnabled BIT=0, @TransactionId BIGINT=NULL, @SingleTransaction BIT=1, @Resources dbo.ResourceList READONLY, @ResourceWriteClaims dbo.ResourceWriteClaimList READONLY, @ReferenceSearchParams dbo.ReferenceSearchParamList READONLY, @TokenSearchParams dbo.TokenSearchParamList READONLY, @TokenTexts dbo.TokenTextList READONLY, @StringSearchParams dbo.StringSearchParamList READONLY, @UriSearchParams dbo.UriSearchParamList READONLY, @NumberSearchParams dbo.NumberSearchParamList READONLY, @QuantitySearchParams dbo.QuantitySearchParamList READONLY, @DateTimeSearchParms 
dbo.DateTimeSearchParamList READONLY, @ReferenceTokenCompositeSearchParams dbo.ReferenceTokenCompositeSearchParamList READONLY, @TokenTokenCompositeSearchParams dbo.TokenTokenCompositeSearchParamList READONLY, @TokenDateTimeCompositeSearchParams dbo.TokenDateTimeCompositeSearchParamList READONLY, @TokenQuantityCompositeSearchParams dbo.TokenQuantityCompositeSearchParamList READONLY, @TokenStringCompositeSearchParams dbo.TokenStringCompositeSearchParamList READONLY, @TokenNumberNumberCompositeSearchParams dbo.TokenNumberNumberCompositeSearchParamList READONLY +AS +SET NOCOUNT ON; +DECLARE @st AS DATETIME = getUTCdate(), @SP AS VARCHAR (100) = object_name(@@procid), @DummyTop AS BIGINT = 9223372036854775807, @InitialTranCount AS INT = @@trancount, @IsRetry AS BIT = 0; +DECLARE @Mode AS VARCHAR (200) = isnull((SELECT 'RT=[' + CONVERT (VARCHAR, min(ResourceTypeId)) + ',' + CONVERT (VARCHAR, max(ResourceTypeId)) + '] Sur=[' + CONVERT (VARCHAR, min(ResourceSurrogateId)) + ',' + CONVERT (VARCHAR, max(ResourceSurrogateId)) + '] V=' + CONVERT (VARCHAR, max(Version)) + ' Rows=' + CONVERT (VARCHAR, count(*)) + FROM @Resources), 'Input=Empty'); +SET @Mode += ' E=' + CONVERT (VARCHAR, @RaiseExceptionOnConflict) + ' CC=' + CONVERT (VARCHAR, @IsResourceChangeCaptureEnabled) + ' IT=' + CONVERT (VARCHAR, @InitialTranCount) + ' T=' + isnull(CONVERT (VARCHAR, @TransactionId), 'NULL'); +SET @AffectedRows = 0; +BEGIN TRY + DECLARE @Existing AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + SurrogateId BIGINT NOT NULL PRIMARY KEY (ResourceTypeId, SurrogateId)); + DECLARE @ResourceInfos AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + SurrogateId BIGINT NOT NULL, + Version INT NOT NULL, + KeepHistory BIT NOT NULL, + PreviousVersion INT NULL, + PreviousSurrogateId BIGINT NULL PRIMARY KEY (ResourceTypeId, SurrogateId)); + DECLARE @PreviousSurrogateIds AS TABLE ( + TypeId SMALLINT NOT NULL, + SurrogateId BIGINT NOT NULL PRIMARY KEY (TypeId, SurrogateId), + KeepHistory BIT ); + IF 
@SingleTransaction = 0 + AND isnull((SELECT Number + FROM dbo.Parameters + WHERE Id = 'MergeResources.NoTransaction.IsEnabled'), 0) = 0 + SET @SingleTransaction = 1; + SET @Mode += ' ST=' + CONVERT (VARCHAR, @SingleTransaction); + IF @InitialTranCount = 0 + BEGIN + IF EXISTS (SELECT * + FROM @Resources AS A + INNER JOIN + dbo.Resource AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId) + BEGIN + BEGIN TRANSACTION; + INSERT INTO @Existing (ResourceTypeId, SurrogateId) + SELECT B.ResourceTypeId, + B.ResourceSurrogateId + FROM (SELECT TOP (@DummyTop) * + FROM @Resources) AS A + INNER JOIN + dbo.Resource AS B WITH (ROWLOCK, HOLDLOCK) + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + WHERE B.IsHistory = 0 + AND B.ResourceId = A.ResourceId + AND B.Version = A.Version + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + IF @@rowcount = (SELECT count(*) + FROM @Resources) + SET @IsRetry = 1; + IF @IsRetry = 0 + COMMIT TRANSACTION; + END + END + SET @Mode += ' R=' + CONVERT (VARCHAR, @IsRetry); + IF @SingleTransaction = 1 + AND @@trancount = 0 + BEGIN TRANSACTION; + IF @IsRetry = 0 + BEGIN + INSERT INTO @ResourceInfos (ResourceTypeId, SurrogateId, Version, KeepHistory, PreviousVersion, PreviousSurrogateId) + SELECT A.ResourceTypeId, + A.ResourceSurrogateId, + A.Version, + A.KeepHistory, + B.Version, + B.ResourceSurrogateId + FROM (SELECT TOP (@DummyTop) * + FROM @Resources + WHERE HasVersionToCompare = 1) AS A + LEFT OUTER JOIN + dbo.Resource AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + AND B.IsHistory = 0 + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + IF @RaiseExceptionOnConflict = 1 + AND EXISTS (SELECT * + FROM @ResourceInfos + WHERE PreviousVersion IS NOT NULL + AND Version <= PreviousVersion) + THROW 50409, 'Resource has been recently updated or added, please compare the resource content in code for any duplicate updates', 1; + INSERT 
INTO @PreviousSurrogateIds + SELECT ResourceTypeId, + PreviousSurrogateId, + KeepHistory + FROM @ResourceInfos + WHERE PreviousSurrogateId IS NOT NULL; + IF @@rowcount > 0 + BEGIN + UPDATE dbo.Resource + SET IsHistory = 1 + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId + AND KeepHistory = 1); + SET @AffectedRows += @@rowcount; + IF @IsResourceChangeCaptureEnabled = 1 + AND NOT EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = 'InvisibleHistory.IsEnabled' + AND Number = 0) + UPDATE dbo.Resource + SET IsHistory = 1, + RawResource = 0xF, + SearchParamHash = NULL, + HistoryTransactionId = @TransactionId + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId + AND KeepHistory = 0); + ELSE + DELETE dbo.Resource + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId + AND KeepHistory = 0); + SET @AffectedRows += @@rowcount; + DELETE dbo.ResourceWriteClaim + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.ReferenceSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenText + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.StringSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE 
dbo.UriSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.NumberSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.QuantitySearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.DateTimeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.ReferenceTokenCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenTokenCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenDateTimeCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenQuantityCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenStringCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenNumberNumberCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET 
@AffectedRows += @@rowcount; + END + INSERT INTO dbo.Resource (ResourceTypeId, ResourceId, Version, IsHistory, ResourceSurrogateId, IsDeleted, RequestMethod, RawResource, IsRawResourceMetaSet, SearchParamHash, TransactionId) + SELECT ResourceTypeId, + ResourceId, + Version, + IsHistory, + ResourceSurrogateId, + IsDeleted, + RequestMethod, + RawResource, + IsRawResourceMetaSet, + SearchParamHash, + @TransactionId + FROM @Resources; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.ResourceWriteClaim (ResourceSurrogateId, ClaimTypeId, ClaimValue) + SELECT ResourceSurrogateId, + ClaimTypeId, + ClaimValue + FROM @ResourceWriteClaims; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.ReferenceSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId, ReferenceResourceVersion) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + BaseUri, + ReferenceResourceTypeId, + ReferenceResourceId, + ReferenceResourceVersion + FROM @ReferenceSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, Code, CodeOverflow) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId, + Code, + CodeOverflow + FROM @TokenSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenText (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Text + FROM @TokenTexts; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.StringSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text, TextOverflow, IsMin, IsMax) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Text, + TextOverflow, + IsMin, + IsMax + FROM @StringSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.UriSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Uri) + SELECT ResourceTypeId, + ResourceSurrogateId, + 
SearchParamId, + Uri + FROM @UriSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.NumberSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SingleValue, + LowValue, + HighValue + FROM @NumberSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.QuantitySearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId, + QuantityCodeId, + SingleValue, + LowValue, + HighValue + FROM @QuantitySearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.DateTimeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, IsMin, IsMax) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + StartDateTime, + EndDateTime, + IsLongerThanADay, + IsMin, + IsMax + FROM @DateTimeSearchParms; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.ReferenceTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri1, ReferenceResourceTypeId1, ReferenceResourceId1, ReferenceResourceVersion1, SystemId2, Code2, CodeOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + BaseUri1, + ReferenceResourceTypeId1, + ReferenceResourceId1, + ReferenceResourceVersion1, + SystemId2, + Code2, + CodeOverflow2 + FROM @ReferenceTokenCompositeSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SystemId2, Code2, CodeOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SystemId2, + Code2, + CodeOverflow2 + FROM @TokenTokenCompositeSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO 
dbo.TokenDateTimeCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, StartDateTime2, EndDateTime2, IsLongerThanADay2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + StartDateTime2, + EndDateTime2, + IsLongerThanADay2 + FROM @TokenDateTimeCompositeSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenQuantityCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, SystemId2, QuantityCodeId2, LowValue2, HighValue2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SingleValue2, + SystemId2, + QuantityCodeId2, + LowValue2, + HighValue2 + FROM @TokenQuantityCompositeSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenStringCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, Text2, TextOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + Text2, + TextOverflow2 + FROM @TokenStringCompositeSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenNumberNumberCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, LowValue2, HighValue2, SingleValue3, LowValue3, HighValue3, HasRange) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SingleValue2, + LowValue2, + HighValue2, + SingleValue3, + LowValue3, + HighValue3, + HasRange + FROM @TokenNumberNumberCompositeSearchParams; + SET @AffectedRows += @@rowcount; + END + ELSE + BEGIN + INSERT INTO dbo.ResourceWriteClaim (ResourceSurrogateId, ClaimTypeId, ClaimValue) + SELECT ResourceSurrogateId, + ClaimTypeId, + ClaimValue + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceWriteClaims) AS A + WHERE EXISTS (SELECT * 
+ FROM @Existing AS B + WHERE B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.ResourceWriteClaim AS C + WHERE C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.ReferenceSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId, ReferenceResourceVersion) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + BaseUri, + ReferenceResourceTypeId, + ReferenceResourceId, + ReferenceResourceVersion + FROM (SELECT TOP (@DummyTop) * + FROM @ReferenceSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.ReferenceSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, Code, CodeOverflow) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId, + Code, + CodeOverflow + FROM (SELECT TOP (@DummyTop) * + FROM @TokenSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.TokenSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenText (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Text + FROM (SELECT TOP (@DummyTop) * + FROM @TokenTexts) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE 
B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.TokenText AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.StringSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text, TextOverflow, IsMin, IsMax) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Text, + TextOverflow, + IsMin, + IsMax + FROM (SELECT TOP (@DummyTop) * + FROM @StringSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.StringSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.UriSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Uri) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Uri + FROM (SELECT TOP (@DummyTop) * + FROM @UriSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.UriSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.NumberSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SingleValue, + LowValue, + HighValue + FROM (SELECT TOP (@DummyTop) * + FROM @NumberSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId =
A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.NumberSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.QuantitySearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId, + QuantityCodeId, + SingleValue, + LowValue, + HighValue + FROM (SELECT TOP (@DummyTop) * + FROM @QuantitySearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.QuantitySearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.DateTimeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, IsMin, IsMax) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + StartDateTime, + EndDateTime, + IsLongerThanADay, + IsMin, + IsMax + FROM (SELECT TOP (@DummyTop) * + FROM @DateTimeSearchParms) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.DateTimeSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.ReferenceTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri1, ReferenceResourceTypeId1, ReferenceResourceId1, ReferenceResourceVersion1, SystemId2, Code2, CodeOverflow2) + SELECT ResourceTypeId, +
ResourceSurrogateId, + SearchParamId, + BaseUri1, + ReferenceResourceTypeId1, + ReferenceResourceId1, + ReferenceResourceVersion1, + SystemId2, + Code2, + CodeOverflow2 + FROM (SELECT TOP (@DummyTop) * + FROM @ReferenceTokenCompositeSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.ReferenceTokenCompositeSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SystemId2, Code2, CodeOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SystemId2, + Code2, + CodeOverflow2 + FROM (SELECT TOP (@DummyTop) * + FROM @TokenTokenCompositeSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.TokenTokenCompositeSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenDateTimeCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, StartDateTime2, EndDateTime2, IsLongerThanADay2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + StartDateTime2, + EndDateTime2, + IsLongerThanADay2 + FROM (SELECT TOP (@DummyTop) * + FROM @TokenDateTimeCompositeSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS
(SELECT * + FROM dbo.TokenDateTimeCompositeSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenQuantityCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, SystemId2, QuantityCodeId2, LowValue2, HighValue2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SingleValue2, + SystemId2, + QuantityCodeId2, + LowValue2, + HighValue2 + FROM (SELECT TOP (@DummyTop) * + FROM @TokenQuantityCompositeSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.TokenQuantityCompositeSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenStringCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, Text2, TextOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + Text2, + TextOverflow2 + FROM (SELECT TOP (@DummyTop) * + FROM @TokenStringCompositeSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.TokenStringCompositeSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenNumberNumberCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, 
CodeOverflow1, SingleValue2, LowValue2, HighValue2, SingleValue3, LowValue3, HighValue3, HasRange) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SingleValue2, + LowValue2, + HighValue2, + SingleValue3, + LowValue3, + HighValue3, + HasRange + FROM (SELECT TOP (@DummyTop) * + FROM @TokenNumberNumberCompositeSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.TokenNumberNumberCompositeSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + END + IF @IsResourceChangeCaptureEnabled = 1 + EXECUTE dbo.CaptureResourceIdsForChanges @Resources; + IF @TransactionId IS NOT NULL + EXECUTE dbo.MergeResourcesCommitTransaction @TransactionId; + IF @InitialTranCount = 0 + AND @@trancount > 0 + COMMIT TRANSACTION; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @AffectedRows; +END TRY +BEGIN CATCH + IF @InitialTranCount = 0 + AND @@trancount > 0 + ROLLBACK; + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + IF @RaiseExceptionOnConflict = 1 + AND error_number() IN (2601, 2627) + AND error_message() LIKE '%''dbo.Resource''%' + THROW 50409, 'Resource has been recently updated or added, please compare the resource content in code for any duplicate updates', 1; + ELSE + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.MergeResourcesAdvanceTransactionVisibility +@AffectedRows INT=0 OUTPUT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = '', @st AS DATETIME = getUTCdate(), @msg AS VARCHAR (1000), @MaxTransactionId AS BIGINT, @MinTransactionId AS BIGINT, @MinNotCompletedTransactionId 
AS BIGINT, @CurrentTransactionId AS BIGINT; +SET @AffectedRows = 0; +BEGIN TRY + EXECUTE dbo.MergeResourcesGetTransactionVisibility @MinTransactionId OUTPUT; + SET @MinTransactionId += 1; + SET @CurrentTransactionId = (SELECT TOP 1 SurrogateIdRangeFirstValue + FROM dbo.Transactions + ORDER BY SurrogateIdRangeFirstValue DESC); + SET @MinNotCompletedTransactionId = isnull((SELECT TOP 1 SurrogateIdRangeFirstValue + FROM dbo.Transactions + WHERE IsCompleted = 0 + AND SurrogateIdRangeFirstValue BETWEEN @MinTransactionId AND @CurrentTransactionId + ORDER BY SurrogateIdRangeFirstValue), @CurrentTransactionId + 1); + SET @MaxTransactionId = (SELECT TOP 1 SurrogateIdRangeFirstValue + FROM dbo.Transactions + WHERE IsCompleted = 1 + AND SurrogateIdRangeFirstValue BETWEEN @MinTransactionId AND @CurrentTransactionId + AND SurrogateIdRangeFirstValue < @MinNotCompletedTransactionId + ORDER BY SurrogateIdRangeFirstValue DESC); + IF @MaxTransactionId >= @MinTransactionId + BEGIN + UPDATE A + SET IsVisible = 1, + VisibleDate = getUTCdate() + FROM dbo.Transactions AS A WITH (INDEX (1)) + WHERE SurrogateIdRangeFirstValue BETWEEN @MinTransactionId AND @CurrentTransactionId + AND SurrogateIdRangeFirstValue <= @MaxTransactionId; + SET @AffectedRows += @@rowcount; + END + SET @msg = 'Min=' + CONVERT (VARCHAR, @MinTransactionId) + ' C=' + CONVERT (VARCHAR, @CurrentTransactionId) + ' MinNC=' + CONVERT (VARCHAR, @MinNotCompletedTransactionId) + ' Max=' + CONVERT (VARCHAR, @MaxTransactionId); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @AffectedRows, @Text = @msg; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.MergeResourcesBeginTransaction +@Count INT, @TransactionId BIGINT OUTPUT, @SequenceRangeFirstValue INT=NULL OUTPUT, @HeartbeatDate DATETIME=NULL +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 
'MergeResourcesBeginTransaction', @Mode AS VARCHAR (200) = 'Cnt=' + CONVERT (VARCHAR, @Count), @st AS DATETIME = getUTCdate(), @FirstValueVar AS SQL_VARIANT, @LastValueVar AS SQL_VARIANT; +BEGIN TRY + SET @TransactionId = NULL; + IF @@trancount > 0 + RAISERROR ('MergeResourcesBeginTransaction cannot be called inside outer transaction.', 18, 127); + SET @FirstValueVar = NULL; + WHILE @FirstValueVar IS NULL + BEGIN + EXECUTE sys.sp_sequence_get_range @sequence_name = 'dbo.ResourceSurrogateIdUniquifierSequence', @range_size = @Count, @range_first_value = @FirstValueVar OUTPUT, @range_last_value = @LastValueVar OUTPUT; + SET @SequenceRangeFirstValue = CONVERT (INT, @FirstValueVar); + IF @SequenceRangeFirstValue > CONVERT (INT, @LastValueVar) + SET @FirstValueVar = NULL; + END + SET @TransactionId = datediff_big(millisecond, '0001-01-01', sysUTCdatetime()) * 80000 + @SequenceRangeFirstValue; + INSERT INTO dbo.Transactions (SurrogateIdRangeFirstValue, SurrogateIdRangeLastValue, HeartbeatDate) + SELECT @TransactionId, + @TransactionId + @Count - 1, + isnull(@HeartbeatDate, getUTCdate()); +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + IF @@trancount > 0 + ROLLBACK; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.MergeResourcesCommitTransaction +@TransactionId BIGINT, @FailureReason VARCHAR (MAX)=NULL, @OverrideIsControlledByClientCheck BIT=0 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'MergeResourcesCommitTransaction', @st AS DATETIME = getUTCdate(), @InitialTranCount AS INT = @@trancount, @IsCompletedBefore AS BIT, @Rows AS INT, @msg AS VARCHAR (1000); +DECLARE @Mode AS VARCHAR (200) = 'TR=' + CONVERT (VARCHAR, @TransactionId) + ' OC=' + isnull(CONVERT (VARCHAR, @OverrideIsControlledByClientCheck), 'NULL'); +BEGIN TRY + IF @InitialTranCount = 0 + BEGIN TRANSACTION; + UPDATE dbo.Transactions + SET IsCompleted = 1, + @IsCompletedBefore = IsCompleted, + EndDate = getUTCdate(), + 
IsSuccess = CASE WHEN @FailureReason IS NULL THEN 1 ELSE 0 END, + FailureReason = @FailureReason + WHERE SurrogateIdRangeFirstValue = @TransactionId + AND (IsControlledByClient = 1 + OR @OverrideIsControlledByClientCheck = 1); + SET @Rows = @@rowcount; + IF @Rows = 0 + BEGIN + SET @msg = 'Transaction [' + CONVERT (VARCHAR (20), @TransactionId) + '] is not controlled by client or does not exist.'; + RAISERROR (@msg, 18, 127); + END + IF @IsCompletedBefore = 1 + BEGIN + IF @InitialTranCount = 0 + ROLLBACK; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows, @Target = '@IsCompletedBefore', @Text = '=1'; + RETURN; + END + IF @InitialTranCount = 0 + COMMIT TRANSACTION; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF @InitialTranCount = 0 + AND @@trancount > 0 + ROLLBACK; + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.MergeResourcesDeleteInvisibleHistory +@TransactionId BIGINT, @AffectedRows INT=NULL OUTPUT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = 'T=' + CONVERT (VARCHAR, @TransactionId), @st AS DATETIME = getUTCdate(), @TypeId AS SMALLINT; +SET @AffectedRows = 0; +BEGIN TRY + DECLARE @Types TABLE ( + TypeId SMALLINT PRIMARY KEY, + Name VARCHAR (100)); + INSERT INTO @Types + EXECUTE dbo.GetUsedResourceTypes ; + WHILE EXISTS (SELECT * + FROM @Types) + BEGIN + SET @TypeId = (SELECT TOP 1 TypeId + FROM @Types + ORDER BY TypeId); + DELETE dbo.Resource + WHERE ResourceTypeId = @TypeId + AND HistoryTransactionId = @TransactionId + AND RawResource = 0xF; + SET @AffectedRows += @@rowcount; + DELETE @Types + WHERE TypeId = @TypeId; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @AffectedRows; +END TRY +BEGIN CATCH + IF error_number() = 
1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.MergeResourcesGetTimeoutTransactions +@TimeoutSec INT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = 'T=' + CONVERT (VARCHAR, @TimeoutSec), @st AS DATETIME = getUTCdate(), @MinTransactionId AS BIGINT; +BEGIN TRY + EXECUTE dbo.MergeResourcesGetTransactionVisibility @MinTransactionId OUTPUT; + SELECT SurrogateIdRangeFirstValue + FROM dbo.Transactions + WHERE SurrogateIdRangeFirstValue > @MinTransactionId + AND IsCompleted = 0 + AND datediff(second, HeartbeatDate, getUTCdate()) > @TimeoutSec + ORDER BY SurrogateIdRangeFirstValue; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.MergeResourcesGetTransactionVisibility +@TransactionId BIGINT OUTPUT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = '', @st AS DATETIME = getUTCdate(); +SET @TransactionId = isnull((SELECT TOP 1 SurrogateIdRangeFirstValue + FROM dbo.Transactions + WHERE IsVisible = 1 + ORDER BY SurrogateIdRangeFirstValue DESC), -1); +EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount, @Text = @TransactionId; + +GO +CREATE PROCEDURE dbo.MergeResourcesPutTransactionHeartbeat +@TransactionId BIGINT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'MergeResourcesPutTransactionHeartbeat', @Mode AS VARCHAR (100) = 'TR=' + CONVERT (VARCHAR, @TransactionId); +BEGIN TRY + UPDATE dbo.Transactions + SET HeartbeatDate = getUTCdate() + WHERE SurrogateIdRangeFirstValue = @TransactionId + AND IsControlledByClient = 1; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 
'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.MergeResourcesPutTransactionInvisibleHistory +@TransactionId BIGINT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = 'TR=' + CONVERT (VARCHAR, @TransactionId), @st AS DATETIME = getUTCdate(); +BEGIN TRY + UPDATE dbo.Transactions + SET InvisibleHistoryRemovedDate = getUTCdate() + WHERE SurrogateIdRangeFirstValue = @TransactionId + AND InvisibleHistoryRemovedDate IS NULL; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.PutJobCancelation +@QueueType TINYINT, @GroupId BIGINT=NULL, @JobId BIGINT=NULL +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'PutJobCancelation', @Mode AS VARCHAR (100) = 'Q=' + isnull(CONVERT (VARCHAR, @QueueType), 'NULL') + ' G=' + isnull(CONVERT (VARCHAR, @GroupId), 'NULL') + ' J=' + isnull(CONVERT (VARCHAR, @JobId), 'NULL'), @st AS DATETIME = getUTCdate(), @Rows AS INT, @PartitionId AS TINYINT = @JobId % 16; +BEGIN TRY + IF @JobId IS NULL + AND @GroupId IS NULL + RAISERROR ('@JobId = NULL and @GroupId = NULL', 18, 127); + IF @JobId IS NOT NULL + BEGIN + UPDATE dbo.JobQueue + SET Status = 4, + EndDate = getUTCdate(), + Version = datediff_big(millisecond, '0001-01-01', getUTCdate()) + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId + AND Status = 0; + SET @Rows = @@rowcount; + IF @Rows = 0 + BEGIN + UPDATE dbo.JobQueue + SET CancelRequested = 1 + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId + AND Status = 1; + SET @Rows = @@rowcount; + END + END + ELSE + BEGIN + UPDATE dbo.JobQueue + SET Status = 4, + EndDate = getUTCdate(), + Version = datediff_big(millisecond, '0001-01-01', getUTCdate()) + WHERE QueueType = @QueueType + AND 
GroupId = @GroupId + AND Status = 0; + SET @Rows = @@rowcount; + UPDATE dbo.JobQueue + SET CancelRequested = 1 + WHERE QueueType = @QueueType + AND GroupId = @GroupId + AND Status = 1; + SET @Rows += @@rowcount; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.PutJobHeartbeat +@QueueType TINYINT, @JobId BIGINT, @Version BIGINT, @Data BIGINT=NULL, @CurrentResult VARCHAR (MAX)=NULL, @CancelRequested BIT=0 OUTPUT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'PutJobHeartbeat', @Mode AS VARCHAR (100), @st AS DATETIME = getUTCdate(), @Rows AS INT = 0, @PartitionId AS TINYINT = @JobId % 16; +SET @Mode = 'Q=' + CONVERT (VARCHAR, @QueueType) + ' J=' + CONVERT (VARCHAR, @JobId) + ' P=' + CONVERT (VARCHAR, @PartitionId) + ' V=' + CONVERT (VARCHAR, @Version) + ' D=' + isnull(CONVERT (VARCHAR, @Data), 'NULL'); +BEGIN TRY + IF @CurrentResult IS NULL + UPDATE dbo.JobQueue + SET @CancelRequested = CancelRequested, + HeartbeatDate = getUTCdate(), + Data = isnull(@Data, Data) + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId + AND Status = 1 + AND Version = @Version; + ELSE + UPDATE dbo.JobQueue + SET @CancelRequested = CancelRequested, + HeartbeatDate = getUTCdate(), + Data = isnull(@Data, Data), + Result = @CurrentResult + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId + AND Status = 1 + AND Version = @Version; + SET @Rows = @@rowcount; + IF @Rows = 0 + AND NOT EXISTS (SELECT * + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId + AND Version = @Version + AND Status IN (2, 3, 4)) + BEGIN + IF EXISTS (SELECT * + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId) + 
THROW 50412, 'Precondition failed', 1; + ELSE + THROW 50404, 'Job record not found', 1; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.PutJobStatus +@QueueType TINYINT, @JobId BIGINT, @Version BIGINT, @Failed BIT, @Data BIGINT, @FinalResult VARCHAR (MAX), @RequestCancellationOnFailure BIT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'PutJobStatus', @Mode AS VARCHAR (100), @st AS DATETIME = getUTCdate(), @Rows AS INT = 0, @PartitionId AS TINYINT = @JobId % 16, @GroupId AS BIGINT; +SET @Mode = 'Q=' + CONVERT (VARCHAR, @QueueType) + ' J=' + CONVERT (VARCHAR, @JobId) + ' P=' + CONVERT (VARCHAR, @PartitionId) + ' V=' + CONVERT (VARCHAR, @Version) + ' F=' + CONVERT (VARCHAR, @Failed) + ' R=' + isnull(@FinalResult, 'NULL'); +BEGIN TRY + UPDATE dbo.JobQueue + SET EndDate = getUTCdate(), + Status = CASE WHEN @Failed = 1 THEN 3 WHEN CancelRequested = 1 THEN 4 ELSE 2 END, + Data = @Data, + Result = @FinalResult, + @GroupId = GroupId + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId + AND Status = 1 + AND Version = @Version; + SET @Rows = @@rowcount; + IF @Rows = 0 + BEGIN + SET @GroupId = (SELECT GroupId + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId + AND Version = @Version + AND Status IN (2, 3, 4)); + IF @GroupId IS NULL + IF EXISTS (SELECT * + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId) + THROW 50412, 'Precondition failed', 1; + ELSE + THROW 50404, 'Job record not found', 1; + END + IF @Failed = 1 + AND @RequestCancellationOnFailure = 1 + EXECUTE dbo.PutJobCancelation @QueueType = @QueueType, @GroupId = @GroupId; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 
'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE OR ALTER PROCEDURE dbo.RemovePartitionFromResourceChanges_2 +@partitionNumberToSwitchOut INT, @partitionBoundaryToMerge DATETIME2 (7) +AS +BEGIN + TRUNCATE TABLE dbo.ResourceChangeDataStaging; + ALTER TABLE dbo.ResourceChangeData SWITCH PARTITION @partitionNumberToSwitchOut TO dbo.ResourceChangeDataStaging; + ALTER PARTITION FUNCTION PartitionFunction_ResourceChangeData_Timestamp( ) + MERGE RANGE (@partitionBoundaryToMerge); + TRUNCATE TABLE dbo.ResourceChangeDataStaging; +END + +GO +CREATE PROCEDURE dbo.SwitchPartitionsIn +@Tbl VARCHAR (100) +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'SwitchPartitionsIn', @Mode AS VARCHAR (200) = 'Tbl=' + isnull(@Tbl, 'NULL'), @st AS DATETIME = getUTCdate(), @ResourceTypeId AS SMALLINT, @Rows AS BIGINT, @Txt AS VARCHAR (1000), @TblInt AS VARCHAR (100), @Ind AS VARCHAR (200), @IndId AS INT, @DataComp AS VARCHAR (100); +DECLARE @Indexes TABLE ( + IndId INT PRIMARY KEY, + name VARCHAR (200)); +DECLARE @ResourceTypes TABLE ( + ResourceTypeId SMALLINT PRIMARY KEY); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + IF @Tbl IS NULL + RAISERROR ('@Tbl IS NULL', 18, 127); + INSERT INTO @Indexes + SELECT index_id, + name + FROM sys.indexes + WHERE object_id = object_id(@Tbl) + AND is_disabled = 1; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Indexes', @Action = 'Insert', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @Indexes) + BEGIN + SELECT TOP 1 @IndId = IndId, + @Ind = name + FROM @Indexes + ORDER BY IndId; + SET @DataComp = CASE WHEN (SELECT PropertyValue + FROM dbo.IndexProperties + WHERE TableName = @Tbl + AND IndexName = @Ind) = 'PAGE' THEN ' PARTITION = ALL WITH (DATA_COMPRESSION = PAGE)' ELSE '' END; + SET @Txt = 'IF 
EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id(''' + @Tbl + ''') AND name = ''' + @Ind + ''' AND is_disabled = 1) ALTER INDEX ' + @Ind + ' ON dbo.' + @Tbl + ' REBUILD' + @DataComp; + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Ind, @Action = 'Rebuild', @Text = @Txt; + DELETE @Indexes + WHERE IndId = @IndId; + END + INSERT INTO @ResourceTypes + SELECT CONVERT (SMALLINT, substring(name, charindex('_', name) + 1, 6)) AS ResourceTypeId + FROM sys.objects AS O + WHERE name LIKE @Tbl + '[_]%' + AND EXISTS (SELECT * + FROM sysindexes + WHERE id = O.object_id + AND indid IN (0, 1) + AND rows > 0); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '#ResourceTypes', @Action = 'Select Into', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @ResourceTypes) + BEGIN + SET @ResourceTypeId = (SELECT TOP 1 ResourceTypeId + FROM @ResourceTypes); + SET @TblInt = @Tbl + '_' + CONVERT (VARCHAR, @ResourceTypeId); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt; + SET @Txt = 'ALTER TABLE dbo.' + @TblInt + ' SWITCH TO dbo.' + @Tbl + ' PARTITION $partition.PartitionFunction_ResourceTypeId(' + CONVERT (VARCHAR, @ResourceTypeId) + ')'; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Tbl, @Action = 'Switch in start', @Text = @Txt; + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Tbl, @Action = 'Switch in', @Text = @Txt; + IF EXISTS (SELECT * + FROM sysindexes + WHERE id = object_id(@TblInt) + AND rows > 0) + BEGIN + SET @Txt = @TblInt + ' is not empty after switch'; + RAISERROR (@Txt, 18, 127); + END + EXECUTE ('DROP TABLE dbo.' 
+ @TblInt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Drop'; + DELETE @ResourceTypes + WHERE ResourceTypeId = @ResourceTypeId; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.SwitchPartitionsInAllTables +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'SwitchPartitionsInAllTables', @Mode AS VARCHAR (200) = 'PS=PartitionScheme_ResourceTypeId', @st AS DATETIME = getUTCdate(), @Tbl AS VARCHAR (100); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + DECLARE @Tables TABLE ( + name VARCHAR (100) PRIMARY KEY, + supported BIT ); + INSERT INTO @Tables + EXECUTE dbo.GetPartitionedTables @IncludeNotDisabled = 1, @IncludeNotSupported = 0; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Tables', @Action = 'Insert', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @Tables) + BEGIN + SET @Tbl = (SELECT TOP 1 name + FROM @Tables + ORDER BY name); + EXECUTE dbo.SwitchPartitionsIn @Tbl = @Tbl; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = 'SwitchPartitionsIn', @Action = 'Execute', @Text = @Tbl; + DELETE @Tables + WHERE name = @Tbl; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.SwitchPartitionsOut +@Tbl VARCHAR (100), @RebuildClustered BIT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'SwitchPartitionsOut', @Mode AS VARCHAR (200) = 'Tbl=' + isnull(@Tbl, 'NULL') + ' ND=' + isnull(CONVERT (VARCHAR, @RebuildClustered), 
'NULL'), @st AS DATETIME = getUTCdate(), @ResourceTypeId AS SMALLINT, @Rows AS BIGINT, @Txt AS VARCHAR (MAX), @TblInt AS VARCHAR (100), @IndId AS INT, @Ind AS VARCHAR (200), @Name AS VARCHAR (100), @checkName AS VARCHAR (200), @definition AS VARCHAR (200); +DECLARE @Indexes TABLE ( + IndId INT PRIMARY KEY, + name VARCHAR (200), + IsDisabled BIT ); +DECLARE @IndexesRT TABLE ( + IndId INT PRIMARY KEY, + name VARCHAR (200), + IsDisabled BIT ); +DECLARE @ResourceTypes TABLE ( + ResourceTypeId SMALLINT PRIMARY KEY, + partition_number_roundtrip INT , + partition_number INT , + row_count BIGINT ); +DECLARE @Names TABLE ( + name VARCHAR (100) PRIMARY KEY); +DECLARE @CheckConstraints TABLE ( + CheckName VARCHAR (200), + CheckDefinition VARCHAR (200)); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + IF @Tbl IS NULL + RAISERROR ('@Tbl IS NULL', 18, 127); + IF @RebuildClustered IS NULL + RAISERROR ('@RebuildClustered IS NULL', 18, 127); + INSERT INTO @Indexes + SELECT index_id, + name, + is_disabled + FROM sys.indexes + WHERE object_id = object_id(@Tbl) + AND (is_disabled = 0 + OR @RebuildClustered = 1); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Indexes', @Action = 'Insert', @Rows = @@rowcount; + INSERT INTO @ResourceTypes + SELECT partition_number - 1 AS ResourceTypeId, + $PARTITION.PartitionFunction_ResourceTypeId (partition_number - 1) AS partition_number_roundtrip, + partition_number, + row_count + FROM sys.dm_db_partition_stats + WHERE object_id = object_id(@Tbl) + AND index_id = 1 + AND row_count > 0; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@ResourceTypes', @Action = 'Insert', @Rows = @@rowcount, @Text = 'For partition switch'; + IF EXISTS (SELECT * + FROM @ResourceTypes + WHERE partition_number_roundtrip <> partition_number) + RAISERROR ('Partition sanity check failed', 18, 127); + WHILE EXISTS (SELECT * + FROM @ResourceTypes) + BEGIN + SELECT TOP 1 
@ResourceTypeId = ResourceTypeId, + @Rows = row_count + FROM @ResourceTypes + ORDER BY ResourceTypeId; + SET @TblInt = @Tbl + '_' + CONVERT (VARCHAR, @ResourceTypeId); + SET @Txt = 'Starting @ResourceTypeId=' + CONVERT (VARCHAR, @ResourceTypeId) + ' row_count=' + CONVERT (VARCHAR, @Rows); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Text = @Txt; + IF NOT EXISTS (SELECT * + FROM sysindexes + WHERE id = object_id(@TblInt) + AND rows > 0) + BEGIN + IF object_id(@TblInt) IS NOT NULL + BEGIN + EXECUTE ('DROP TABLE dbo.' + @TblInt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Drop'; + END + EXECUTE ('SELECT * INTO dbo.' + @TblInt + ' FROM dbo.' + @Tbl + ' WHERE 1 = 2'); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Select Into', @Rows = @@rowcount; + DELETE @CheckConstraints; + INSERT INTO @CheckConstraints + SELECT name, + definition + FROM sys.check_constraints + WHERE parent_object_id = object_id(@Tbl); + WHILE EXISTS (SELECT * + FROM @CheckConstraints) + BEGIN + SELECT TOP 1 @checkName = CheckName, + @definition = CheckDefinition + FROM @CheckConstraints; + SET @Txt = 'ALTER TABLE ' + @TblInt + ' ADD CHECK ' + @definition; + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'ALTER', @Text = @Txt; + DELETE @CheckConstraints + WHERE CheckName = @checkName; + END + DELETE @Names; + INSERT INTO @Names + SELECT name + FROM sys.columns + WHERE object_id = object_id(@Tbl) + AND is_sparse = 1; + WHILE EXISTS (SELECT * + FROM @Names) + BEGIN + SET @Name = (SELECT TOP 1 name + FROM @Names + ORDER BY name); + SET @Txt = (SELECT 'ALTER TABLE dbo.' 
+ @TblInt + ' ALTER COLUMN ' + @Name + ' ' + T.name + '(' + CONVERT (VARCHAR, C.precision) + ',' + CONVERT (VARCHAR, C.scale) + ') SPARSE NULL' + FROM sys.types AS T + INNER JOIN + sys.columns AS C + ON C.system_type_id = T.system_type_id + WHERE C.object_id = object_id(@Tbl) + AND C.name = @Name); + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'ALTER', @Text = @Txt; + DELETE @Names + WHERE name = @Name; + END + END + INSERT INTO @IndexesRT + SELECT * + FROM @Indexes + WHERE IsDisabled = 0; + WHILE EXISTS (SELECT * + FROM @IndexesRT) + BEGIN + SELECT TOP 1 @IndId = IndId, + @Ind = name + FROM @IndexesRT + ORDER BY IndId; + IF NOT EXISTS (SELECT * + FROM sys.indexes + WHERE object_id = object_id(@TblInt) + AND name = @Ind) + BEGIN + EXECUTE dbo.GetIndexCommands @Tbl = @Tbl, @Ind = @Ind, @AddPartClause = 0, @IncludeClustered = 1, @Txt = @Txt OUTPUT; + SET @Txt = replace(@Txt, '[' + @Tbl + ']', @TblInt); + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Create Index', @Text = @Txt; + END + DELETE @IndexesRT + WHERE IndId = @IndId; + END + SET @Txt = 'ALTER TABLE dbo.' + @TblInt + ' ADD CHECK (ResourceTypeId >= ' + CONVERT (VARCHAR, @ResourceTypeId) + ' AND ResourceTypeId < ' + CONVERT (VARCHAR, @ResourceTypeId) + ' + 1)'; + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Tbl, @Action = 'Add check', @Text = @Txt; + SET @Txt = 'ALTER TABLE dbo.' + @Tbl + ' SWITCH PARTITION $partition.PartitionFunction_ResourceTypeId(' + CONVERT (VARCHAR, @ResourceTypeId) + ') TO dbo.' 
+ @TblInt; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Tbl, @Action = 'Switch out start', @Text = @Txt; + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Tbl, @Action = 'Switch out end', @Text = @Txt; + DELETE @ResourceTypes + WHERE ResourceTypeId = @ResourceTypeId; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.SwitchPartitionsOutAllTables +@RebuildClustered BIT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'SwitchPartitionsOutAllTables', @Mode AS VARCHAR (200) = 'PS=PartitionScheme_ResourceTypeId ND=' + isnull(CONVERT (VARCHAR, @RebuildClustered), 'NULL'), @st AS DATETIME = getUTCdate(), @Tbl AS VARCHAR (100); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + DECLARE @Tables TABLE ( + name VARCHAR (100) PRIMARY KEY, + supported BIT ); + INSERT INTO @Tables + EXECUTE dbo.GetPartitionedTables @IncludeNotDisabled = @RebuildClustered, @IncludeNotSupported = 0; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Tables', @Action = 'Insert', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @Tables) + BEGIN + SET @Tbl = (SELECT TOP 1 name + FROM @Tables + ORDER BY name); + EXECUTE dbo.SwitchPartitionsOut @Tbl = @Tbl, @RebuildClustered = @RebuildClustered; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = 'SwitchPartitionsOut', @Action = 'Execute', @Text = @Tbl; + DELETE @Tables + WHERE name = @Tbl; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; 
+END CATCH + +GO +GO +CREATE OR ALTER PROCEDURE dbo.UpdateEventAgentCheckpoint +@CheckpointId VARCHAR (64), @LastProcessedDateTime DATETIMEOFFSET (7)=NULL, @LastProcessedIdentifier VARCHAR (64)=NULL +AS +BEGIN + IF EXISTS (SELECT * + FROM dbo.EventAgentCheckpoint + WHERE CheckpointId = @CheckpointId) + UPDATE dbo.EventAgentCheckpoint + SET CheckpointId = @CheckpointId, + LastProcessedDateTime = @LastProcessedDateTime, + LastProcessedIdentifier = @LastProcessedIdentifier, + UpdatedOn = sysutcdatetime() + WHERE CheckpointId = @CheckpointId; + ELSE + INSERT INTO dbo.EventAgentCheckpoint (CheckpointId, LastProcessedDateTime, LastProcessedIdentifier, UpdatedOn) + VALUES (@CheckpointId, @LastProcessedDateTime, @LastProcessedIdentifier, sysutcdatetime()); +END + +GO +CREATE PROCEDURE dbo.UpdateReindexJob +@id VARCHAR (64), @status VARCHAR (10), @rawJobRecord VARCHAR (MAX), @jobVersion BINARY (8) +AS +SET NOCOUNT ON; +SET XACT_ABORT ON; +BEGIN TRANSACTION; +DECLARE @currentJobVersion AS BINARY (8); +SELECT @currentJobVersion = JobVersion +FROM dbo.ReindexJob WITH (UPDLOCK, HOLDLOCK) +WHERE Id = @id; +IF (@currentJobVersion IS NULL) + BEGIN + THROW 50404, 'Reindex job record not found', 1; + END +IF (@jobVersion <> @currentJobVersion) + BEGIN + THROW 50412, 'Precondition failed', 1; + END +DECLARE @heartbeatDateTime AS DATETIME2 (7) = SYSUTCDATETIME(); +UPDATE dbo.ReindexJob +SET Status = @status, + HeartbeatDateTime = @heartbeatDateTime, + RawJobRecord = @rawJobRecord +WHERE Id = @id; +SELECT @@DBTS; +COMMIT TRANSACTION; + +GO +CREATE PROCEDURE dbo.UpdateResourceSearchParams +@FailedResources INT=0 OUTPUT, @Resources dbo.ResourceList READONLY, @ResourceWriteClaims dbo.ResourceWriteClaimList READONLY, @ReferenceSearchParams dbo.ReferenceSearchParamList READONLY, @TokenSearchParams dbo.TokenSearchParamList READONLY, @TokenTexts dbo.TokenTextList READONLY, @StringSearchParams dbo.StringSearchParamList READONLY, @UriSearchParams dbo.UriSearchParamList READONLY, 
@NumberSearchParams dbo.NumberSearchParamList READONLY, @QuantitySearchParams dbo.QuantitySearchParamList READONLY, @DateTimeSearchParams dbo.DateTimeSearchParamList READONLY, @ReferenceTokenCompositeSearchParams dbo.ReferenceTokenCompositeSearchParamList READONLY, @TokenTokenCompositeSearchParams dbo.TokenTokenCompositeSearchParamList READONLY, @TokenDateTimeCompositeSearchParams dbo.TokenDateTimeCompositeSearchParamList READONLY, @TokenQuantityCompositeSearchParams dbo.TokenQuantityCompositeSearchParamList READONLY, @TokenStringCompositeSearchParams dbo.TokenStringCompositeSearchParamList READONLY, @TokenNumberNumberCompositeSearchParams dbo.TokenNumberNumberCompositeSearchParamList READONLY +AS +SET NOCOUNT ON; +DECLARE @st AS DATETIME = getUTCdate(), @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (200) = isnull((SELECT 'RT=[' + CONVERT (VARCHAR, min(ResourceTypeId)) + ',' + CONVERT (VARCHAR, max(ResourceTypeId)) + '] Sur=[' + CONVERT (VARCHAR, min(ResourceSurrogateId)) + ',' + CONVERT (VARCHAR, max(ResourceSurrogateId)) + '] V=' + CONVERT (VARCHAR, max(Version)) + ' Rows=' + CONVERT (VARCHAR, count(*)) + FROM @Resources), 'Input=Empty'), @Rows AS INT; +BEGIN TRY + DECLARE @Ids TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL); + BEGIN TRANSACTION; + UPDATE B + SET SearchParamHash = A.SearchParamHash + OUTPUT deleted.ResourceTypeId, deleted.ResourceSurrogateId INTO @Ids + FROM @Resources AS A + INNER JOIN + dbo.Resource AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + WHERE B.IsHistory = 0; + SET @Rows = @@rowcount; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.ResourceWriteClaim AS B + ON B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.ReferenceSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenSearchParam 
AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenText AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.StringSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.UriSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.NumberSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.QuantitySearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.DateTimeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.ReferenceTokenCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenTokenCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenDateTimeCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenQuantityCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenStringCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A 
+ INNER JOIN + dbo.TokenNumberNumberCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + INSERT INTO dbo.ResourceWriteClaim (ResourceSurrogateId, ClaimTypeId, ClaimValue) + SELECT ResourceSurrogateId, + ClaimTypeId, + ClaimValue + FROM @ResourceWriteClaims; + INSERT INTO dbo.ReferenceSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId, ReferenceResourceVersion) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + BaseUri, + ReferenceResourceTypeId, + ReferenceResourceId, + ReferenceResourceVersion + FROM @ReferenceSearchParams; + INSERT INTO dbo.TokenSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, Code, CodeOverflow) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId, + Code, + CodeOverflow + FROM @TokenSearchParams; + INSERT INTO dbo.TokenText (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Text + FROM @TokenTexts; + INSERT INTO dbo.StringSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text, TextOverflow, IsMin, IsMax) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Text, + TextOverflow, + IsMin, + IsMax + FROM @StringSearchParams; + INSERT INTO dbo.UriSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Uri) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Uri + FROM @UriSearchParams; + INSERT INTO dbo.NumberSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SingleValue, + LowValue, + HighValue + FROM @NumberSearchParams; + INSERT INTO dbo.QuantitySearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue) + SELECT ResourceTypeId, + 
ResourceSurrogateId, + SearchParamId, + SystemId, + QuantityCodeId, + SingleValue, + LowValue, + HighValue + FROM @QuantitySearchParams; + INSERT INTO dbo.DateTimeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, IsMin, IsMax) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + StartDateTime, + EndDateTime, + IsLongerThanADay, + IsMin, + IsMax + FROM @DateTimeSearchParams; + INSERT INTO dbo.ReferenceTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri1, ReferenceResourceTypeId1, ReferenceResourceId1, ReferenceResourceVersion1, SystemId2, Code2, CodeOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + BaseUri1, + ReferenceResourceTypeId1, + ReferenceResourceId1, + ReferenceResourceVersion1, + SystemId2, + Code2, + CodeOverflow2 + FROM @ReferenceTokenCompositeSearchParams; + INSERT INTO dbo.TokenTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SystemId2, Code2, CodeOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SystemId2, + Code2, + CodeOverflow2 + FROM @TokenTokenCompositeSearchParams; + INSERT INTO dbo.TokenDateTimeCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, StartDateTime2, EndDateTime2, IsLongerThanADay2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + StartDateTime2, + EndDateTime2, + IsLongerThanADay2 + FROM @TokenDateTimeCompositeSearchParams; + INSERT INTO dbo.TokenQuantityCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, SystemId2, QuantityCodeId2, LowValue2, HighValue2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SingleValue2, + SystemId2, + 
QuantityCodeId2, + LowValue2, + HighValue2 + FROM @TokenQuantityCompositeSearchParams; + INSERT INTO dbo.TokenStringCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, Text2, TextOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + Text2, + TextOverflow2 + FROM @TokenStringCompositeSearchParams; + INSERT INTO dbo.TokenNumberNumberCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, LowValue2, HighValue2, SingleValue3, LowValue3, HighValue3, HasRange) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SingleValue2, + LowValue2, + HighValue2, + SingleValue3, + LowValue3, + HighValue3, + HasRange + FROM @TokenNumberNumberCompositeSearchParams; + COMMIT TRANSACTION; + SET @FailedResources = (SELECT count(*) + FROM @Resources) - @Rows; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.UpsertSearchParams +@searchParams dbo.SearchParamTableType_2 READONLY +AS +SET NOCOUNT ON; +SET XACT_ABORT ON; +SET TRANSACTION ISOLATION LEVEL SERIALIZABLE; +BEGIN TRANSACTION; +DECLARE @lastUpdated AS DATETIMEOFFSET (7) = SYSDATETIMEOFFSET(); +DECLARE @summaryOfChanges TABLE ( + Uri VARCHAR (128) COLLATE Latin1_General_100_CS_AS NOT NULL, + Action VARCHAR (20) NOT NULL); +MERGE INTO dbo.SearchParam WITH (TABLOCKX) + AS target +USING @searchParams AS source ON target.Uri = source.Uri +WHEN MATCHED THEN UPDATE +SET Status = source.Status, + LastUpdated = @lastUpdated, + IsPartiallySupported = source.IsPartiallySupported +WHEN NOT MATCHED BY TARGET THEN INSERT (Uri, Status, LastUpdated, IsPartiallySupported) VALUES 
(source.Uri, source.Status, @lastUpdated, source.IsPartiallySupported) +OUTPUT source.Uri, $ACTION INTO @summaryOfChanges; +SELECT SearchParamId, + SearchParam.Uri +FROM dbo.SearchParam AS searchParam + INNER JOIN + @summaryOfChanges AS upsertedSearchParam + ON searchParam.Uri = upsertedSearchParam.Uri +WHERE upsertedSearchParam.Action = 'INSERT'; +COMMIT TRANSACTION; + +GO diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersion.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersion.cs index 2f71d5d22a..47b347521a 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersion.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersion.cs @@ -86,5 +86,6 @@ public enum SchemaVersion V74 = 74, V75 = 75, V76 = 76, + V77 = 77, } } diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersionConstants.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersionConstants.cs index af32756abb..59ac1b73b1 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersionConstants.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersionConstants.cs @@ -8,7 +8,7 @@ namespace Microsoft.Health.Fhir.SqlServer.Features.Schema public static class SchemaVersionConstants { public const int Min = (int)SchemaVersion.V73; - public const int Max = (int)SchemaVersion.V76; + public const int Max = (int)SchemaVersion.V77; public const int MinForUpgrade = (int)SchemaVersion.V73; // this is used for upgrade tests only public const int SearchParameterStatusSchemaVersion = (int)SchemaVersion.V6; public const int SupportForReferencesWithMissingTypeVersion = (int)SchemaVersion.V7; diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Scripts/TransactionCheckWithInitialiScript.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Scripts/TransactionCheckWithInitialiScript.sql index 0515ba9ff0..989e1eb5db 100644 --- 
a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Scripts/TransactionCheckWithInitialiScript.sql +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Scripts/TransactionCheckWithInitialiScript.sql @@ -19,6 +19,6 @@ Go INSERT INTO dbo.SchemaVersion VALUES - (76, 'started') + (77, 'started') Go diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Types/CompartmentAssignmentList.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Types/CompartmentAssignmentList.sql deleted file mode 100644 index b415abfffb..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Types/CompartmentAssignmentList.sql +++ /dev/null @@ -1,12 +0,0 @@ ---DROP TYPE dbo.CompartmentAssignmentList -GO -CREATE TYPE dbo.CompartmentAssignmentList AS TABLE -( - ResourceTypeId smallint NOT NULL - ,ResourceSurrogateId bigint NOT NULL - ,CompartmentTypeId tinyint NOT NULL - ,ReferenceResourceId varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL - - PRIMARY KEY (ResourceTypeId, ResourceSurrogateId, CompartmentTypeId, ReferenceResourceId) -) -GO diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Types/Types.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Types/Types.sql index 1a02236fc9..773770dbb2 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Types/Types.sql +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Types/Types.sql @@ -1,389 +1,4 @@ -CREATE TYPE dbo.BulkResourceWriteClaimTableType_1 AS TABLE -( - Offset int NOT NULL, - ClaimTypeId tinyint NOT NULL, - ClaimValue nvarchar(128) NOT NULL -) - - -CREATE TYPE dbo.BulkCompartmentAssignmentTableType_1 AS TABLE -( - Offset int NOT NULL, - CompartmentTypeId tinyint NOT NULL, - ReferenceResourceId varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL -) - -/************************************************************* - Reference Search Param -**************************************************************/ - -CREATE TYPE 
dbo.BulkReferenceSearchParamTableType_1 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - BaseUri varchar(128) COLLATE Latin1_General_100_CS_AS NULL, - ReferenceResourceTypeId smallint NULL, - ReferenceResourceId varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL, - ReferenceResourceVersion int NULL -) - -/************************************************************* - Token Search Param -**************************************************************/ - -CREATE TYPE dbo.BulkTokenSearchParamTableType_1 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - SystemId int NULL, - Code varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL -) - -/************************************************************* - Token Search Param -**************************************************************/ - -CREATE TYPE dbo.BulkTokenSearchParamTableType_2 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - SystemId int NULL, - Code varchar(256) COLLATE Latin1_General_100_CS_AS NOT NULL, - CodeOverflow varchar(max) COLLATE Latin1_General_100_CS_AS NULL -) - -/************************************************************* - Token Text -**************************************************************/ - -CREATE TYPE dbo.BulkTokenTextTableType_1 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - Text nvarchar(400) COLLATE Latin1_General_CI_AI NOT NULL -) - -/************************************************************* - String Search Param -**************************************************************/ - -CREATE TYPE dbo.BulkStringSearchParamTableType_1 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - Text nvarchar(256) COLLATE Latin1_General_100_CI_AI_SC NOT NULL, - TextOverflow nvarchar(max) COLLATE Latin1_General_100_CI_AI_SC NULL -) - -/************************************************************* - String Search Param 
-**************************************************************/ - -CREATE TYPE dbo.BulkStringSearchParamTableType_2 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - Text nvarchar(256) COLLATE Latin1_General_100_CI_AI_SC NOT NULL, - TextOverflow nvarchar(max) COLLATE Latin1_General_100_CI_AI_SC NULL, - IsMin bit NOT NULL, - IsMax bit NOT NULL -) - -/************************************************************* - URI Search Param -**************************************************************/ - -CREATE TYPE dbo.BulkUriSearchParamTableType_1 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - Uri varchar(256) COLLATE Latin1_General_100_CS_AS NOT NULL -) - -/************************************************************* - Number Search Param -**************************************************************/ - --- We support the underlying value being a range, though we expect the vast majority of entries to be a single value. --- Either: --- (1) SingleValue is not null and LowValue and HighValue are both null, or --- (2) SingleValue is null and LowValue and HighValue are both not null --- We make use of filtered nonclustered indexes to keep queries over the ranges limited to those rows that actually have ranges - -CREATE TYPE dbo.BulkNumberSearchParamTableType_1 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - SingleValue decimal(18,6) NULL, - LowValue decimal(18,6) NULL, - HighValue decimal(18,6) NULL -) - -/************************************************************* - Quantity Search Param -**************************************************************/ - --- See comment above for number search params for how we store ranges - -CREATE TYPE dbo.BulkQuantitySearchParamTableType_1 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - SystemId int NULL, - QuantityCodeId int NULL, - SingleValue decimal(18,6) NULL, - LowValue decimal(18,6) NULL, - HighValue decimal(18,6) NULL -) - 
-/************************************************************* - Date Search Param -**************************************************************/ - -CREATE TYPE dbo.BulkDateTimeSearchParamTableType_1 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - StartDateTime datetimeoffset(7) NOT NULL, - EndDateTime datetimeoffset(7) NOT NULL, - IsLongerThanADay bit NOT NULL -) - -/************************************************************* - Date Search Param -**************************************************************/ - -CREATE TYPE dbo.BulkDateTimeSearchParamTableType_2 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - StartDateTime datetimeoffset(7) NOT NULL, - EndDateTime datetimeoffset(7) NOT NULL, - IsLongerThanADay bit NOT NULL, - IsMin bit NOT NULL, - IsMax bit NOT NULL -) - -/************************************************************* - Reference$Token Composite Search Param -**************************************************************/ - -CREATE TYPE dbo.BulkReferenceTokenCompositeSearchParamTableType_1 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - BaseUri1 varchar(128) COLLATE Latin1_General_100_CS_AS NULL, - ReferenceResourceTypeId1 smallint NULL, - ReferenceResourceId1 varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL, - ReferenceResourceVersion1 int NULL, - SystemId2 int NULL, - Code2 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL -) - -/************************************************************* - Reference$Token Composite Search Param -**************************************************************/ - -CREATE TYPE dbo.BulkReferenceTokenCompositeSearchParamTableType_2 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - BaseUri1 varchar(128) COLLATE Latin1_General_100_CS_AS NULL, - ReferenceResourceTypeId1 smallint NULL, - ReferenceResourceId1 varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL, - ReferenceResourceVersion1 int NULL, - SystemId2 
int NULL, - Code2 varchar(256) COLLATE Latin1_General_100_CS_AS NOT NULL, - CodeOverflow2 varchar(max) COLLATE Latin1_General_100_CS_AS NULL -) - -/************************************************************* - Token$Token Composite Search Param -**************************************************************/ - -CREATE TYPE dbo.BulkTokenTokenCompositeSearchParamTableType_1 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - SystemId1 int NULL, - Code1 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, - SystemId2 int NULL, - Code2 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL -) - -/************************************************************* - Token$Token Composite Search Param -**************************************************************/ - -CREATE TYPE dbo.BulkTokenTokenCompositeSearchParamTableType_2 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - SystemId1 int NULL, - Code1 varchar(256) COLLATE Latin1_General_100_CS_AS NOT NULL, - CodeOverflow1 varchar(max) COLLATE Latin1_General_100_CS_AS NULL, - SystemId2 int NULL, - Code2 varchar(256) COLLATE Latin1_General_100_CS_AS NOT NULL, - CodeOverflow2 varchar(max) COLLATE Latin1_General_100_CS_AS NULL -) - -/************************************************************* - Token$DateTime Composite Search Param -**************************************************************/ - -CREATE TYPE dbo.BulkTokenDateTimeCompositeSearchParamTableType_1 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - SystemId1 int NULL, - Code1 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, - StartDateTime2 datetimeoffset(7) NOT NULL, - EndDateTime2 datetimeoffset(7) NOT NULL, - IsLongerThanADay2 bit NOT NULL -) - -/************************************************************* - Token$DateTime Composite Search Param -**************************************************************/ - -CREATE TYPE dbo.BulkTokenDateTimeCompositeSearchParamTableType_2 AS 
TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - SystemId1 int NULL, - Code1 varchar(256) COLLATE Latin1_General_100_CS_AS NOT NULL, - CodeOverflow1 varchar(max) COLLATE Latin1_General_100_CS_AS NULL, - StartDateTime2 datetimeoffset(7) NOT NULL, - EndDateTime2 datetimeoffset(7) NOT NULL, - IsLongerThanADay2 bit NOT NULL -) - -/************************************************************* - Token$Quantity Composite Search Param -**************************************************************/ - -CREATE TYPE dbo.BulkTokenQuantityCompositeSearchParamTableType_1 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - SystemId1 int NULL, - Code1 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, - SystemId2 int NULL, - QuantityCodeId2 int NULL, - SingleValue2 decimal(18,6) NULL, - LowValue2 decimal(18,6) NULL, - HighValue2 decimal(18,6) NULL -) - -/************************************************************* - Token$Quantity Composite Search Param -**************************************************************/ - -CREATE TYPE dbo.BulkTokenQuantityCompositeSearchParamTableType_2 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - SystemId1 int NULL, - Code1 varchar(256) COLLATE Latin1_General_100_CS_AS NOT NULL, - CodeOverflow1 varchar(max) COLLATE Latin1_General_100_CS_AS NULL, - SystemId2 int NULL, - QuantityCodeId2 int NULL, - SingleValue2 decimal(18,6) NULL, - LowValue2 decimal(18,6) NULL, - HighValue2 decimal(18,6) NULL -) - -/************************************************************* - Token$String Composite Search Param -**************************************************************/ - -CREATE TYPE dbo.BulkTokenStringCompositeSearchParamTableType_1 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - SystemId1 int NULL, - Code1 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, - Text2 nvarchar(256) COLLATE Latin1_General_100_CI_AI_SC NOT NULL, - TextOverflow2 nvarchar(max) 
COLLATE Latin1_General_100_CI_AI_SC NULL -) - -/************************************************************* - Token$String Composite Search Param -**************************************************************/ - -CREATE TYPE dbo.BulkTokenStringCompositeSearchParamTableType_2 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - SystemId1 int NULL, - Code1 varchar(256) COLLATE Latin1_General_100_CS_AS NOT NULL, - CodeOverflow1 varchar(max) COLLATE Latin1_General_100_CS_AS NULL, - Text2 nvarchar(256) COLLATE Latin1_General_100_CI_AI_SC NOT NULL, - TextOverflow2 nvarchar(max) COLLATE Latin1_General_100_CI_AI_SC NULL -) - -/************************************************************* - Token$Number$Number Composite Search Param -**************************************************************/ - --- See number search param for how we deal with null. We apply a similar pattern here, --- except that we pass in a HasRange bit though the TVP. The alternative would have --- for a computed column, but a computed column cannot be used in as a index filter --- (even if it is a persisted computed column). - -CREATE TYPE dbo.BulkTokenNumberNumberCompositeSearchParamTableType_1 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - SystemId1 int NULL, - Code1 varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, - SingleValue2 decimal(18,6) NULL, - LowValue2 decimal(18,6) NULL, - HighValue2 decimal(18,6) NULL, - SingleValue3 decimal(18,6) NULL, - LowValue3 decimal(18,6) NULL, - HighValue3 decimal(18,6) NULL, - HasRange bit NOT NULL -) - -/************************************************************* - Token$Number$Number Composite Search Param -**************************************************************/ - --- See number search param for how we deal with null. We apply a similar pattern here, --- except that we pass in a HasRange bit though the TVP. 
The alternative would have --- for a computed column, but a computed column cannot be used in as a index filter --- (even if it is a persisted computed column). - -CREATE TYPE dbo.BulkTokenNumberNumberCompositeSearchParamTableType_2 AS TABLE -( - Offset int NOT NULL, - SearchParamId smallint NOT NULL, - SystemId1 int NULL, - Code1 varchar(256) COLLATE Latin1_General_100_CS_AS NOT NULL, - CodeOverflow1 varchar(max) COLLATE Latin1_General_100_CS_AS NULL, - SingleValue2 decimal(18,6) NULL, - LowValue2 decimal(18,6) NULL, - HighValue2 decimal(18,6) NULL, - SingleValue3 decimal(18,6) NULL, - LowValue3 decimal(18,6) NULL, - HighValue3 decimal(18,6) NULL, - HasRange bit NOT NULL -) - -/************************************************************* - Search Parameter Status Information -**************************************************************/ - --- We adopted this naming convention for table-valued parameters because they are immutable. -CREATE TYPE dbo.SearchParamTableType_1 AS TABLE -( - Uri varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, - Status varchar(10) NOT NULL, - IsPartiallySupported bit NOT NULL -) - -CREATE TYPE dbo.SearchParamTableType_2 AS TABLE +CREATE TYPE dbo.SearchParamTableType_2 AS TABLE ( Uri varchar(128) COLLATE Latin1_General_100_CS_AS NOT NULL, Status varchar(20) NOT NULL, @@ -398,21 +13,3 @@ CREATE TYPE dbo.BulkReindexResourceTableType_1 AS TABLE ETag int NULL, SearchParamHash varchar(64) NOT NULL ) - - -/************************************************************* - Resource Bulk Import feature -**************************************************************/ -CREATE TYPE dbo.BulkImportResourceType_1 AS TABLE -( - ResourceTypeId smallint NOT NULL, - ResourceId varchar(64) COLLATE Latin1_General_100_CS_AS NOT NULL, - Version int NOT NULL, - IsHistory bit NOT NULL, - ResourceSurrogateId bigint NOT NULL, - IsDeleted bit NOT NULL, - RequestMethod varchar(10) NULL, - RawResource varbinary(max) NOT NULL, - IsRawResourceMetaSet bit NOT NULL 
DEFAULT 0, - SearchParamHash varchar(64) NULL -) diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkCompartmentAssignmentV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkCompartmentAssignmentV1RowGenerator.cs deleted file mode 100644 index 505854fc33..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkCompartmentAssignmentV1RowGenerator.cs +++ /dev/null @@ -1,114 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. -// ------------------------------------------------------------------------------------------------- - -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using EnsureThat; -using Microsoft.Health.Fhir.Core.Features.Persistence; -using Microsoft.Health.Fhir.Core.Models; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; -using Microsoft.Health.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkCompartmentAssignmentV1RowGenerator : ITableValuedParameterRowGenerator, BulkCompartmentAssignmentTableTypeV1Row> - { - private readonly ISqlServerFhirModel _model; - private readonly SearchParameterToSearchValueTypeMap _searchParameterTypeMap; - private bool _initialized; - private byte _patientCompartmentId; - private byte _encounterCompartmentId; - private byte _relatedPersonCompartmentId; - private byte _practitionerCompartmentId; - private byte _deviceCompartmentId; - - public BulkCompartmentAssignmentV1RowGenerator(ISqlServerFhirModel model, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - { - EnsureArg.IsNotNull(model, nameof(model)); - EnsureArg.IsNotNull(searchParameterTypeMap, 
nameof(searchParameterTypeMap)); - - _model = model; - _searchParameterTypeMap = searchParameterTypeMap; - } - - public IEnumerable GenerateRows(IReadOnlyList resources) - { - EnsureInitialized(); - - for (var index = 0; index < resources.Count; index++) - { - ResourceWrapper resource = resources[index]; - - var resourceMetadata = new ResourceMetadata( - resource.CompartmentIndices, - resource.SearchIndices?.ToLookup(e => _searchParameterTypeMap.GetSearchValueType(e)), - resource.LastModifiedClaims); - - CompartmentIndices compartments = resourceMetadata.Compartments; - if (compartments == null) - { - yield break; - } - - if (compartments.PatientCompartmentEntry != null) - { - foreach (var entry in compartments.PatientCompartmentEntry) - { - yield return new BulkCompartmentAssignmentTableTypeV1Row(index, _patientCompartmentId, entry); - } - } - - if (compartments.EncounterCompartmentEntry != null) - { - foreach (var entry in compartments.EncounterCompartmentEntry) - { - yield return new BulkCompartmentAssignmentTableTypeV1Row(index, _encounterCompartmentId, entry); - } - } - - if (compartments.RelatedPersonCompartmentEntry != null) - { - foreach (var entry in compartments.RelatedPersonCompartmentEntry) - { - yield return new BulkCompartmentAssignmentTableTypeV1Row(index, _relatedPersonCompartmentId, entry); - } - } - - if (compartments.PractitionerCompartmentEntry != null) - { - foreach (var entry in compartments.PractitionerCompartmentEntry) - { - yield return new BulkCompartmentAssignmentTableTypeV1Row(index, _practitionerCompartmentId, entry); - } - } - - if (compartments.DeviceCompartmentEntry != null) - { - foreach (var entry in compartments.DeviceCompartmentEntry) - { - yield return new BulkCompartmentAssignmentTableTypeV1Row(index, _deviceCompartmentId, entry); - } - } - } - } - - private void EnsureInitialized() - { - if (Volatile.Read(ref _initialized)) - { - return; - } - - _patientCompartmentId = 
_model.GetCompartmentTypeId(KnownCompartmentTypes.Patient); - _encounterCompartmentId = _model.GetCompartmentTypeId(KnownCompartmentTypes.Encounter); - _relatedPersonCompartmentId = _model.GetCompartmentTypeId(KnownCompartmentTypes.RelatedPerson); - _practitionerCompartmentId = _model.GetCompartmentTypeId(KnownCompartmentTypes.Practitioner); - _deviceCompartmentId = _model.GetCompartmentTypeId(KnownCompartmentTypes.Device); - - Volatile.Write(ref _initialized, true); - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkCompositeSearchParameterRowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkCompositeSearchParameterRowGenerator.cs deleted file mode 100644 index ae54223182..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkCompositeSearchParameterRowGenerator.cs +++ /dev/null @@ -1,83 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-// ------------------------------------------------------------------------------------------------- - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Reflection; -using System.Runtime.CompilerServices; -using Microsoft.Health.Core.Extensions; -using Microsoft.Health.Fhir.Core.Features.Search; -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using LinqExpression = System.Linq.Expressions.Expression; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal abstract class BulkCompositeSearchParameterRowGenerator : BulkSearchParameterRowGenerator - where TSearchValue : ITuple - where TRow : struct - { - private readonly Func, TSearchValue> _converter = CreateConverterFunc(); - - protected BulkCompositeSearchParameterRowGenerator(SqlServerFhirModel model, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - } - - protected override IEnumerable ConvertSearchValue(SearchIndexEntry entry) - { - var compositeSearchValue = (CompositeSearchValue)entry.Value; - - foreach (var components in compositeSearchValue.Components.CartesianProduct()) - { - using (IEnumerator enumerator = components.GetEnumerator()) - { - yield return _converter(new EnumeratorWrapper(enumerator)); - } - } - } - - /// - /// Creates a function that takes the components of a composite search parameter as an - /// enumerator and creates a ValueTuple with fields for each component. - /// - /// The generated function. 
- private static Func, TSearchValue> CreateConverterFunc() - { - var parameter = LinqExpression.Parameter(typeof(EnumeratorWrapper)); - MethodInfo nextValueMethod = parameter.Type.GetMethod(nameof(EnumeratorWrapper.NextValue)); - ConstructorInfo constructorInfo = typeof(TSearchValue).GetConstructors().Single(); - - return LinqExpression.Lambda, TSearchValue>>( - LinqExpression.New( - constructorInfo, - constructorInfo.GetParameters().Select(p => LinqExpression.Convert( - LinqExpression.Call(parameter, nextValueMethod), - p.ParameterType))), - parameter).Compile(); - } - - /// - /// Helper class to make the generated code in - /// a little simpler. - /// - /// The element type - private struct EnumeratorWrapper - { - private readonly IEnumerator _enumerator; - - public EnumeratorWrapper(IEnumerator enumerator) - { - _enumerator = enumerator; - } - - public T NextValue() - { - _enumerator.MoveNext(); - return _enumerator.Current; - } - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkDateTimeSearchParameterV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkDateTimeSearchParameterV1RowGenerator.cs deleted file mode 100644 index c1c2d42d59..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkDateTimeSearchParameterV1RowGenerator.cs +++ /dev/null @@ -1,49 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-// ------------------------------------------------------------------------------------------------- - -using System; -using Microsoft.Health.Fhir.Core.Features.Search; -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkDateTimeSearchParameterV1RowGenerator : BulkSearchParameterRowGenerator - { - private short _lastUpdatedSearchParamId; - - public BulkDateTimeSearchParameterV1RowGenerator(SqlServerFhirModel model, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - } - - internal override bool TryGenerateRow(int offset, short searchParamId, DateTimeSearchValue searchValue, out BulkDateTimeSearchParamTableTypeV1Row row) - { - // For composite generator contains BulkDateTimeSearchParameterV1RowGenerator, it is possible to call TryGenerateRow before GenerateRow on this Generator. - EnsureInitialized(); - - if (searchParamId == _lastUpdatedSearchParamId) - { - // this value is already stored on the Resource table. 
- row = default; - return false; - } - - row = new BulkDateTimeSearchParamTableTypeV1Row( - offset, - searchParamId, - searchValue.Start, - searchValue.End, - Math.Abs((searchValue.End - searchValue.Start).Ticks) > TimeSpan.TicksPerDay); - - return true; - } - - protected override void Initialize() - { - _lastUpdatedSearchParamId = Model.GetSearchParamId(SearchParameterNames.LastUpdatedUri); - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkDateTimeSearchParameterV2RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkDateTimeSearchParameterV2RowGenerator.cs deleted file mode 100644 index 72165e27b6..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkDateTimeSearchParameterV2RowGenerator.cs +++ /dev/null @@ -1,48 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-// ------------------------------------------------------------------------------------------------- - -using System; -using Microsoft.Health.Fhir.Core.Features.Search; -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkDateTimeSearchParameterV2RowGenerator : BulkSearchParameterRowGenerator - { - private short _lastUpdatedSearchParamId; - - public BulkDateTimeSearchParameterV2RowGenerator(SqlServerFhirModel model, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - } - - internal override bool TryGenerateRow(int offset, short searchParamId, DateTimeSearchValue searchValue, out BulkDateTimeSearchParamTableTypeV2Row row) - { - if (searchParamId == _lastUpdatedSearchParamId) - { - // this value is already stored on the Resource table. - row = default; - return false; - } - - row = new BulkDateTimeSearchParamTableTypeV2Row( - offset, - searchParamId, - searchValue.Start, - searchValue.End, - Math.Abs((searchValue.End - searchValue.Start).Ticks) > TimeSpan.TicksPerDay, - searchValue.IsMin, - searchValue.IsMax); - - return true; - } - - protected override void Initialize() - { - _lastUpdatedSearchParamId = Model.GetSearchParamId(SearchParameterNames.LastUpdatedUri); - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkNumberSearchParameterV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkNumberSearchParameterV1RowGenerator.cs deleted file mode 100644 index 52c40a4209..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkNumberSearchParameterV1RowGenerator.cs +++ /dev/null @@ -1,32 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft 
Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. -// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkNumberSearchParameterV1RowGenerator : BulkSearchParameterRowGenerator - { - public BulkNumberSearchParameterV1RowGenerator(SqlServerFhirModel model, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - } - - internal override bool TryGenerateRow(int offset, short searchParamId, NumberSearchValue searchValue, out BulkNumberSearchParamTableTypeV1Row row) - { - var singleValue = searchValue.Low == searchValue.High ? searchValue.Low : null; - - row = new BulkNumberSearchParamTableTypeV1Row( - offset, - searchParamId, - singleValue.HasValue ? singleValue : null, - singleValue.HasValue ? singleValue : searchValue.Low ?? (decimal?)VLatest.NumberSearchParam.LowValue.MinValue, - singleValue.HasValue ? singleValue : searchValue.High ?? (decimal?)VLatest.NumberSearchParam.HighValue.MaxValue); - - return true; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkQuantitySearchParameterV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkQuantitySearchParameterV1RowGenerator.cs deleted file mode 100644 index 61f113b437..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkQuantitySearchParameterV1RowGenerator.cs +++ /dev/null @@ -1,34 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). 
See LICENSE in the repo root for license information. -// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkQuantitySearchParameterV1RowGenerator : BulkSearchParameterRowGenerator - { - public BulkQuantitySearchParameterV1RowGenerator(SqlServerFhirModel model, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - } - - internal override bool TryGenerateRow(int offset, short searchParamId, QuantitySearchValue searchValue, out BulkQuantitySearchParamTableTypeV1Row row) - { - var singleValue = searchValue.Low == searchValue.High ? searchValue.Low : null; - - row = new BulkQuantitySearchParamTableTypeV1Row( - offset, - searchParamId, - string.IsNullOrWhiteSpace(searchValue.System) ? default(int?) : Model.GetSystemId(searchValue.System), - string.IsNullOrWhiteSpace(searchValue.Code) ? default(int?) : Model.GetQuantityCodeId(searchValue.Code), - singleValue.HasValue ? singleValue : null, - singleValue.HasValue ? singleValue : searchValue.Low ?? (decimal?)VLatest.QuantitySearchParam.LowValue.MinValue, - singleValue.HasValue ? singleValue : searchValue.High ?? 
(decimal?)VLatest.QuantitySearchParam.HighValue.MaxValue); - - return true; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkReferenceSearchParameterV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkReferenceSearchParameterV1RowGenerator.cs deleted file mode 100644 index 3c0ebf249f..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkReferenceSearchParameterV1RowGenerator.cs +++ /dev/null @@ -1,31 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. -// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkReferenceSearchParameterV1RowGenerator : BulkSearchParameterRowGenerator - { - public BulkReferenceSearchParameterV1RowGenerator(SqlServerFhirModel model, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - } - - internal override bool TryGenerateRow(int offset, short searchParamId, ReferenceSearchValue searchValue, out BulkReferenceSearchParamTableTypeV1Row row) - { - row = new BulkReferenceSearchParamTableTypeV1Row( - offset, - searchParamId, - searchValue.BaseUri?.ToString(), - searchValue.ResourceType == null ? 
null : Model.GetResourceTypeId(searchValue.ResourceType), - searchValue.ResourceId, - ReferenceResourceVersion: null); - - return true; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkReferenceTokenCompositeSearchParameterV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkReferenceTokenCompositeSearchParameterV1RowGenerator.cs deleted file mode 100644 index fe5ce3e560..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkReferenceTokenCompositeSearchParameterV1RowGenerator.cs +++ /dev/null @@ -1,49 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. -// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkReferenceTokenCompositeSearchParameterV1RowGenerator : BulkCompositeSearchParameterRowGenerator<(ReferenceSearchValue component1, TokenSearchValue component2), BulkReferenceTokenCompositeSearchParamTableTypeV1Row> - { - private readonly BulkReferenceSearchParameterV1RowGenerator _referenceRowGenerator; - private readonly BulkTokenSearchParameterV1RowGenerator _tokenRowGenerator; - - public BulkReferenceTokenCompositeSearchParameterV1RowGenerator( - SqlServerFhirModel model, - BulkReferenceSearchParameterV1RowGenerator referenceRowGenerator, - BulkTokenSearchParameterV1RowGenerator tokenRowGenerator, - SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - _referenceRowGenerator = referenceRowGenerator; 
- _tokenRowGenerator = tokenRowGenerator; - } - - internal override bool TryGenerateRow(int offset, short searchParamId, (ReferenceSearchValue component1, TokenSearchValue component2) searchValue, out BulkReferenceTokenCompositeSearchParamTableTypeV1Row row) - { - if (_referenceRowGenerator.TryGenerateRow(offset, searchParamId, searchValue.component1, out var reference1Row) && - _tokenRowGenerator.TryGenerateRow(offset, searchParamId, searchValue.component2, out var token2Row)) - { - row = new BulkReferenceTokenCompositeSearchParamTableTypeV1Row( - offset, - searchParamId, - reference1Row.BaseUri, - reference1Row.ReferenceResourceTypeId, - reference1Row.ReferenceResourceId, - reference1Row.ReferenceResourceVersion, - token2Row.SystemId, - token2Row.Code); - - return true; - } - - row = default; - return false; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkReferenceTokenCompositeSearchParameterV2RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkReferenceTokenCompositeSearchParameterV2RowGenerator.cs deleted file mode 100644 index 51892ef273..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkReferenceTokenCompositeSearchParameterV2RowGenerator.cs +++ /dev/null @@ -1,50 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkReferenceTokenCompositeSearchParameterV2RowGenerator : BulkCompositeSearchParameterRowGenerator<(ReferenceSearchValue component1, TokenSearchValue component2), BulkReferenceTokenCompositeSearchParamTableTypeV2Row> - { - private readonly BulkReferenceSearchParameterV1RowGenerator _referenceRowGenerator; - private readonly BulkTokenSearchParameterV2RowGenerator _tokenRowGenerator; - - public BulkReferenceTokenCompositeSearchParameterV2RowGenerator( - SqlServerFhirModel model, - BulkReferenceSearchParameterV1RowGenerator referenceRowGenerator, - BulkTokenSearchParameterV2RowGenerator tokenRowGenerator, - SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - _referenceRowGenerator = referenceRowGenerator; - _tokenRowGenerator = tokenRowGenerator; - } - - internal override bool TryGenerateRow(int offset, short searchParamId, (ReferenceSearchValue component1, TokenSearchValue component2) searchValue, out BulkReferenceTokenCompositeSearchParamTableTypeV2Row row) - { - if (_referenceRowGenerator.TryGenerateRow(offset, searchParamId, searchValue.component1, out var reference1Row) && - _tokenRowGenerator.TryGenerateRow(offset, searchParamId, searchValue.component2, out var token2Row)) - { - row = new BulkReferenceTokenCompositeSearchParamTableTypeV2Row( - offset, - searchParamId, - reference1Row.BaseUri, - reference1Row.ReferenceResourceTypeId, - reference1Row.ReferenceResourceId, - reference1Row.ReferenceResourceVersion, - token2Row.SystemId, - token2Row.Code, - token2Row.CodeOverflow); - - return true; - } - - row = default; - return false; - } - } -} diff --git 
a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkReindexResourceV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkReindexResourceV1RowGenerator.cs deleted file mode 100644 index 0e8818acc1..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkReindexResourceV1RowGenerator.cs +++ /dev/null @@ -1,48 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. -// ------------------------------------------------------------------------------------------------- - -using System.Collections.Generic; -using EnsureThat; -using Microsoft.Health.Fhir.Core.Features.Persistence; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; -using Microsoft.Health.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkReindexResourceV1RowGenerator : ITableValuedParameterRowGenerator, BulkReindexResourceTableTypeV1Row> - { - private readonly ISqlServerFhirModel _model; - - public BulkReindexResourceV1RowGenerator(ISqlServerFhirModel model) - { - EnsureArg.IsNotNull(model, nameof(model)); - _model = model; - } - - public IEnumerable GenerateRows(IReadOnlyList input) - { - for (var index = 0; index < input.Count; index++) - { - ResourceWrapper resource = input[index]; - var resourceTypeId = _model.GetResourceTypeId(resource.ResourceTypeName); - var resourceId = resource.ResourceId; - - int etag = 0; - if (resource.Version != null && !int.TryParse(resource.Version, out etag)) - { - // Set the etag to a sentinel value to enable expected failure paths when updating with both existing and nonexistent resources. 
- etag = -1; - } - - yield return new BulkReindexResourceTableTypeV1Row( - index, - resourceTypeId, - resourceId, - resource.Version == null ? null : etag, - resource.SearchParameterHash); - } - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkResourceWriteClaimV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkResourceWriteClaimV1RowGenerator.cs deleted file mode 100644 index 32cf1b0b46..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkResourceWriteClaimV1RowGenerator.cs +++ /dev/null @@ -1,53 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. -// ------------------------------------------------------------------------------------------------- - -using System.Collections.Generic; -using System.Linq; -using EnsureThat; -using Microsoft.Health.Fhir.Core.Features.Persistence; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; -using Microsoft.Health.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkResourceWriteClaimV1RowGenerator : ITableValuedParameterRowGenerator, BulkResourceWriteClaimTableTypeV1Row> - { - private readonly ISqlServerFhirModel _model; - private readonly SearchParameterToSearchValueTypeMap _searchParameterTypeMap; - - public BulkResourceWriteClaimV1RowGenerator(ISqlServerFhirModel model, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - { - EnsureArg.IsNotNull(model, nameof(model)); - EnsureArg.IsNotNull(searchParameterTypeMap, nameof(searchParameterTypeMap)); - - _model = model; - _searchParameterTypeMap = searchParameterTypeMap; - } - - public IEnumerable GenerateRows(IReadOnlyList resources) - { 
- for (var index = 0; index < resources.Count; index++) - { - ResourceWrapper resource = resources[index]; - - var resourceMetadata = new ResourceMetadata( - resource.CompartmentIndices, - resource.SearchIndices?.ToLookup(e => _searchParameterTypeMap.GetSearchValueType(e)), - resource.LastModifiedClaims); - - IReadOnlyCollection> writeClaims = resourceMetadata.WriteClaims; - if (writeClaims == null) - { - yield break; - } - - foreach (var claim in writeClaims) - { - yield return new BulkResourceWriteClaimTableTypeV1Row(index, _model.GetClaimTypeId(claim.Key), claim.Value); - } - } - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkSearchParameterRowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkSearchParameterRowGenerator.cs deleted file mode 100644 index 0c6f5d0223..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkSearchParameterRowGenerator.cs +++ /dev/null @@ -1,95 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-// ------------------------------------------------------------------------------------------------- - -using System.Collections.Generic; -using System.Linq; -using System.Reflection; -using System.Threading; -using EnsureThat; -using Microsoft.Health.Fhir.Core.Features.Persistence; -using Microsoft.Health.Fhir.Core.Features.Search; -using Microsoft.Health.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal abstract class BulkSearchParameterRowGenerator : ITableValuedParameterRowGenerator, TRow> - where TRow : struct - { - private readonly SearchParameterToSearchValueTypeMap _searchParameterTypeMap; - private readonly bool _isConvertSearchValueOverridden; - private bool _isInitialized; - - protected BulkSearchParameterRowGenerator(SqlServerFhirModel model, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - { - EnsureArg.IsNotNull(model, nameof(model)); - EnsureArg.IsNotNull(searchParameterTypeMap, nameof(searchParameterTypeMap)); - - Model = model; - _searchParameterTypeMap = searchParameterTypeMap; - _isConvertSearchValueOverridden = GetType().GetMethod(nameof(ConvertSearchValue), BindingFlags.Instance | BindingFlags.NonPublic).DeclaringType != typeof(SearchParameterRowGenerator); - } - - protected SqlServerFhirModel Model { get; } - - public virtual IEnumerable GenerateRows(IReadOnlyList input) - { - EnsureInitialized(); - - for (var index = 0; index < input.Count; index++) - { - ResourceWrapper resource = input[index]; - - var resourceMetadata = new ResourceMetadata( - resource.CompartmentIndices, - resource.SearchIndices?.ToLookup(e => _searchParameterTypeMap.GetSearchValueType(e)), - resource.LastModifiedClaims); - - foreach (SearchIndexEntry v in resourceMetadata.GetSearchIndexEntriesByType(typeof(TSearchValue))) - { - short searchParamId = Model.GetSearchParamId(v.SearchParameter.Url); - - if (!_isConvertSearchValueOverridden) - { - // save an array allocation - if 
(TryGenerateRow(index, searchParamId, (TSearchValue)v.Value, out TRow row)) - { - yield return row; - } - } - else - { - foreach (var searchValue in ConvertSearchValue(v)) - { - if (TryGenerateRow(index, searchParamId, searchValue, out TRow row)) - { - yield return row; - } - } - } - } - } - } - - protected void EnsureInitialized() - { - if (Volatile.Read(ref _isInitialized)) - { - return; - } - - Initialize(); - - Volatile.Write(ref _isInitialized, true); - } - - protected virtual IEnumerable ConvertSearchValue(SearchIndexEntry entry) => new[] { (TSearchValue)entry.Value }; - - protected virtual void Initialize() - { - } - - internal abstract bool TryGenerateRow(int offset, short searchParamId, TSearchValue searchValue, out TRow row); - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkStringSearchParameterV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkStringSearchParameterV1RowGenerator.cs deleted file mode 100644 index 1003d058bb..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkStringSearchParameterV1RowGenerator.cs +++ /dev/null @@ -1,40 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkStringSearchParameterV1RowGenerator : BulkSearchParameterRowGenerator - { - private readonly int _indexedTextMaxLength = (int)VLatest.StringSearchParam.Text.Metadata.MaxLength; - - public BulkStringSearchParameterV1RowGenerator(SqlServerFhirModel model, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - } - - internal override bool TryGenerateRow(int offset, short searchParamId, StringSearchValue searchValue, out BulkStringSearchParamTableTypeV1Row row) - { - string indexedPrefix; - string overflow; - if (searchValue.String.Length > _indexedTextMaxLength) - { - // TODO: this truncation can break apart grapheme clusters. - indexedPrefix = searchValue.String.Substring(0, _indexedTextMaxLength); - overflow = searchValue.String; - } - else - { - indexedPrefix = searchValue.String; - overflow = null; - } - - row = new BulkStringSearchParamTableTypeV1Row(offset, searchParamId, indexedPrefix, overflow); - return true; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkStringSearchParameterV2RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkStringSearchParameterV2RowGenerator.cs deleted file mode 100644 index 368cf15d89..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkStringSearchParameterV2RowGenerator.cs +++ /dev/null @@ -1,40 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). 
See LICENSE in the repo root for license information. -// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkStringSearchParameterV2RowGenerator : BulkSearchParameterRowGenerator - { - private readonly int _indexedTextMaxLength = (int)VLatest.StringSearchParam.Text.Metadata.MaxLength; - - public BulkStringSearchParameterV2RowGenerator(SqlServerFhirModel model, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - } - - internal override bool TryGenerateRow(int offset, short searchParamId, StringSearchValue searchValue, out BulkStringSearchParamTableTypeV2Row row) - { - string indexedPrefix; - string overflow; - if (searchValue.String.Length > _indexedTextMaxLength) - { - // TODO: this truncation can break apart grapheme clusters. 
- indexedPrefix = searchValue.String.Substring(0, _indexedTextMaxLength); - overflow = searchValue.String; - } - else - { - indexedPrefix = searchValue.String; - overflow = null; - } - - row = new BulkStringSearchParamTableTypeV2Row(offset, searchParamId, indexedPrefix, overflow, IsMin: searchValue.IsMin, IsMax: searchValue.IsMax); - return true; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenDateTimeCompositeSearchParameterV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenDateTimeCompositeSearchParameterV1RowGenerator.cs deleted file mode 100644 index 8b7b579b2d..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenDateTimeCompositeSearchParameterV1RowGenerator.cs +++ /dev/null @@ -1,52 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkTokenDateTimeCompositeSearchParameterV1RowGenerator : BulkCompositeSearchParameterRowGenerator<(TokenSearchValue component1, DateTimeSearchValue component2), BulkTokenDateTimeCompositeSearchParamTableTypeV1Row> - { - private readonly BulkTokenSearchParameterV1RowGenerator _tokenRowGenerator; - private readonly BulkDateTimeSearchParameterV1RowGenerator _dateTimeV1RowGenerator; - - public BulkTokenDateTimeCompositeSearchParameterV1RowGenerator( - SqlServerFhirModel model, - BulkTokenSearchParameterV1RowGenerator tokenRowGenerator, - BulkDateTimeSearchParameterV1RowGenerator dateTimeV1RowGenerator, - SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - _tokenRowGenerator = tokenRowGenerator; - _dateTimeV1RowGenerator = dateTimeV1RowGenerator; - } - - internal override bool TryGenerateRow( - int offset, - short searchParamId, - (TokenSearchValue component1, DateTimeSearchValue component2) searchValue, - out BulkTokenDateTimeCompositeSearchParamTableTypeV1Row row) - { - if (_tokenRowGenerator.TryGenerateRow(offset, default, searchValue.component1, out var token1Row) && - _dateTimeV1RowGenerator.TryGenerateRow(offset, default, searchValue.component2, out var token2Row)) - { - row = new BulkTokenDateTimeCompositeSearchParamTableTypeV1Row( - offset, - searchParamId, - token1Row.SystemId, - token1Row.Code, - token2Row.StartDateTime, - token2Row.EndDateTime, - token2Row.IsLongerThanADay); - - return true; - } - - row = default; - return false; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenDateTimeCompositeSearchParameterV2RowGenerator.cs 
b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenDateTimeCompositeSearchParameterV2RowGenerator.cs deleted file mode 100644 index 32f92681cf..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenDateTimeCompositeSearchParameterV2RowGenerator.cs +++ /dev/null @@ -1,53 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. -// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkTokenDateTimeCompositeSearchParameterV2RowGenerator : BulkCompositeSearchParameterRowGenerator<(TokenSearchValue component1, DateTimeSearchValue component2), BulkTokenDateTimeCompositeSearchParamTableTypeV2Row> - { - private readonly BulkTokenSearchParameterV2RowGenerator _tokenRowGenerator; - private readonly BulkDateTimeSearchParameterV1RowGenerator _dateTimeV1RowGenerator; - - public BulkTokenDateTimeCompositeSearchParameterV2RowGenerator( - SqlServerFhirModel model, - BulkTokenSearchParameterV2RowGenerator tokenRowGenerator, - BulkDateTimeSearchParameterV1RowGenerator dateTimeV1RowGenerator, - SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - _tokenRowGenerator = tokenRowGenerator; - _dateTimeV1RowGenerator = dateTimeV1RowGenerator; - } - - internal override bool TryGenerateRow( - int offset, - short searchParamId, - (TokenSearchValue component1, DateTimeSearchValue component2) searchValue, - out BulkTokenDateTimeCompositeSearchParamTableTypeV2Row row) - { - if 
(_tokenRowGenerator.TryGenerateRow(offset, default, searchValue.component1, out var token1Row) && - _dateTimeV1RowGenerator.TryGenerateRow(offset, default, searchValue.component2, out var token2Row)) - { - row = new BulkTokenDateTimeCompositeSearchParamTableTypeV2Row( - offset, - searchParamId, - token1Row.SystemId, - token1Row.Code, - token1Row.CodeOverflow, - token2Row.StartDateTime, - token2Row.EndDateTime, - token2Row.IsLongerThanADay); - - return true; - } - - row = default; - return false; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenNumberNumberCompositeSearchParameterV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenNumberNumberCompositeSearchParameterV1RowGenerator.cs deleted file mode 100644 index f83933b42e..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenNumberNumberCompositeSearchParameterV1RowGenerator.cs +++ /dev/null @@ -1,54 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkTokenNumberNumberCompositeSearchParameterV1RowGenerator : BulkCompositeSearchParameterRowGenerator<(TokenSearchValue component1, NumberSearchValue component2, NumberSearchValue component3), BulkTokenNumberNumberCompositeSearchParamTableTypeV1Row> - { - private readonly BulkTokenSearchParameterV1RowGenerator _tokenRowGenerator; - private readonly BulkNumberSearchParameterV1RowGenerator _numberV1RowGenerator; - - public BulkTokenNumberNumberCompositeSearchParameterV1RowGenerator(SqlServerFhirModel model, BulkTokenSearchParameterV1RowGenerator tokenRowGenerator, BulkNumberSearchParameterV1RowGenerator numberV1RowGenerator, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - _tokenRowGenerator = tokenRowGenerator; - _numberV1RowGenerator = numberV1RowGenerator; - } - - internal override bool TryGenerateRow( - int offset, - short searchParamId, - (TokenSearchValue component1, NumberSearchValue component2, NumberSearchValue component3) searchValue, - out BulkTokenNumberNumberCompositeSearchParamTableTypeV1Row row) - { - if (_tokenRowGenerator.TryGenerateRow(default, default, searchValue.component1, out var token1Row) && - _numberV1RowGenerator.TryGenerateRow(default, default, searchValue.component2, out var token2Row) && - _numberV1RowGenerator.TryGenerateRow(default, default, searchValue.component3, out var token3Row)) - { - bool hasRange = token2Row.SingleValue == null || token3Row.SingleValue == null; - row = new BulkTokenNumberNumberCompositeSearchParamTableTypeV1Row( - offset, - searchParamId, - token1Row.SystemId, - token1Row.Code, - hasRange ? 
null : token2Row.SingleValue, - token2Row.LowValue ?? token2Row.SingleValue, - token2Row.HighValue ?? token2Row.SingleValue, - hasRange ? null : token3Row.SingleValue, - token3Row.LowValue ?? token3Row.SingleValue, - token3Row.HighValue ?? token3Row.SingleValue, - HasRange: hasRange); - - return true; - } - - row = default; - return false; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenNumberNumberCompositeSearchParameterV2RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenNumberNumberCompositeSearchParameterV2RowGenerator.cs deleted file mode 100644 index 37c78e1b24..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenNumberNumberCompositeSearchParameterV2RowGenerator.cs +++ /dev/null @@ -1,55 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkTokenNumberNumberCompositeSearchParameterV2RowGenerator : BulkCompositeSearchParameterRowGenerator<(TokenSearchValue component1, NumberSearchValue component2, NumberSearchValue component3), BulkTokenNumberNumberCompositeSearchParamTableTypeV2Row> - { - private readonly BulkTokenSearchParameterV2RowGenerator _tokenRowGenerator; - private readonly BulkNumberSearchParameterV1RowGenerator _numberV1RowGenerator; - - public BulkTokenNumberNumberCompositeSearchParameterV2RowGenerator(SqlServerFhirModel model, BulkTokenSearchParameterV2RowGenerator tokenRowGenerator, BulkNumberSearchParameterV1RowGenerator numberV1RowGenerator, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - _tokenRowGenerator = tokenRowGenerator; - _numberV1RowGenerator = numberV1RowGenerator; - } - - internal override bool TryGenerateRow( - int offset, - short searchParamId, - (TokenSearchValue component1, NumberSearchValue component2, NumberSearchValue component3) searchValue, - out BulkTokenNumberNumberCompositeSearchParamTableTypeV2Row row) - { - if (_tokenRowGenerator.TryGenerateRow(default, default, searchValue.component1, out var token1Row) && - _numberV1RowGenerator.TryGenerateRow(default, default, searchValue.component2, out var token2Row) && - _numberV1RowGenerator.TryGenerateRow(default, default, searchValue.component3, out var token3Row)) - { - bool hasRange = token2Row.SingleValue == null || token3Row.SingleValue == null; - row = new BulkTokenNumberNumberCompositeSearchParamTableTypeV2Row( - offset, - searchParamId, - token1Row.SystemId, - token1Row.Code, - token1Row.CodeOverflow, - hasRange ? 
null : token2Row.SingleValue, - token2Row.LowValue ?? token2Row.SingleValue, - token2Row.HighValue ?? token2Row.SingleValue, - hasRange ? null : token3Row.SingleValue, - token3Row.LowValue ?? token3Row.SingleValue, - token3Row.HighValue ?? token3Row.SingleValue, - HasRange: hasRange); - - return true; - } - - row = default; - return false; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenQuantityCompositeSearchParameterV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenQuantityCompositeSearchParameterV1RowGenerator.cs deleted file mode 100644 index 144e8c6028..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenQuantityCompositeSearchParameterV1RowGenerator.cs +++ /dev/null @@ -1,54 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkTokenQuantityCompositeSearchParameterV1RowGenerator : BulkCompositeSearchParameterRowGenerator<(TokenSearchValue component1, QuantitySearchValue component2), BulkTokenQuantityCompositeSearchParamTableTypeV1Row> - { - private readonly BulkTokenSearchParameterV1RowGenerator _tokenRowGenerator; - private readonly BulkQuantitySearchParameterV1RowGenerator _quantityV1RowGenerator; - - public BulkTokenQuantityCompositeSearchParameterV1RowGenerator( - SqlServerFhirModel model, - BulkTokenSearchParameterV1RowGenerator tokenRowGenerator, - BulkQuantitySearchParameterV1RowGenerator quantityV1RowGenerator, - SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - _tokenRowGenerator = tokenRowGenerator; - _quantityV1RowGenerator = quantityV1RowGenerator; - } - - internal override bool TryGenerateRow( - int offset, - short searchParamId, - (TokenSearchValue component1, QuantitySearchValue component2) searchValue, - out BulkTokenQuantityCompositeSearchParamTableTypeV1Row row) - { - if (_tokenRowGenerator.TryGenerateRow(default, default, searchValue.component1, out var token1Row) && - _quantityV1RowGenerator.TryGenerateRow(default, default, searchValue.component2, out var token2Row)) - { - row = new BulkTokenQuantityCompositeSearchParamTableTypeV1Row( - offset, - searchParamId, - token1Row.SystemId, - token1Row.Code, - token2Row.SystemId, - token2Row.QuantityCodeId, - token2Row.SingleValue, - token2Row.LowValue, - token2Row.HighValue); - - return true; - } - - row = default; - return false; - } - } -} diff --git 
a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenQuantityCompositeSearchParameterV2RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenQuantityCompositeSearchParameterV2RowGenerator.cs deleted file mode 100644 index 534c367c86..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenQuantityCompositeSearchParameterV2RowGenerator.cs +++ /dev/null @@ -1,55 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. -// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkTokenQuantityCompositeSearchParameterV2RowGenerator : BulkCompositeSearchParameterRowGenerator<(TokenSearchValue component1, QuantitySearchValue component2), BulkTokenQuantityCompositeSearchParamTableTypeV2Row> - { - private readonly BulkTokenSearchParameterV2RowGenerator _tokenRowGenerator; - private readonly BulkQuantitySearchParameterV1RowGenerator _quantityV1RowGenerator; - - public BulkTokenQuantityCompositeSearchParameterV2RowGenerator( - SqlServerFhirModel model, - BulkTokenSearchParameterV2RowGenerator tokenRowGenerator, - BulkQuantitySearchParameterV1RowGenerator quantityV1RowGenerator, - SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - _tokenRowGenerator = tokenRowGenerator; - _quantityV1RowGenerator = quantityV1RowGenerator; - } - - internal override bool TryGenerateRow( - int offset, - short searchParamId, - (TokenSearchValue component1, 
QuantitySearchValue component2) searchValue, - out BulkTokenQuantityCompositeSearchParamTableTypeV2Row row) - { - if (_tokenRowGenerator.TryGenerateRow(default, default, searchValue.component1, out var token1Row) && - _quantityV1RowGenerator.TryGenerateRow(default, default, searchValue.component2, out var token2Row)) - { - row = new BulkTokenQuantityCompositeSearchParamTableTypeV2Row( - offset, - searchParamId, - token1Row.SystemId, - token1Row.Code, - token1Row.CodeOverflow, - token2Row.SystemId, - token2Row.QuantityCodeId, - token2Row.SingleValue, - token2Row.LowValue, - token2Row.HighValue); - - return true; - } - - row = default; - return false; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenSearchParameterV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenSearchParameterV1RowGenerator.cs deleted file mode 100644 index 4993d7ef6f..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenSearchParameterV1RowGenerator.cs +++ /dev/null @@ -1,45 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search; -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkTokenSearchParameterV1RowGenerator : BulkSearchParameterRowGenerator - { - private short _resourceIdSearchParamId; - - public BulkTokenSearchParameterV1RowGenerator(SqlServerFhirModel model, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - } - - internal override bool TryGenerateRow(int offset, short searchParamId, TokenSearchValue searchValue, out BulkTokenSearchParamTableTypeV1Row row) - { - // For composite generator contains BulkTokenSearchParameterV1RowGenerator, it is possible to call TryGenerateRow before GenerateRow on this Generator. - EnsureInitialized(); - - // don't store if the code is empty or if this is the Resource _id parameter. The id is already maintained on the Resource table. - if (string.IsNullOrWhiteSpace(searchValue.Code) || - searchParamId == _resourceIdSearchParamId) - { - row = default; - return false; - } - - row = new BulkTokenSearchParamTableTypeV1Row( - offset, - searchParamId, - searchValue.System == null ? 
null : Model.GetSystemId(searchValue.System), - searchValue.Code); - - return true; - } - - protected override void Initialize() => _resourceIdSearchParamId = Model.GetSearchParamId(SearchParameterNames.IdUri); - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenSearchParameterV2RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenSearchParameterV2RowGenerator.cs deleted file mode 100644 index 36b68c22d9..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenSearchParameterV2RowGenerator.cs +++ /dev/null @@ -1,61 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. -// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search; -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkTokenSearchParameterV2RowGenerator : BulkSearchParameterRowGenerator - { - private short _resourceIdSearchParamId; - private readonly int _indexedCodeMaxLength = (int)VLatest.TokenSearchParam.Code.Metadata.MaxLength; - - public BulkTokenSearchParameterV2RowGenerator(SqlServerFhirModel model, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - } - - internal override bool TryGenerateRow(int offset, short searchParamId, TokenSearchValue searchValue, out BulkTokenSearchParamTableTypeV2Row row) - { - // For composite generator contains BulkTokenSearchParameterV1RowGenerator, it is possible to call TryGenerateRow before GenerateRow on 
this Generator. - EnsureInitialized(); - - // don't store if the code is empty or if this is the Resource _id parameter. The id is already maintained on the Resource table. - if (string.IsNullOrWhiteSpace(searchValue.Code) || - searchParamId == _resourceIdSearchParamId) - { - row = default; - return false; - } - - string indexedPrefix; - string overflow; - if (searchValue.Code.Length > _indexedCodeMaxLength) - { - // TODO: this truncation can break apart grapheme clusters. - indexedPrefix = searchValue.Code.Substring(0, _indexedCodeMaxLength); - overflow = searchValue.Code.Substring(_indexedCodeMaxLength); - } - else - { - indexedPrefix = searchValue.Code; - overflow = null; - } - - row = new BulkTokenSearchParamTableTypeV2Row( - offset, - searchParamId, - searchValue.System == null ? null : Model.GetSystemId(searchValue.System), - indexedPrefix, - overflow); - - return true; - } - - protected override void Initialize() => _resourceIdSearchParamId = Model.GetSearchParamId(SearchParameterNames.IdUri); - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenStringCompositeSearchParameterV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenStringCompositeSearchParameterV1RowGenerator.cs deleted file mode 100644 index 9f9e12d1b5..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenStringCompositeSearchParameterV1RowGenerator.cs +++ /dev/null @@ -1,51 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkTokenStringCompositeSearchParameterV1RowGenerator : BulkCompositeSearchParameterRowGenerator<(TokenSearchValue component1, StringSearchValue component2), BulkTokenStringCompositeSearchParamTableTypeV1Row> - { - private readonly BulkTokenSearchParameterV1RowGenerator _tokenRowGenerator; - private readonly BulkStringSearchParameterV1RowGenerator _stringV1RowGenerator; - - public BulkTokenStringCompositeSearchParameterV1RowGenerator( - SqlServerFhirModel model, - BulkTokenSearchParameterV1RowGenerator tokenRowGenerator, - BulkStringSearchParameterV1RowGenerator stringV1RowGenerator, - SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - _tokenRowGenerator = tokenRowGenerator; - _stringV1RowGenerator = stringV1RowGenerator; - } - - internal override bool TryGenerateRow( - int offset, - short searchParamId, - (TokenSearchValue component1, StringSearchValue component2) searchValue, - out BulkTokenStringCompositeSearchParamTableTypeV1Row row) - { - if (_tokenRowGenerator.TryGenerateRow(default, default, searchValue.component1, out var token1Row) && - _stringV1RowGenerator.TryGenerateRow(default, default, searchValue.component2, out var string2Row)) - { - row = new BulkTokenStringCompositeSearchParamTableTypeV1Row( - offset, - searchParamId, - token1Row.SystemId, - token1Row.Code, - string2Row.Text, - TextOverflow2: string2Row.TextOverflow); - - return true; - } - - row = default; - return false; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenStringCompositeSearchParameterV2RowGenerator.cs 
b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenStringCompositeSearchParameterV2RowGenerator.cs deleted file mode 100644 index 74df141f46..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenStringCompositeSearchParameterV2RowGenerator.cs +++ /dev/null @@ -1,52 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. -// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkTokenStringCompositeSearchParameterV2RowGenerator : BulkCompositeSearchParameterRowGenerator<(TokenSearchValue component1, StringSearchValue component2), BulkTokenStringCompositeSearchParamTableTypeV2Row> - { - private readonly BulkTokenSearchParameterV2RowGenerator _tokenRowGenerator; - private readonly BulkStringSearchParameterV1RowGenerator _stringV1RowGenerator; - - public BulkTokenStringCompositeSearchParameterV2RowGenerator( - SqlServerFhirModel model, - BulkTokenSearchParameterV2RowGenerator tokenRowGenerator, - BulkStringSearchParameterV1RowGenerator stringV1RowGenerator, - SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - _tokenRowGenerator = tokenRowGenerator; - _stringV1RowGenerator = stringV1RowGenerator; - } - - internal override bool TryGenerateRow( - int offset, - short searchParamId, - (TokenSearchValue component1, StringSearchValue component2) searchValue, - out BulkTokenStringCompositeSearchParamTableTypeV2Row row) - { - if (_tokenRowGenerator.TryGenerateRow(default, 
default, searchValue.component1, out var token1Row) && - _stringV1RowGenerator.TryGenerateRow(default, default, searchValue.component2, out var string2Row)) - { - row = new BulkTokenStringCompositeSearchParamTableTypeV2Row( - offset, - searchParamId, - token1Row.SystemId, - token1Row.Code, - CodeOverflow1: token1Row.CodeOverflow, - string2Row.Text, - TextOverflow2: string2Row.TextOverflow); - - return true; - } - - row = default; - return false; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenTextSearchParameterV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenTextSearchParameterV1RowGenerator.cs deleted file mode 100644 index 82a62024d2..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenTextSearchParameterV1RowGenerator.cs +++ /dev/null @@ -1,30 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkTokenTextSearchParameterV1RowGenerator : BulkSearchParameterRowGenerator - { - public BulkTokenTextSearchParameterV1RowGenerator(SqlServerFhirModel model, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - } - - internal override bool TryGenerateRow(int offset, short searchParamId, TokenSearchValue searchValue, out BulkTokenTextTableTypeV1Row row) - { - if (string.IsNullOrWhiteSpace(searchValue.Text)) - { - row = default; - return false; - } - - row = new BulkTokenTextTableTypeV1Row(offset, searchParamId, searchValue.Text); - return true; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenTokenCompositeSearchParameterV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenTokenCompositeSearchParameterV1RowGenerator.cs deleted file mode 100644 index 358ed25b6e..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenTokenCompositeSearchParameterV1RowGenerator.cs +++ /dev/null @@ -1,41 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkTokenTokenCompositeSearchParameterV1RowGenerator : BulkCompositeSearchParameterRowGenerator<(TokenSearchValue component1, TokenSearchValue component2), BulkTokenTokenCompositeSearchParamTableTypeV1Row> - { - private readonly BulkTokenSearchParameterV1RowGenerator _tokenRowGenerator; - - public BulkTokenTokenCompositeSearchParameterV1RowGenerator(SqlServerFhirModel model, BulkTokenSearchParameterV1RowGenerator tokenRowGenerator, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - _tokenRowGenerator = tokenRowGenerator; - } - - internal override bool TryGenerateRow(int offset, short searchParamId, (TokenSearchValue component1, TokenSearchValue component2) searchValue, out BulkTokenTokenCompositeSearchParamTableTypeV1Row row) - { - if (_tokenRowGenerator.TryGenerateRow(default, default, searchValue.component1, out var token1Row) && - _tokenRowGenerator.TryGenerateRow(default, default, searchValue.component2, out var token2Row)) - { - row = new BulkTokenTokenCompositeSearchParamTableTypeV1Row( - offset, - searchParamId, - token1Row.SystemId, - token1Row.Code, - token2Row.SystemId, - token2Row.Code); - - return true; - } - - row = default; - return false; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenTokenCompositeSearchParameterV2RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenTokenCompositeSearchParameterV2RowGenerator.cs deleted file mode 100644 index 30b69fe94c..0000000000 --- 
a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkTokenTokenCompositeSearchParameterV2RowGenerator.cs +++ /dev/null @@ -1,43 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. -// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkTokenTokenCompositeSearchParameterV2RowGenerator : BulkCompositeSearchParameterRowGenerator<(TokenSearchValue component1, TokenSearchValue component2), BulkTokenTokenCompositeSearchParamTableTypeV2Row> - { - private readonly BulkTokenSearchParameterV2RowGenerator _tokenRowGenerator; - - public BulkTokenTokenCompositeSearchParameterV2RowGenerator(SqlServerFhirModel model, BulkTokenSearchParameterV2RowGenerator tokenRowGenerator, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - _tokenRowGenerator = tokenRowGenerator; - } - - internal override bool TryGenerateRow(int offset, short searchParamId, (TokenSearchValue component1, TokenSearchValue component2) searchValue, out BulkTokenTokenCompositeSearchParamTableTypeV2Row row) - { - if (_tokenRowGenerator.TryGenerateRow(default, default, searchValue.component1, out var token1Row) && - _tokenRowGenerator.TryGenerateRow(default, default, searchValue.component2, out var token2Row)) - { - row = new BulkTokenTokenCompositeSearchParamTableTypeV2Row( - offset, - searchParamId, - token1Row.SystemId, - token1Row.Code, - token1Row.CodeOverflow, - token2Row.SystemId, - token2Row.Code, - token2Row.CodeOverflow); - - return true; - } - - row 
= default; - return false; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkUriSearchParameterV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkUriSearchParameterV1RowGenerator.cs deleted file mode 100644 index 9baf619cda..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/BulkUriSearchParameterV1RowGenerator.cs +++ /dev/null @@ -1,24 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. -// ------------------------------------------------------------------------------------------------- - -using Microsoft.Health.Fhir.Core.Features.Search.SearchValues; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class BulkUriSearchParameterV1RowGenerator : BulkSearchParameterRowGenerator - { - public BulkUriSearchParameterV1RowGenerator(SqlServerFhirModel model, SearchParameterToSearchValueTypeMap searchParameterTypeMap) - : base(model, searchParameterTypeMap) - { - } - - internal override bool TryGenerateRow(int offset, short searchParamId, UriSearchValue searchValue, out BulkUriSearchParamTableTypeV1Row row) - { - row = new BulkUriSearchParamTableTypeV1Row(offset, searchParamId, searchValue.Uri); - return true; - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/SearchParameterStatusV1RowGenerator.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/SearchParameterStatusV1RowGenerator.cs deleted file mode 100644 index 0748d678bd..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/TvpRowGeneration/SearchParameterStatusV1RowGenerator.cs +++ /dev/null @@ 
-1,25 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. -// ------------------------------------------------------------------------------------------------- - -using System.Collections.Generic; -using System.Linq; -using Microsoft.Health.Fhir.Core.Features.Search.Registry; -using Microsoft.Health.Fhir.SqlServer.Features.Schema.Model; -using Microsoft.Health.SqlServer.Features.Schema.Model; - -namespace Microsoft.Health.Fhir.SqlServer.Features.Storage.TvpRowGeneration -{ - internal class SearchParameterStatusV1RowGenerator : ITableValuedParameterRowGenerator, SearchParamTableTypeV1Row> - { - public IEnumerable GenerateRows(List searchParameterStatuses) - { - return searchParameterStatuses.Select(searchParameterStatus => new SearchParamTableTypeV1Row( - searchParameterStatus.Uri.OriginalString, - searchParameterStatus.Status.ToString(), - searchParameterStatus.IsPartiallySupported)) - .ToList(); - } - } -} diff --git a/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj b/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj index d1b39787e7..98472b1dce 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj +++ b/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj @@ -1,7 +1,7 @@  - 76 + 77 Features\Schema\Migrations\$(LatestSchemaVersion).sql From 7272265f915193cc2faa26aafd148b3ae8260020 Mon Sep 17 00:00:00 2001 From: Brendan Kowitz Date: Fri, 29 Mar 2024 08:40:29 -0700 Subject: [PATCH 133/155] Updates Sourcelink, fix xml docs (#3785) * Updates Sourcelink * Adds ReproducibleBuilds * Fixes xml docs --- CustomAnalysisRules.ruleset | 1 + Directory.Build.props | 6 +++-- Directory.Packages.props | 5 +++-- build/jobs/package.yml | 22 +------------------ 
.../Configs/ExportJobFormatConfiguration.cs | 6 ++--- .../Models/ExportJobRecordOutputConverter.cs | 6 ++--- .../ReindexJobQueryResourceCountsConverter.cs | 2 +- .../Models/ReindexJobQueryStatusConverter.cs | 2 +- .../Operations/Reindex/ReindexJobTask.cs | 2 +- .../Persistence/ResourceWrapperFactory.cs | 1 - .../Features/Search/Expressions/Expression.cs | 10 ++++----- .../Search/TypedElementSearchIndexer.cs | 14 ++++++------ .../Features/Storage/FhirMemoryCache.cs | 3 +-- .../Validation/ResourceContentValidator.cs | 1 - .../ConvertData/ConvertDataRequest.cs | 2 +- .../BundleTestsCommonFunctions.cs | 8 +++---- .../Definitions.cs | 1 - .../Visitors/RemoveIncludesRewriter.cs | 2 +- .../Features/Storage/ResourceMetadata.cs | 2 +- .../Rest/Audit/AuditTests.cs | 2 +- .../Rest/HistoryTests.cs | 2 +- 21 files changed, 40 insertions(+), 60 deletions(-) diff --git a/CustomAnalysisRules.ruleset b/CustomAnalysisRules.ruleset index 342d390ef1..72d55b1330 100644 --- a/CustomAnalysisRules.ruleset +++ b/CustomAnalysisRules.ruleset @@ -44,6 +44,7 @@ + diff --git a/Directory.Build.props b/Directory.Build.props index 174c7a2c62..798a01206c 100644 --- a/Directory.Build.props +++ b/Directory.Build.props @@ -6,7 +6,8 @@ Microsoft Health Team Microsoft Corporation Copyright © Microsoft Corporation. All rights reserved. 
- Portable + true + embedded true true true @@ -16,7 +17,7 @@ MIT Microsoft FHIR Server for Azure true - https://github.com/microsoft/fhir-server/ + https://github.com/microsoft/fhir-server $(MSBuildThisFileDirectory)\CodeCoverage.runsettings net8.0;net6.0 true @@ -57,6 +58,7 @@ + diff --git a/Directory.Packages.props b/Directory.Packages.props index 116096cd1a..335052b23b 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -36,6 +36,7 @@ + @@ -94,7 +95,7 @@ - + @@ -122,4 +123,4 @@ - \ No newline at end of file + diff --git a/build/jobs/package.yml b/build/jobs/package.yml index 0fbf89aa9e..d66ba4577a 100644 --- a/build/jobs/package.yml +++ b/build/jobs/package.yml @@ -70,24 +70,4 @@ steps: inputs: pathtoPublish: '$(build.artifactStagingDirectory)/nupkgs' artifactName: 'nuget' - publishLocation: 'container' - - - task: CopyFiles@2 - displayName: 'copy symbols' - inputs: - sourceFolder: '$(build.sourcesDirectory)' - contents: | - **/*.pdb - !**/*.UnitTests.pdb - targetFolder: '$(build.artifactStagingDirectory)/symbols' - cleanTargetFolder: true - flattenFolders: true - overWrite: true - - - task: PublishBuildArtifacts@1 - displayName: 'publish symbol artifacts' - inputs: - pathtoPublish: '$(build.artifactStagingDirectory)/symbols' - artifactName: 'symbols' - publishLocation: 'container' - \ No newline at end of file + publishLocation: 'container' \ No newline at end of file diff --git a/src/Microsoft.Health.Fhir.Core/Configs/ExportJobFormatConfiguration.cs b/src/Microsoft.Health.Fhir.Core/Configs/ExportJobFormatConfiguration.cs index 490e5b61c6..0efcb728c4 100644 --- a/src/Microsoft.Health.Fhir.Core/Configs/ExportJobFormatConfiguration.cs +++ b/src/Microsoft.Health.Fhir.Core/Configs/ExportJobFormatConfiguration.cs @@ -16,9 +16,9 @@ public class ExportJobFormatConfiguration /// /// The format definition string. An export job's format is used to create the folder stucture inside the container. /// The format is defined by tags and characters. 
Supported tags are defined below. The / character is used to indicate a subfolder. - /// - Places a timestamp corisponding to the time the export job was enqueued. - /// - The name of the resource currently being exported. - /// - The GUID id of the export job. + /// <timestamp> - Places a timestamp corisponding to the time the export job was enqueued. + /// <resourcename> - The name of the resource currently being exported. + /// <id> - The GUID id of the export job. /// public string Format { get; set; } diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/Models/ExportJobRecordOutputConverter.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/Models/ExportJobRecordOutputConverter.cs index a390ae497d..3904d074c4 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/Models/ExportJobRecordOutputConverter.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/Models/ExportJobRecordOutputConverter.cs @@ -12,10 +12,10 @@ namespace Microsoft.Health.Fhir.Core.Features.Operations.Export.Models { /// /// A custom converter for de-serializing the Output property in ExportJobRecord correctly. - /// In SchemaVersion v1 for EJR, Output is of Dictionary format. - /// In SchemaVersion v2 it is of Dictionary> format. + /// In SchemaVersion v1 for EJR, Output is of Dictionary>string, ExportFileInfo> format. + /// In SchemaVersion v2 it is of Dictionary>string, List>ExportFileInfo>> format. /// This converter makes sure the updated code can still read v1 by returning a - /// List always. + /// List<ExportFileInfo> always. 
/// public class ExportJobRecordOutputConverter : JsonConverter { diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/Models/ReindexJobQueryResourceCountsConverter.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/Models/ReindexJobQueryResourceCountsConverter.cs index 5a8cb72a3b..f93630a6d0 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/Models/ReindexJobQueryResourceCountsConverter.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/Models/ReindexJobQueryResourceCountsConverter.cs @@ -13,7 +13,7 @@ namespace Microsoft.Health.Fhir.Core.Features.Operations.Reindex.Models { /// - /// JsonConverter to handle from the legacy version with ‹string, int› to the current version with ‹string, SearchResultReindex›. + /// JsonConverter to handle from the legacy version with <string, int> to the current version with <string, SearchResultReindex>. /// public class ReindexJobQueryResourceCountsConverter : JsonConverter> { diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/Models/ReindexJobQueryStatusConverter.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/Models/ReindexJobQueryStatusConverter.cs index 905e45c24b..4fecca60ed 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/Models/ReindexJobQueryStatusConverter.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/Models/ReindexJobQueryStatusConverter.cs @@ -12,7 +12,7 @@ namespace Microsoft.Health.Fhir.Core.Features.Operations.Reindex.Models { /// - /// JsonConverter to handle change from ConcurrentBag to ConcurrentDictionary. + /// JsonConverter to handle change from ConcurrentBag to ConcurrentDictionary. /// For backcompat and fact what we don't need values in dictionary and only key uniqueness, we read and write it as array and not dictionary. 
/// public class ReindexJobQueryStatusConverter : JsonConverter> diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/ReindexJobTask.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/ReindexJobTask.cs index decb4b1455..88983ff9bc 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/ReindexJobTask.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Reindex/ReindexJobTask.cs @@ -802,7 +802,7 @@ private async Task CalculateAndSetTotalAndResourceCounts() /// /// Gets called from and only gets called when all queryList items are status of completed /// - /// Task<(int totalCount, List + /// Count and resource types. private async Task<(int totalCount, List resourcesTypes)> CalculateTotalCount() { int totalCount = 0; diff --git a/src/Microsoft.Health.Fhir.Core/Features/Persistence/ResourceWrapperFactory.cs b/src/Microsoft.Health.Fhir.Core/Features/Persistence/ResourceWrapperFactory.cs index 0f8a6b4fcd..8ce65abe81 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Persistence/ResourceWrapperFactory.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Persistence/ResourceWrapperFactory.cs @@ -40,7 +40,6 @@ public class ResourceWrapperFactory : IResourceWrapperFactory /// The compartment indexer. /// The search parameter definition manager. /// Resource deserializer - /// Resource id provider public ResourceWrapperFactory( IRawResourceFactory rawResourceFactory, RequestContextAccessor fhirRequestContextAccessor, diff --git a/src/Microsoft.Health.Fhir.Core/Features/Search/Expressions/Expression.cs b/src/Microsoft.Health.Fhir.Core/Features/Search/Expressions/Expression.cs index 4bc27e0f9e..d10728fcc6 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Search/Expressions/Expression.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Search/Expressions/Expression.cs @@ -74,7 +74,7 @@ public static MultiaryExpression And(IReadOnlyList expressions) /// The target resource type. /// If this is a reversed chained expression. 
/// The expression. - /// A that represents chained operation on through . + /// A that represents chained operation on through . public static ChainedExpression Chained(string[] resourceTypes, SearchParameterInfo referenceSearchParameter, string[] targetResourceTypes, bool reversed, Expression expression) { return new ChainedExpression(resourceTypes, referenceSearchParameter, targetResourceTypes, reversed, expression); @@ -91,7 +91,7 @@ public static ChainedExpression Chained(string[] resourceTypes, SearchParameterI /// If this is a wildcard include. /// If this is a reversed include (revinclude) expression. /// If this is include has :iterate (:recurse) modifier. - /// A that represents an include on through . + /// A that represents an include on through . public static IncludeExpression Include(string[] resourceTypes, SearchParameterInfo referenceSearchParameter, string sourceResourceType, string targetResourceType, IEnumerable referencedTypes, bool wildCard, bool reversed, bool iterate) { return new IncludeExpression(resourceTypes, referenceSearchParameter, sourceResourceType, targetResourceType, referencedTypes, wildCard, reversed, iterate, null); @@ -263,7 +263,7 @@ public static StringExpression StartsWith(FieldName fieldName, int? componentInd } /// - /// Creates a that represents logical IN operation over . + /// Creates a that represents logical IN operation over . /// /// Type of the value included in the expression. /// The field name. @@ -318,14 +318,14 @@ public static SmartCompartmentSearchExpression SmartCompartmentSearch(string com /// /// Accumulates a "value-insensitive" hash code of this instance, meaning it ignores parameterizable values. - /// For example, date=2013&name=Smith and date=2014&name=Trudeau would have the same hash code. + /// For example, date=2013&name=Smith and date=2014&name=Trudeau would have the same hash code. 
/// /// The HashCode instance to accumulate into public abstract void AddValueInsensitiveHashCode(ref HashCode hashCode); /// /// Determines whether the given expression is equal to this instance, ignoring any parameterizable values. - /// For example, date=2013&name=Smith and date=2014&name=Trudeau would be considered equal + /// For example, date=2013&name=Smith and date=2014&name=Trudeau would be considered equal /// /// The expression to compare this instance to. public abstract bool ValueInsensitiveEquals(Expression other); diff --git a/src/Microsoft.Health.Fhir.Core/Features/Search/TypedElementSearchIndexer.cs b/src/Microsoft.Health.Fhir.Core/Features/Search/TypedElementSearchIndexer.cs index 0ac194cc46..4dae486f41 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Search/TypedElementSearchIndexer.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Search/TypedElementSearchIndexer.cs @@ -177,13 +177,13 @@ private IEnumerable ProcessNonCompositeSearchParameter(SearchP } else { - /// - /// searchValue should not have a null value - /// But if the input json is not in the correct format then we are parsing the body here and passing the initial validations for required fields here - /// e.g. If the body contains Coverage.status = "", then after parsing Coverage.status = null & Coverage.statusElement = null, resulting into minimum cardinality error as expected - /// If the body contains Coverage.status = , then after parsing Coverage.status = null & Coverage.statusElement = {value=null}, which passes the Firely validation and CodeToTokenSearchValueConverter returns null - /// In this case return BadRequestException with a valid message instead of 500 - /// + // + // searchValue should not have a null value + // But if the input json is not in the correct format then we are parsing the body here and passing the initial validations for required fields here + // e.g. 
If the body contains Coverage.status = "", then after parsing Coverage.status = null & Coverage.statusElement = null, resulting into minimum cardinality error as expected + // If the body contains Coverage.status = , then after parsing Coverage.status = null & Coverage.statusElement = {value=null}, which passes the Firely validation and CodeToTokenSearchValueConverter returns null + // In this case return BadRequestException with a valid message instead of 500 + // throw new BadRequestException(string.Format(Core.Resources.ValueCannotBeNull, searchParameter.Expression)); } } diff --git a/src/Microsoft.Health.Fhir.Core/Features/Storage/FhirMemoryCache.cs b/src/Microsoft.Health.Fhir.Core/Features/Storage/FhirMemoryCache.cs index 68bc4ffc47..bc464ea729 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Storage/FhirMemoryCache.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Storage/FhirMemoryCache.cs @@ -55,7 +55,6 @@ public FhirMemoryCache(string name, int limitSizeInMegabytes, TimeSpan expiratio /// /// Get or add the value to cache. /// - /// Type of the value in cache /// Key /// Value /// Value in cache @@ -118,7 +117,7 @@ public T Get(string key) } /// - /// Try to retrieve an item from cache, if it does not exist then returns the for that generic type. + /// Try to retrieve an item from cache, if it does not exist then returns the default for that generic type. /// /// Key /// Value diff --git a/src/Microsoft.Health.Fhir.Core/Features/Validation/ResourceContentValidator.cs b/src/Microsoft.Health.Fhir.Core/Features/Validation/ResourceContentValidator.cs index 1b1b7fbdfb..62738d4cbb 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Validation/ResourceContentValidator.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Validation/ResourceContentValidator.cs @@ -17,7 +17,6 @@ namespace Microsoft.Health.Fhir.Core.Features.Validation /// /// Validates content of resource. /// - /// The type of the element. 
/// /// Even if we correctly parsed resource into object it doesn't mean resource is valid. /// We need to check that properties have right cardinality, correct types, proper format, etc. diff --git a/src/Microsoft.Health.Fhir.Core/Messages/ConvertData/ConvertDataRequest.cs b/src/Microsoft.Health.Fhir.Core/Messages/ConvertData/ConvertDataRequest.cs index 4b13916925..91dd7abfad 100644 --- a/src/Microsoft.Health.Fhir.Core/Messages/ConvertData/ConvertDataRequest.cs +++ b/src/Microsoft.Health.Fhir.Core/Messages/ConvertData/ConvertDataRequest.cs @@ -58,7 +58,7 @@ public ConvertDataRequest( /// /// Reference for template collection. - /// The format is "/:" for template collection stored in container registries. + /// The format is "<registryServer>/<imageName>:<imageTag>" for template collection stored in container registries. /// Also supports image digest as reference. Will use 'latest' if no tag or digest present. /// public string TemplateCollectionReference { get; } diff --git a/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Features/Persistence/Orchestration/BundleTestsCommonFunctions.cs b/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Features/Persistence/Orchestration/BundleTestsCommonFunctions.cs index c903f5dbd0..0588825ff2 100644 --- a/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Features/Persistence/Orchestration/BundleTestsCommonFunctions.cs +++ b/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Features/Persistence/Orchestration/BundleTestsCommonFunctions.cs @@ -42,10 +42,10 @@ public static IFhirDataStore GetSubstituteForIFhirDataStore() { var dataStore = Substitute.For(); - /// In this parg of the code I'm replacing the default behavior of for the method 'MergeAsync'. - /// I've added a validation to Bundle Orchestrator Operation to avoid null instances of DataStoreOperationOutcome. 
- /// To make the tests operating as expected, I've overrided the default behavior of and set the mock - /// version of 'MergeAsync' to return some basic values for tests. + // In this parg of the code I'm replacing the default behavior of for the method 'MergeAsync'. + // I've added a validation to Bundle Orchestrator Operation to avoid null instances of DataStoreOperationOutcome. + // To make the tests operating as expected, I've overrided the default behavior of and set the mock + // version of 'MergeAsync' to return some basic values for tests. dataStore.MergeAsync(Arg.Any>(), Arg.Any()).ReturnsForAnyArgs(MockMergeAsync); dataStore.MergeAsync(Arg.Any>(), Arg.Any(), Arg.Any()).ReturnsForAnyArgs(MockMergeAsync); diff --git a/src/Microsoft.Health.Fhir.Shared.Tests/Definitions.cs b/src/Microsoft.Health.Fhir.Shared.Tests/Definitions.cs index b912202236..cc8f1b58eb 100644 --- a/src/Microsoft.Health.Fhir.Shared.Tests/Definitions.cs +++ b/src/Microsoft.Health.Fhir.Shared.Tests/Definitions.cs @@ -14,7 +14,6 @@ public static class Definitions /// /// Gets back a resource from a definition file. /// - /// The resource type. /// The JSON filename, omit the extension public static Bundle GetDefinition(string fileName) { diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Search/Expressions/Visitors/RemoveIncludesRewriter.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Search/Expressions/Visitors/RemoveIncludesRewriter.cs index ec3a81edcf..16c93931ae 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Search/Expressions/Visitors/RemoveIncludesRewriter.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Search/Expressions/Visitors/RemoveIncludesRewriter.cs @@ -10,7 +10,7 @@ namespace Microsoft.Health.Fhir.SqlServer.Features.Search.Expressions.Visitors { /// - /// A rewriter that removes s from an expression tree. + /// A rewriter that removes s from an expression tree. 
/// internal class RemoveIncludesRewriter : ExpressionRewriterWithInitialContext { diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/ResourceMetadata.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/ResourceMetadata.cs index f056cfeaa2..1e0d818fb3 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/ResourceMetadata.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/ResourceMetadata.cs @@ -31,7 +31,7 @@ public ResourceMetadata(CompartmentIndices compartmentIndices = null, ILookup /// Gets the search index entries by their type key. The type should be one returned by - /// : + /// : /// either implementing ISearchValue or for composites a Tuple with the component types as type arguments, /// for example: /// diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Audit/AuditTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Audit/AuditTests.cs index 763abadd33..cfdaf1a7a2 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Audit/AuditTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Audit/AuditTests.cs @@ -26,7 +26,7 @@ namespace Microsoft.Health.Fhir.Tests.E2E.Rest.Audit { /// /// Provides Audit specific tests. 
- /// [Trait(Traits.OwningTeam, OwningTeam.Fhir)] [Trait(Traits.Category, Categories.Audit)] [HttpIntegrationFixtureArgumentSets(DataStore.CosmosDb, Format.Json)] diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/HistoryTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/HistoryTests.cs index f1ec78355a..cdaa24875c 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/HistoryTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/HistoryTests.cs @@ -588,7 +588,7 @@ private async Task CreatePatientAndGetStartTimeForHistoryTest(st /// /// Get all the results for given search string matching the tag /// - /// List for the given search string + /// List of Bundle.EntryComponent for the given search string private async Task> GetAllResultsWithMatchingTagForGivenSearch(string searchString, string tag) { FhirResponse response; From 996b3fa214eaf167924496c5f73240121db8ce52 Mon Sep 17 00:00:00 2001 From: apurvabhaleMS <86023331+apurvabhaleMS@users.noreply.github.com> Date: Fri, 29 Mar 2024 08:44:33 -0700 Subject: [PATCH 134/155] Handle 405 errors (#3777) * Handle 405 error * Updated the diagnosticInfo * Added unit test * Updated tests --- src/Microsoft.Health.Fhir.Api/Resources.Designer.cs | 9 +++++++++ src/Microsoft.Health.Fhir.Api/Resources.resx | 3 +++ .../Controllers/FhirController.cs | 5 +++++ .../Rest/ExceptionTests.cs | 13 +++++++++++++ 4 files changed, 30 insertions(+) diff --git a/src/Microsoft.Health.Fhir.Api/Resources.Designer.cs b/src/Microsoft.Health.Fhir.Api/Resources.Designer.cs index b287622c85..e681303db3 100644 --- a/src/Microsoft.Health.Fhir.Api/Resources.Designer.cs +++ b/src/Microsoft.Health.Fhir.Api/Resources.Designer.cs @@ -564,6 +564,15 @@ public static string OperationNotImplemented { } } + /// + /// Looks up a localized string similar to The requested operation is not supported.. 
+ /// + public static string OperationNotSupported { + get { + return ResourceManager.GetString("OperationNotSupported", resourceCulture); + } + } + /// /// Looks up a localized string similar to FHIR Server. /// diff --git a/src/Microsoft.Health.Fhir.Api/Resources.resx b/src/Microsoft.Health.Fhir.Api/Resources.resx index c53c8f73d4..34a5701129 100644 --- a/src/Microsoft.Health.Fhir.Api/Resources.resx +++ b/src/Microsoft.Health.Fhir.Api/Resources.resx @@ -182,6 +182,9 @@ The requested "{0}" operation is not supported. {0} is the operation name + + The requested operation is not supported. + FHIR Server {NumberedPlaceHolder="FHIR"} diff --git a/src/Microsoft.Health.Fhir.Shared.Api/Controllers/FhirController.cs b/src/Microsoft.Health.Fhir.Shared.Api/Controllers/FhirController.cs index d2bb4d821f..c50af53fde 100644 --- a/src/Microsoft.Health.Fhir.Shared.Api/Controllers/FhirController.cs +++ b/src/Microsoft.Health.Fhir.Shared.Api/Controllers/FhirController.cs @@ -126,6 +126,11 @@ public IActionResult CustomError(int? 
statusCode = null) returnCode = HttpStatusCode.NotFound; diagnosticInfo = Resources.NotFoundException; break; + case (int)HttpStatusCode.MethodNotAllowed: + issueType = OperationOutcome.IssueType.NotSupported; + returnCode = HttpStatusCode.MethodNotAllowed; + diagnosticInfo = Resources.OperationNotSupported; + break; default: issueType = OperationOutcome.IssueType.Exception; returnCode = HttpStatusCode.InternalServerError; diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/ExceptionTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/ExceptionTests.cs index 6966f4c0a3..3d41ee037f 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/ExceptionTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/ExceptionTests.cs @@ -5,6 +5,7 @@ using System; using System.Net; +using System.Net.Http; using Hl7.Fhir.Model; using Hl7.Fhir.Validation; using Microsoft.AspNetCore.Builder; @@ -102,6 +103,18 @@ public async Task GivenAnUnknownRoute_WhenPostingToHttp_TheServerShouldReturnAnO DotNetAttributeValidation.Validate(operationOutcome, true); } + [Fact] + [Trait(Traits.Priority, Priority.One)] + public async Task GivenANotAllowedMethod_WhenRequestIsSent_TheServerShouldReturnMethodNotSupported() + { + using var requestMessage = new HttpRequestMessage(); + string uriString = _client.HttpClient.BaseAddress + "admin.html"; + requestMessage.RequestUri = new Uri(uriString); + requestMessage.Method = HttpMethod.Head; + HttpResponseMessage response = await _client.HttpClient.SendAsync(requestMessage); + Assert.Equal(HttpStatusCode.MethodNotAllowed, response.StatusCode); + } + public class StartupWithThrowingMiddleware : StartupBaseForCustomProviders { public StartupWithThrowingMiddleware(IConfiguration configuration) From 3285b6d89594f6ef1d1fbb306206adc82d160039 Mon Sep 17 00:00:00 2001 From: Robert Johnson Date: Tue, 2 Apr 2024 09:16:07 -0700 Subject: [PATCH 135/155] Add support for partial delete success (#3781) --- 
.../Exceptions/IncompleteDeleteException.cs | 22 +++++++++++++ .../BulkDelete/BulkDeleteProcessingJob.cs | 3 +- .../Features/Persistence/IFhirDataStore.cs | 10 +++++- .../ConditionalDeleteResourceRequest.cs | 6 +++- .../Messages/Delete/DeleteResourceRequest.cs | 8 +++-- .../Resources.Designer.cs | 9 ++++++ src/Microsoft.Health.Fhir.Core/Resources.resx | 4 +++ .../Storage/CosmosFhirDataStoreTests.cs | 20 ++++++++++++ .../Features/Storage/CosmosFhirDataStore.cs | 11 +++++-- .../StoredProcedures/HardDelete/HardDelete.cs | 4 +-- .../StoredProcedures/HardDelete/hardDelete.js | 18 +++++++---- .../Controllers/FhirController.cs | 12 ++++--- ...perationOutcomeExceptionFilterAttribute.cs | 3 ++ .../Resources/ResourceHandlerTests.cs | 2 +- .../ResourceHandlerTests_ConditionalDelete.cs | 4 +-- .../Resources/Delete/DeletionService.cs | 4 +-- .../Storage/SqlServerFhirDataStore.cs | 2 +- ...erFhirResourceChangeCaptureEnabledTests.cs | 4 +-- .../Operations/Reindex/ReindexJobTests.cs | 32 +++++++++---------- .../Operations/Reindex/ReindexSearchTests.cs | 4 +-- 20 files changed, 136 insertions(+), 46 deletions(-) create mode 100644 src/Microsoft.Health.Fhir.Core/Exceptions/IncompleteDeleteException.cs diff --git a/src/Microsoft.Health.Fhir.Core/Exceptions/IncompleteDeleteException.cs b/src/Microsoft.Health.Fhir.Core/Exceptions/IncompleteDeleteException.cs new file mode 100644 index 0000000000..1068c45522 --- /dev/null +++ b/src/Microsoft.Health.Fhir.Core/Exceptions/IncompleteDeleteException.cs @@ -0,0 +1,22 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using Microsoft.Health.Abstractions.Exceptions; + +namespace Microsoft.Health.Fhir.Core.Exceptions +{ + public class IncompleteDeleteException : RequestTooCostlyException + { + public IncompleteDeleteException(int numberOfResourceVersionsDeleted) + : base(message: string.Format(Resources.PartialDeleteSuccess, numberOfResourceVersionsDeleted, StringComparison.Ordinal)) + { + } + } +} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/BulkDelete/BulkDeleteProcessingJob.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/BulkDelete/BulkDeleteProcessingJob.cs index 7bd5837fe7..1361b87022 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/BulkDelete/BulkDeleteProcessingJob.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/BulkDelete/BulkDeleteProcessingJob.cs @@ -87,7 +87,8 @@ public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancel definition.DeleteOperation, maxDeleteCount: null, deleteAll: true, - versionType: definition.VersionType), + versionType: definition.VersionType, + allowPartialSuccess: false), // Explicitly setting to call out that this can be changed in the future if we want to. Bulk delete offers the possibility of automatically rerunning the operation until it succeeds, fully automating the process. 
cancellationToken); } catch (IncompleteOperationException ex) diff --git a/src/Microsoft.Health.Fhir.Core/Features/Persistence/IFhirDataStore.cs b/src/Microsoft.Health.Fhir.Core/Features/Persistence/IFhirDataStore.cs index 165cdb49c2..732b4bf1db 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Persistence/IFhirDataStore.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Persistence/IFhirDataStore.cs @@ -21,7 +21,15 @@ public interface IFhirDataStore Task GetAsync(ResourceKey key, CancellationToken cancellationToken); - Task HardDeleteAsync(ResourceKey key, bool keepCurrentVersion, CancellationToken cancellationToken); + /// + /// Hard deletes a resource. + /// + /// Identifier of the resource + /// Keeps the current version of the resource, only deleting history + /// Only for Cosmos. Allows for a delete to partially succeed if it fails to delete all versions of a resource in one try. + /// Cancellation Token + /// Async Task + Task HardDeleteAsync(ResourceKey key, bool keepCurrentVersion, bool allowPartialSuccess, CancellationToken cancellationToken); Task BulkUpdateSearchParameterIndicesAsync(IReadOnlyCollection resources, CancellationToken cancellationToken); diff --git a/src/Microsoft.Health.Fhir.Core/Messages/Delete/ConditionalDeleteResourceRequest.cs b/src/Microsoft.Health.Fhir.Core/Messages/Delete/ConditionalDeleteResourceRequest.cs index 433147cf1d..f3121cafc9 100644 --- a/src/Microsoft.Health.Fhir.Core/Messages/Delete/ConditionalDeleteResourceRequest.cs +++ b/src/Microsoft.Health.Fhir.Core/Messages/Delete/ConditionalDeleteResourceRequest.cs @@ -23,7 +23,8 @@ public ConditionalDeleteResourceRequest( int? 
maxDeleteCount, BundleResourceContext bundleResourceContext = null, bool deleteAll = false, - ResourceVersionType versionType = ResourceVersionType.Latest) + ResourceVersionType versionType = ResourceVersionType.Latest, + bool allowPartialSuccess = false) : base(resourceType, conditionalParameters, bundleResourceContext) { EnsureArg.IsNotNull(conditionalParameters, nameof(conditionalParameters)); @@ -32,6 +33,7 @@ public ConditionalDeleteResourceRequest( MaxDeleteCount = maxDeleteCount; DeleteAll = deleteAll; VersionType = versionType; + AllowPartialSuccess = allowPartialSuccess; } public DeleteOperation DeleteOperation { get; } @@ -42,6 +44,8 @@ public ConditionalDeleteResourceRequest( public ResourceVersionType VersionType { get; } + public bool AllowPartialSuccess { get; } + protected override IEnumerable GetCapabilities() => Capabilities; } } diff --git a/src/Microsoft.Health.Fhir.Core/Messages/Delete/DeleteResourceRequest.cs b/src/Microsoft.Health.Fhir.Core/Messages/Delete/DeleteResourceRequest.cs index 34518e4a8f..2acf50551f 100644 --- a/src/Microsoft.Health.Fhir.Core/Messages/Delete/DeleteResourceRequest.cs +++ b/src/Microsoft.Health.Fhir.Core/Messages/Delete/DeleteResourceRequest.cs @@ -14,16 +14,17 @@ namespace Microsoft.Health.Fhir.Core.Messages.Delete { public class DeleteResourceRequest : IRequest, IRequireCapability { - public DeleteResourceRequest(ResourceKey resourceKey, DeleteOperation deleteOperation, BundleResourceContext bundleResourceContext = null) + public DeleteResourceRequest(ResourceKey resourceKey, DeleteOperation deleteOperation, BundleResourceContext bundleResourceContext = null, bool allowPartialSuccess = false) { EnsureArg.IsNotNull(resourceKey, nameof(resourceKey)); ResourceKey = resourceKey; DeleteOperation = deleteOperation; BundleResourceContext = bundleResourceContext; + AllowPartialSuccess = allowPartialSuccess; } - public DeleteResourceRequest(string type, string id, DeleteOperation deleteOperation, BundleResourceContext 
bundleResourceContext = null) + public DeleteResourceRequest(string type, string id, DeleteOperation deleteOperation, BundleResourceContext bundleResourceContext = null, bool allowPartialSuccess = false) { EnsureArg.IsNotNull(type, nameof(type)); EnsureArg.IsNotNull(id, nameof(id)); @@ -31,6 +32,7 @@ public DeleteResourceRequest(string type, string id, DeleteOperation deleteOpera ResourceKey = new ResourceKey(type, id); DeleteOperation = deleteOperation; BundleResourceContext = bundleResourceContext; + AllowPartialSuccess = allowPartialSuccess; } public ResourceKey ResourceKey { get; } @@ -39,6 +41,8 @@ public DeleteResourceRequest(string type, string id, DeleteOperation deleteOpera public DeleteOperation DeleteOperation { get; } + public bool AllowPartialSuccess { get; } + public IEnumerable RequiredCapabilities() { yield return new CapabilityQuery($"CapabilityStatement.rest.resource.where(type = '{ResourceKey.ResourceType}').interaction.where(code = 'delete').exists()"); diff --git a/src/Microsoft.Health.Fhir.Core/Resources.Designer.cs b/src/Microsoft.Health.Fhir.Core/Resources.Designer.cs index 144a4a6c9e..cc8818d488 100644 --- a/src/Microsoft.Health.Fhir.Core/Resources.Designer.cs +++ b/src/Microsoft.Health.Fhir.Core/Resources.Designer.cs @@ -988,6 +988,15 @@ internal static string OrDelimiter { } } + /// + /// Looks up a localized string similar to Deleted {0} versions of the target resource.. + /// + internal static string PartialDeleteSuccess { + get { + return ResourceManager.GetString("PartialDeleteSuccess", resourceCulture); + } + } + /// /// Looks up a localized string similar to Patching immutable properties is not allowed.. /// diff --git a/src/Microsoft.Health.Fhir.Core/Resources.resx b/src/Microsoft.Health.Fhir.Core/Resources.resx index cf51079c07..7afb460c69 100644 --- a/src/Microsoft.Health.Fhir.Core/Resources.resx +++ b/src/Microsoft.Health.Fhir.Core/Resources.resx @@ -736,4 +736,8 @@ A resource should only appear once in each Bundle. 
Error message for a duplicate resource key in the same bundle + + Deleted {0} versions of the target resource. + {0} is replaced with the number of deleted versions of the resource. + \ No newline at end of file diff --git a/src/Microsoft.Health.Fhir.CosmosDb.UnitTests/Features/Storage/CosmosFhirDataStoreTests.cs b/src/Microsoft.Health.Fhir.CosmosDb.UnitTests/Features/Storage/CosmosFhirDataStoreTests.cs index 4da108a6e6..ecbeba93df 100644 --- a/src/Microsoft.Health.Fhir.CosmosDb.UnitTests/Features/Storage/CosmosFhirDataStoreTests.cs +++ b/src/Microsoft.Health.Fhir.CosmosDb.UnitTests/Features/Storage/CosmosFhirDataStoreTests.cs @@ -11,9 +11,11 @@ using System.Reflection; using System.Runtime.CompilerServices; using System.Threading; +using System.Threading.Tasks; using Hl7.Fhir.Model; using Hl7.Fhir.Serialization; using Microsoft.Azure.Cosmos; +using Microsoft.Azure.Cosmos.Scripts; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; using Microsoft.Extensions.Options; @@ -21,6 +23,7 @@ using Microsoft.Health.Core.Features.Context; using Microsoft.Health.Extensions.DependencyInjection; using Microsoft.Health.Fhir.Core.Configs; +using Microsoft.Health.Fhir.Core.Exceptions; using Microsoft.Health.Fhir.Core.Extensions; using Microsoft.Health.Fhir.Core.Features.Context; using Microsoft.Health.Fhir.Core.Features.Definition; @@ -283,6 +286,23 @@ public async Task GivenAnUpsertDuringABatch_When408ExceptionOccurs_RetryWillHapp await _container.Value.ReceivedWithAnyArgs(7).CreateItemAsync(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any()); } + [Fact] + public async Task GivenAHardDeleteRequest_WhenPartiallySuccessful_ThenAnExceptionIsThrown() + { + var resourceKey = new ResourceKey(KnownResourceTypes.Patient, "test"); + + var scripts = Substitute.For(); + scripts.ExecuteStoredProcedureAsync(Arg.Any(), Arg.Any(), Arg.Any(), cancellationToken: Arg.Any()).Returns((x) => + { + var response = Substitute.For>(); + response.Resource.Returns(1); + return 
Task.FromResult(response); + }); + _container.Value.Scripts.Returns(scripts); + + await Assert.ThrowsAsync(() => _dataStore.HardDeleteAsync(resourceKey, false, true, CancellationToken.None)); + } + private void CreateResponses(int pageSize, string continuationToken, params FeedResponse[] responses) { ICosmosQuery cosmosQuery = Substitute.For>(); diff --git a/src/Microsoft.Health.Fhir.CosmosDb/Features/Storage/CosmosFhirDataStore.cs b/src/Microsoft.Health.Fhir.CosmosDb/Features/Storage/CosmosFhirDataStore.cs index 08396b73b4..5fd1096010 100644 --- a/src/Microsoft.Health.Fhir.CosmosDb/Features/Storage/CosmosFhirDataStore.cs +++ b/src/Microsoft.Health.Fhir.CosmosDb/Features/Storage/CosmosFhirDataStore.cs @@ -489,7 +489,7 @@ public async Task GetAsync(ResourceKey key, CancellationToken c } } - public async Task HardDeleteAsync(ResourceKey key, bool keepCurrentVersion, CancellationToken cancellationToken) + public async Task HardDeleteAsync(ResourceKey key, bool keepCurrentVersion, bool allowPartialSuccess, CancellationToken cancellationToken) { EnsureArg.IsNotNull(key, nameof(key)); @@ -497,15 +497,20 @@ public async Task HardDeleteAsync(ResourceKey key, bool keepCurrentVersion, Canc { _logger.LogDebug("Obliterating {ResourceType}/{Id}. Keep current version: {KeepCurrentVersion}", key.ResourceType, key.Id, keepCurrentVersion); - StoredProcedureExecuteResponse> response = await _retryExceptionPolicyFactory.RetryPolicy.ExecuteAsync( + StoredProcedureExecuteResponse response = await _retryExceptionPolicyFactory.RetryPolicy.ExecuteAsync( async ct => await _hardDelete.Execute( _containerScope.Value.Scripts, key, keepCurrentVersion, + allowPartialSuccess, ct), cancellationToken); - _logger.LogDebug("Hard-deleted {Count} documents, which consumed {RU} RUs. 
The list of hard-deleted documents: {Resources}.", response.Resource.Count, response.RequestCharge, string.Join(", ", response.Resource)); + if (response.Resource > 0) + { + _logger.LogInformation("Partial success of delete operation. Deleted {NumDeleted} versions of the resource.", response.Resource); + throw new IncompleteDeleteException(response.Resource); + } } catch (CosmosException exception) { diff --git a/src/Microsoft.Health.Fhir.CosmosDb/Features/Storage/StoredProcedures/HardDelete/HardDelete.cs b/src/Microsoft.Health.Fhir.CosmosDb/Features/Storage/StoredProcedures/HardDelete/HardDelete.cs index 2e6a2bbe10..4ffeaf888a 100644 --- a/src/Microsoft.Health.Fhir.CosmosDb/Features/Storage/StoredProcedures/HardDelete/HardDelete.cs +++ b/src/Microsoft.Health.Fhir.CosmosDb/Features/Storage/StoredProcedures/HardDelete/HardDelete.cs @@ -14,12 +14,12 @@ namespace Microsoft.Health.Fhir.CosmosDb.Features.Storage.StoredProcedures.HardD { internal class HardDelete : StoredProcedureBase { - public async Task>> Execute(Scripts client, ResourceKey key, bool keepCurrentVersion, CancellationToken cancellationToken) + public async Task> Execute(Scripts client, ResourceKey key, bool keepCurrentVersion, bool allowPartialSuccess, CancellationToken cancellationToken) { EnsureArg.IsNotNull(client, nameof(client)); EnsureArg.IsNotNull(key, nameof(key)); - return await ExecuteStoredProc>(client, key.ToPartitionKey(), cancellationToken, key.ResourceType, key.Id, keepCurrentVersion); + return await ExecuteStoredProc(client, key.ToPartitionKey(), cancellationToken, key.ResourceType, key.Id, keepCurrentVersion, allowPartialSuccess); } } } diff --git a/src/Microsoft.Health.Fhir.CosmosDb/Features/Storage/StoredProcedures/HardDelete/hardDelete.js b/src/Microsoft.Health.Fhir.CosmosDb/Features/Storage/StoredProcedures/HardDelete/hardDelete.js index 6ebc812c07..1830f2d6e0 100644 --- a/src/Microsoft.Health.Fhir.CosmosDb/Features/Storage/StoredProcedures/HardDelete/hardDelete.js +++ 
b/src/Microsoft.Health.Fhir.CosmosDb/Features/Storage/StoredProcedures/HardDelete/hardDelete.js @@ -6,9 +6,10 @@ * @param {string} resourceTypeName - The resource type name. * @param {string} resourceId - The resource id. * @param {boolean} keepCurrentVersion - Specifies if the current version of the resource should be kept. +* @param {boolean} partialSuccess - Specifies if partial success of the delete is allowed. This will allow for some versions of the resource to be deleted even if it isn't possible to do all of them in one go. */ -function hardDelete(resourceTypeName, resourceId, keepCurrentVersion) { +function hardDelete(resourceTypeName, resourceId, keepCurrentVersion, partialSuccess) { const collection = getContext().getCollection(); const collectionLink = collection.getSelfLink(); const response = getContext().getResponse(); @@ -21,8 +22,8 @@ function hardDelete(resourceTypeName, resourceId, keepCurrentVersion) { if (!resourceId) { throwArgumentValidationError("The resourceId is undefined or null"); } - - let deletedResourceIdList = new Array(); + + let deletedResourceCount = 0; tryQueryAndHardDelete(); @@ -53,7 +54,7 @@ function hardDelete(resourceTypeName, resourceId, keepCurrentVersion) { tryHardDelete(documents); } else { // There is no more documents so we are finished. - response.setBody(deletedResourceIdList); + response.setBody(0); } }); @@ -65,7 +66,6 @@ function hardDelete(resourceTypeName, resourceId, keepCurrentVersion) { function tryHardDelete(documents) { if (documents.length > 0) { - deletedResourceIdList.push(documents[0].id); // Delete the first item. var isAccepted = collection.deleteDocument( @@ -77,6 +77,7 @@ function hardDelete(resourceTypeName, resourceId, keepCurrentVersion) { } // Successfully deleted the item, continue deleting. 
+ deletedResourceCount++; documents.shift(); tryHardDelete(documents); }); @@ -96,6 +97,11 @@ function hardDelete(resourceTypeName, resourceId, keepCurrentVersion) { } function throwTooManyRequestsError() { - throw new Error(429, `The request could not be completed.`); + if (!partialSuccess) { + throw new Error(429, `The request could not be completed.`); + } + else { + response.setBody(deletedResourceCount) + } } } diff --git a/src/Microsoft.Health.Fhir.Shared.Api/Controllers/FhirController.cs b/src/Microsoft.Health.Fhir.Shared.Api/Controllers/FhirController.cs index c50af53fde..bec6efc0f7 100644 --- a/src/Microsoft.Health.Fhir.Shared.Api/Controllers/FhirController.cs +++ b/src/Microsoft.Health.Fhir.Shared.Api/Controllers/FhirController.cs @@ -391,17 +391,19 @@ public async Task VRead(string typeParameter, string idParameter, /// The type. /// The identifier. /// A flag indicating whether to hard-delete the resource or not. + /// Allows for partial success of delete operation. Only applicable for hard delete on Cosmos services [HttpDelete] [ValidateIdSegmentAttribute] [Route(KnownRoutes.ResourceTypeById)] [AuditEventType(AuditEventSubType.Delete)] - public async Task Delete(string typeParameter, string idParameter, [FromQuery] bool hardDelete) + public async Task Delete(string typeParameter, string idParameter, [FromQuery] bool hardDelete, [FromQuery] bool allowPartialSuccess) { DeleteResourceResponse response = await _mediator.DeleteResourceAsync( new DeleteResourceRequest( new ResourceKey(typeParameter, idParameter), hardDelete ? DeleteOperation.HardDelete : DeleteOperation.SoftDelete, - GetBundleResourceContext()), + GetBundleResourceContext(), + allowPartialSuccess), HttpContext.RequestAborted); return FhirResult.NoContent().SetETagHeader(response.WeakETag); @@ -412,17 +414,19 @@ public async Task Delete(string typeParameter, string idParameter /// /// The type. /// The identifier. + /// Allows for partial success of delete operation. 
Only applicable on Cosmos services [HttpDelete] [ValidateIdSegmentAttribute] [Route(KnownRoutes.PurgeHistoryResourceTypeById)] [AuditEventType(AuditEventSubType.PurgeHistory)] - public async Task PurgeHistory(string typeParameter, string idParameter) + public async Task PurgeHistory(string typeParameter, string idParameter, [FromQuery] bool allowPartialSuccess) { DeleteResourceResponse response = await _mediator.DeleteResourceAsync( new DeleteResourceRequest( new ResourceKey(typeParameter, idParameter), DeleteOperation.PurgeHistory, - GetBundleResourceContext()), + GetBundleResourceContext(), + allowPartialSuccess), HttpContext.RequestAborted); return FhirResult.NoContent().SetETagHeader(response.WeakETag); diff --git a/src/Microsoft.Health.Fhir.Shared.Api/Features/Filters/OperationOutcomeExceptionFilterAttribute.cs b/src/Microsoft.Health.Fhir.Shared.Api/Features/Filters/OperationOutcomeExceptionFilterAttribute.cs index 40a74d52f1..2dab323f42 100644 --- a/src/Microsoft.Health.Fhir.Shared.Api/Features/Filters/OperationOutcomeExceptionFilterAttribute.cs +++ b/src/Microsoft.Health.Fhir.Shared.Api/Features/Filters/OperationOutcomeExceptionFilterAttribute.cs @@ -111,6 +111,9 @@ public override void OnActionExecuted(ActionExecutedContext context) operationOutcomeResult.StatusCode = HttpStatusCode.BadRequest; break; + case IncompleteDeleteException: + operationOutcomeResult.StatusCode = HttpStatusCode.RequestEntityTooLarge; + break; case BadRequestException _: case RequestNotValidException _: case BundleEntryLimitExceededException _: diff --git a/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Features/Resources/ResourceHandlerTests.cs b/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Features/Resources/ResourceHandlerTests.cs index 874ffd0bd5..4b42c164bb 100644 --- a/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Features/Resources/ResourceHandlerTests.cs +++ b/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Features/Resources/ResourceHandlerTests.cs @@ -314,7 +314,7 @@ 
public async Task GivenAFhirMediator_WhenHardDeletingWithSufficientPermissions_T ResourceKey resultKey = (await _mediator.DeleteResourceAsync(resourceKey, DeleteOperation.HardDelete)).ResourceKey; - await _fhirDataStore.Received(1).HardDeleteAsync(resourceKey, Arg.Any(), Arg.Any()); + await _fhirDataStore.Received(1).HardDeleteAsync(resourceKey, Arg.Any(), Arg.Any(), Arg.Any()); Assert.NotNull(resultKey); Assert.Equal(resourceKey.Id, resultKey.Id); diff --git a/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Features/Resources/ResourceHandlerTests_ConditionalDelete.cs b/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Features/Resources/ResourceHandlerTests_ConditionalDelete.cs index 5db6ee9f75..6f673d09f3 100644 --- a/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Features/Resources/ResourceHandlerTests_ConditionalDelete.cs +++ b/src/Microsoft.Health.Fhir.Shared.Core.UnitTests/Features/Resources/ResourceHandlerTests_ConditionalDelete.cs @@ -74,7 +74,7 @@ public async Task GivenOneMatchingResource_WhenDeletingConditionallyWithHardDele await _fhirDataStore.DidNotReceive().UpsertAsync(Arg.Any(), Arg.Any()); - await _fhirDataStore.Received().HardDeleteAsync(Arg.Any(), Arg.Any(), Arg.Any()); + await _fhirDataStore.Received().HardDeleteAsync(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any()); } [Fact] @@ -128,7 +128,7 @@ private ConditionalDeleteResourceRequest SetupConditionalDelete( if (hardDelete) { - _fhirDataStore.HardDeleteAsync(Arg.Any(), Arg.Any(), Arg.Any()).Returns(Task.CompletedTask); + _fhirDataStore.HardDeleteAsync(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any()).Returns(Task.CompletedTask); } else { diff --git a/src/Microsoft.Health.Fhir.Shared.Core/Features/Resources/Delete/DeletionService.cs b/src/Microsoft.Health.Fhir.Shared.Core/Features/Resources/Delete/DeletionService.cs index f1628e8ad8..6f27e45144 100644 --- a/src/Microsoft.Health.Fhir.Shared.Core/Features/Resources/Delete/DeletionService.cs +++ 
b/src/Microsoft.Health.Fhir.Shared.Core/Features/Resources/Delete/DeletionService.cs @@ -102,7 +102,7 @@ public async Task DeleteAsync(DeleteResourceRequest request, Cancel break; case DeleteOperation.HardDelete: case DeleteOperation.PurgeHistory: - await _retryPolicy.ExecuteAsync(async () => await fhirDataStore.Value.HardDeleteAsync(key, request.DeleteOperation == DeleteOperation.PurgeHistory, cancellationToken)); + await _retryPolicy.ExecuteAsync(async () => await fhirDataStore.Value.HardDeleteAsync(key, request.DeleteOperation == DeleteOperation.PurgeHistory, request.AllowPartialSuccess, cancellationToken)); break; default: throw new ArgumentOutOfRangeException(nameof(request)); @@ -280,7 +280,7 @@ private async Task HardDeleteResourcePage(ConditionalDeleteResourceRequest // This throws AggrigateExceptions await Parallel.ForEachAsync(resourcesToDelete, cancellationToken, async (item, innerCt) => { - await _retryPolicy.ExecuteAsync(async () => await fhirDataStore.Value.HardDeleteAsync(new ResourceKey(item.Resource.ResourceTypeName, item.Resource.ResourceId), request.DeleteOperation == DeleteOperation.PurgeHistory, innerCt)); + await _retryPolicy.ExecuteAsync(async () => await fhirDataStore.Value.HardDeleteAsync(new ResourceKey(item.Resource.ResourceTypeName, item.Resource.ResourceId), request.DeleteOperation == DeleteOperation.PurgeHistory, request.AllowPartialSuccess, innerCt)); parallelBag.Add(item.Resource.ResourceId); }); } diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerFhirDataStore.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerFhirDataStore.cs index 14fb633cb7..904e3e1213 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerFhirDataStore.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerFhirDataStore.cs @@ -611,7 +611,7 @@ public async Task GetAsync(ResourceKey key, CancellationToken c return results.Count == 0 ? 
null : results[0]; } - public async Task HardDeleteAsync(ResourceKey key, bool keepCurrentVersion, CancellationToken cancellationToken) + public async Task HardDeleteAsync(ResourceKey key, bool keepCurrentVersion, bool allowPartialSuccess, CancellationToken cancellationToken) { await _sqlStoreClient.HardDeleteAsync(_model.GetResourceTypeId(key.ResourceType), key.Id, keepCurrentVersion, _coreFeatures.SupportsResourceChangeCapture, cancellationToken); } diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/ChangeFeed/SqlServerFhirResourceChangeCaptureEnabledTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/ChangeFeed/SqlServerFhirResourceChangeCaptureEnabledTests.cs index 520ebcae8a..efab8cc8a8 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/ChangeFeed/SqlServerFhirResourceChangeCaptureEnabledTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/ChangeFeed/SqlServerFhirResourceChangeCaptureEnabledTests.cs @@ -172,7 +172,7 @@ public async Task GivenChangeCaptureEnabledAndNoVersionPolicy_AfterHardDeleting_ var resource = await store.GetAsync(new ResourceKey("Organization", create.Id, create.VersionId), CancellationToken.None); Assert.NotNull(resource); - await store.HardDeleteAsync(new ResourceKey("Organization", create.Id), false, cts.Token); + await store.HardDeleteAsync(new ResourceKey("Organization", create.Id), false, false, cts.Token); resource = await store.GetAsync(new ResourceKey("Organization", create.Id, create.VersionId), CancellationToken.None); Assert.Null(resource); @@ -204,7 +204,7 @@ public async Task GivenChangeCaptureEnabledAndNoVersionPolicy_AfterHardDeleting_ var id = create.Id; var store = (SqlServerFhirDataStore)_fixture.DataStore; - await store.HardDeleteAsync(new ResourceKey("Organization", id), false, CancellationToken.None); + await store.HardDeleteAsync(new ResourceKey("Organization", id), false, false, CancellationToken.None); var reCreate = await 
_fixture.Mediator.UpsertResourceAsync(Samples.GetDefaultOrganization().UpdateId(id)); Assert.Equal(id, reCreate.RawResourceElement.Id); diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Reindex/ReindexJobTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Reindex/ReindexJobTests.cs index 3f08384903..58d032b4bb 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Reindex/ReindexJobTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Reindex/ReindexJobTests.cs @@ -346,8 +346,8 @@ public async Task GivenSearchParametersToReindex_ThenReindexJobShouldComplete() _searchParameterDefinitionManager.DeleteSearchParameter(searchParam.ToTypedElement()); await _testHelper.DeleteSearchParameterStatusAsync(searchParam.Url, CancellationToken.None); - await _fixture.DataStore.HardDeleteAsync(sample1.Wrapper.ToResourceKey(), false, CancellationToken.None); - await _fixture.DataStore.HardDeleteAsync(sample2.Wrapper.ToResourceKey(), false, CancellationToken.None); + await _fixture.DataStore.HardDeleteAsync(sample1.Wrapper.ToResourceKey(), false, false, CancellationToken.None); + await _fixture.DataStore.HardDeleteAsync(sample2.Wrapper.ToResourceKey(), false, false, CancellationToken.None); } } @@ -466,8 +466,8 @@ public async Task GivenNewSearchParamCreatedBeforeResourcesToBeIndexed_WhenReind _searchParameterDefinitionManager.DeleteSearchParameter(searchParam.ToTypedElement()); await _testHelper.DeleteSearchParameterStatusAsync(searchParam.Url, CancellationToken.None); - await _fixture.DataStore.HardDeleteAsync(sample1.Wrapper.ToResourceKey(), false, CancellationToken.None); - await _fixture.DataStore.HardDeleteAsync(sample2.Wrapper.ToResourceKey(), false, CancellationToken.None); + await _fixture.DataStore.HardDeleteAsync(sample1.Wrapper.ToResourceKey(), false, false, CancellationToken.None); + await 
_fixture.DataStore.HardDeleteAsync(sample2.Wrapper.ToResourceKey(), false, false, CancellationToken.None); } } @@ -547,10 +547,10 @@ public async Task GivenReindexJobRunning_WhenReindexJobCancelRequest_ThenReindex _searchParameterDefinitionManager.DeleteSearchParameter(searchParam.ToTypedElement()); await _testHelper.DeleteSearchParameterStatusAsync(searchParam.Url, CancellationToken.None); - await _fixture.DataStore.HardDeleteAsync(sample1.Wrapper.ToResourceKey(), false, CancellationToken.None); - await _fixture.DataStore.HardDeleteAsync(sample2.Wrapper.ToResourceKey(), false, CancellationToken.None); - await _fixture.DataStore.HardDeleteAsync(sample3.Wrapper.ToResourceKey(), false, CancellationToken.None); - await _fixture.DataStore.HardDeleteAsync(sample4.Wrapper.ToResourceKey(), false, CancellationToken.None); + await _fixture.DataStore.HardDeleteAsync(sample1.Wrapper.ToResourceKey(), false, false, CancellationToken.None); + await _fixture.DataStore.HardDeleteAsync(sample2.Wrapper.ToResourceKey(), false, false, CancellationToken.None); + await _fixture.DataStore.HardDeleteAsync(sample3.Wrapper.ToResourceKey(), false, false, CancellationToken.None); + await _fixture.DataStore.HardDeleteAsync(sample4.Wrapper.ToResourceKey(), false, false, CancellationToken.None); } } @@ -615,8 +615,8 @@ public async Task GivenNewSearchParamCreatedAfterResourcesToBeIndexed_WhenReinde _searchParameterDefinitionManager.DeleteSearchParameter(searchParam.ToTypedElement()); await _testHelper.DeleteSearchParameterStatusAsync(searchParam.Url, CancellationToken.None); - await _fixture.DataStore.HardDeleteAsync(sample1.Wrapper.ToResourceKey(), false, CancellationToken.None); - await _fixture.DataStore.HardDeleteAsync(sample2.Wrapper.ToResourceKey(), false, CancellationToken.None); + await _fixture.DataStore.HardDeleteAsync(sample1.Wrapper.ToResourceKey(), false, false, CancellationToken.None); + await _fixture.DataStore.HardDeleteAsync(sample2.Wrapper.ToResourceKey(), false, false, 
CancellationToken.None); } } @@ -704,9 +704,9 @@ public async Task GivenSecondFHIRServiceSynced_WhenReindexJobCompleted_ThenSecon _searchParameterDefinitionManager2.DeleteSearchParameter(searchParam.ToTypedElement()); await _testHelper.DeleteSearchParameterStatusAsync(searchParam.Url, CancellationToken.None); - await _fixture.DataStore.HardDeleteAsync(sample1.Wrapper.ToResourceKey(), false, CancellationToken.None); - await _fixture.DataStore.HardDeleteAsync(sample2.Wrapper.ToResourceKey(), false, CancellationToken.None); - await _fixture.DataStore.HardDeleteAsync(searchParamWrapper.ToResourceKey(), false, CancellationToken.None); + await _fixture.DataStore.HardDeleteAsync(sample1.Wrapper.ToResourceKey(), false, false, CancellationToken.None); + await _fixture.DataStore.HardDeleteAsync(sample2.Wrapper.ToResourceKey(), false, false, CancellationToken.None); + await _fixture.DataStore.HardDeleteAsync(searchParamWrapper.ToResourceKey(), false, false, CancellationToken.None); } } @@ -764,7 +764,7 @@ public async Task GivenSecondFHIRServiceSynced_WhenSyncParametersOccursDuringDel _searchParameterDefinitionManager.DeleteSearchParameter(searchParam.ToTypedElement()); await _testHelper.DeleteSearchParameterStatusAsync(searchParam.Url, CancellationToken.None); - await _fixture.DataStore.HardDeleteAsync(searchParamWrapper.ToResourceKey(), false, CancellationToken.None); + await _fixture.DataStore.HardDeleteAsync(searchParamWrapper.ToResourceKey(), false, false, CancellationToken.None); } } @@ -838,8 +838,8 @@ public async Task GivenNewSearchParamWithResourceBaseType_WhenReindexJobComplete _searchParameterDefinitionManager.DeleteSearchParameter(searchParam.ToTypedElement()); await _testHelper.DeleteSearchParameterStatusAsync(searchParam.Url, CancellationToken.None); - await _fixture.DataStore.HardDeleteAsync(samplePatient.Wrapper.ToResourceKey(), false, CancellationToken.None); - await _fixture.DataStore.HardDeleteAsync(sampleObservation.Wrapper.ToResourceKey(), false, 
CancellationToken.None); + await _fixture.DataStore.HardDeleteAsync(samplePatient.Wrapper.ToResourceKey(), false, false, CancellationToken.None); + await _fixture.DataStore.HardDeleteAsync(sampleObservation.Wrapper.ToResourceKey(), false, false, CancellationToken.None); } } diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Reindex/ReindexSearchTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Reindex/ReindexSearchTests.cs index 3514d9719f..76e6cad29c 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Reindex/ReindexSearchTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Features/Operations/Reindex/ReindexSearchTests.cs @@ -76,7 +76,7 @@ public async Task GivenResourceWithMatchingHash_WhenPerformingReindexSearch_Then { if (testPatient != null) { - await _scopedDataStore.Value.HardDeleteAsync(testPatient.ToResourceKey(), false, CancellationToken.None); + await _scopedDataStore.Value.HardDeleteAsync(testPatient.ToResourceKey(), false, false, CancellationToken.None); } } } @@ -110,7 +110,7 @@ public async Task GivenResourceWithDifferentHash_WhenPerformingReindexSearch_The { if (testPatient != null) { - await _scopedDataStore.Value.HardDeleteAsync(testPatient.ToResourceKey(), false, CancellationToken.None); + await _scopedDataStore.Value.HardDeleteAsync(testPatient.ToResourceKey(), false, false, CancellationToken.None); } } } From 2d08cf0d9e2e33347794843dd351365ac83d8179 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Apr 2024 10:59:46 -0700 Subject: [PATCH 136/155] Bump Microsoft.Extensions.Http.Polly from 8.0.0 to 8.0.3 (#3784) --- Directory.Packages.props | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Directory.Packages.props b/Directory.Packages.props index 335052b23b..0927fe965e 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -70,7 +70,7 @@ 
- + From 02c37955d2cc377d04d2bb1161c00ff3ceb3b871 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Apr 2024 11:00:43 -0700 Subject: [PATCH 137/155] Bump Microsoft.Extensions.FileProviders.Embedded from 8.0.0 to 8.0.3 (#3791) --- Directory.Packages.props | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Directory.Packages.props b/Directory.Packages.props index 0927fe965e..8812a67553 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -68,7 +68,7 @@ - + From 3f8c1b90beef3d33d64a5df5c3f4c4c888ea2555 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Apr 2024 11:03:27 -0700 Subject: [PATCH 138/155] Bump prometheus-net.SystemMetrics from 2.0.0 to 3.1.0 (#3787) --- Directory.Packages.props | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Directory.Packages.props b/Directory.Packages.props index 8812a67553..c587b2ce30 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -104,7 +104,7 @@ - + From ddea837c5e4aa4a61ad517ec59fb08a577f72db5 Mon Sep 17 00:00:00 2001 From: rajithaalurims <110048715+rajithaalurims@users.noreply.github.com> Date: Wed, 3 Apr 2024 10:50:11 -0500 Subject: [PATCH 139/155] App names changed to avoid collission on HP side (#3795) --- build/jobs/e2e-tests.yml | 24 +++++----- build/jobs/run-export-tests.yml | 48 +++++++++---------- .../TestApplications.cs | 12 ++--- testauthenvironment.json | 12 ++--- 4 files changed, 48 insertions(+), 48 deletions(-) diff --git a/build/jobs/e2e-tests.yml b/build/jobs/e2e-tests.yml index b0fdfed9f1..89a651b9a4 100644 --- a/build/jobs/e2e-tests.yml +++ b/build/jobs/e2e-tests.yml @@ -125,15 +125,15 @@ steps: 'app_nativeClient_secret': $(app_nativeClient_secret) 'app_wrongAudienceClient_id': $(app_wrongAudienceClient_id) 'app_wrongAudienceClient_secret': $(app_wrongAudienceClient_secret) - 'app_globalAdminUser_id': 
$(app_globalAdminUser_id) - 'app_globalAdminUser_secret': $(app_globalAdminUser_secret) - 'app_globalConverterUser_id': $(app_globalConverterUser_id) - 'app_globalConverterUser_secret': $(app_globalConverterUser_secret) - 'app_globalExporterUser_id': $(app_globalExporterUser_id) - 'app_globalExporterUser_secret': $(app_globalExporterUser_secret) - 'app_globalImporterUser_id': $(app_globalImporterUser_id) - 'app_globalImporterUser_secret': $(app_globalImporterUser_secret) - 'app_globalReaderUser_id': $(app_globalReaderUser_id) - 'app_globalReaderUser_secret': $(app_globalReaderUser_secret) - 'app_globalWriterUser_id': $(app_globalWriterUser_id) - 'app_globalWriterUser_secret': $(app_globalWriterUser_secret) + 'app_globalAdminUserApp_id': $(app_globalAdminUserApp_id) + 'app_globalAdminUserApp_secret': $(app_globalAdminUserApp_secret) + 'app_globalConverterUserApp_id': $(app_globalConverterUserApp_id) + 'app_globalConverterUserApp_secret': $(app_globalConverterUserApp_secret) + 'app_globalExporterUserApp_id': $(app_globalExporterUserApp_id) + 'app_globalExporterUserApp_secret': $(app_globalExporterUserApp_secret) + 'app_globalImporterUserApp_id': $(app_globalImporterUserApp_id) + 'app_globalImporterUserApp_secret': $(app_globalImporterUserApp_secret) + 'app_globalReaderUserApp_id': $(app_globalReaderUserApp_id) + 'app_globalReaderUserApp_secret': $(app_globalReaderUserApp_secret) + 'app_globalWriterUserApp_id': $(app_globalWriterUserApp_id) + 'app_globalWriterUserApp_secret': $(app_globalWriterUserApp_secret) diff --git a/build/jobs/run-export-tests.yml b/build/jobs/run-export-tests.yml index cd6bbadff9..871aa4ef0a 100644 --- a/build/jobs/run-export-tests.yml +++ b/build/jobs/run-export-tests.yml @@ -95,18 +95,18 @@ jobs: 'app_nativeClient_secret': $(app_nativeClient_secret) 'app_wrongAudienceClient_id': $(app_wrongAudienceClient_id) 'app_wrongAudienceClient_secret': $(app_wrongAudienceClient_secret) - 'app_globalAdminUser_id': $(app_globalAdminUser_id) - 
'app_globalAdminUser_secret': $(app_globalAdminUser_secret) - 'app_globalConverterUser_id': $(app_globalConverterUser_id) - 'app_globalConverterUser_secret': $(app_globalConverterUser_secret) - 'app_globalExporterUser_id': $(app_globalExporterUser_id) - 'app_globalExporterUser_secret': $(app_globalExporterUser_secret) - 'app_globalImporterUser_id': $(app_globalImporterUser_id) - 'app_globalImporterUser_secret': $(app_globalImporterUser_secret) - 'app_globalReaderUser_id': $(app_globalReaderUser_id) - 'app_globalReaderUser_secret': $(app_globalReaderUser_secret) - 'app_globalWriterUser_id': $(app_globalWriterUser_id) - 'app_globalWriterUser_secret': $(app_globalWriterUser_secret) + 'app_globalAdminUserApp_id': $(app_globalAdminUserApp_id) + 'app_globalAdminUserApp_secret': $(app_globalAdminUserApp_secret) + 'app_globalConverterUserApp_id': $(app_globalConverterUserApp_id) + 'app_globalConverterUserApp_secret': $(app_globalConverterUserApp_secret) + 'app_globalExporterUserApp_id': $(app_globalExporterUserApp_id) + 'app_globalExporterUserApp_secret': $(app_globalExporterUserApp_secret) + 'app_globalImporterUserApp_id': $(app_globalImporterUserApp_id) + 'app_globalImporterUserApp_secret': $(app_globalImporterUserApp_secret) + 'app_globalReaderUserApp_id': $(app_globalReaderUserApp_id) + 'app_globalReaderUserApp_secret': $(app_globalReaderUserApp_secret) + 'app_globalWriterUserApp_id': $(app_globalWriterUserApp_id) + 'app_globalWriterUserApp_secret': $(app_globalWriterUserApp_secret) - job: 'sqlE2eTests' dependsOn: [] @@ -198,16 +198,16 @@ jobs: 'app_nativeClient_secret': $(app_nativeClient_secret) 'app_wrongAudienceClient_id': $(app_wrongAudienceClient_id) 'app_wrongAudienceClient_secret': $(app_wrongAudienceClient_secret) - 'app_globalAdminUser_id': $(app_globalAdminUser_id) - 'app_globalAdminUser_secret': $(app_globalAdminUser_secret) - 'app_globalConverterUser_id': $(app_globalConverterUser_id) - 'app_globalConverterUser_secret': $(app_globalConverterUser_secret) - 
'app_globalExporterUser_id': $(app_globalExporterUser_id) - 'app_globalExporterUser_secret': $(app_globalExporterUser_secret) - 'app_globalImporterUser_id': $(app_globalImporterUser_id) - 'app_globalImporterUser_secret': $(app_globalImporterUser_secret) - 'app_globalReaderUser_id': $(app_globalReaderUser_id) - 'app_globalReaderUser_secret': $(app_globalReaderUser_secret) - 'app_globalWriterUser_id': $(app_globalWriterUser_id) - 'app_globalWriterUser_secret': $(app_globalWriterUser_secret) + 'app_globalAdminUserApp_id': $(app_globalAdminUserApp_id) + 'app_globalAdminUserApp_secret': $(app_globalAdminUserApp_secret) + 'app_globalConverterUserApp_id': $(app_globalConverterUserApp_id) + 'app_globalConverterUserApp_secret': $(app_globalConverterUserApp_secret) + 'app_globalExporterUserApp_id': $(app_globalExporterUserApp_id) + 'app_globalExporterUserApp_secret': $(app_globalExporterUserApp_secret) + 'app_globalImporterUserApp_id': $(app_globalImporterUserApp_id) + 'app_globalImporterUserApp_secret': $(app_globalImporterUserApp_secret) + 'app_globalReaderUserApp_id': $(app_globalReaderUserApp_id) + 'app_globalReaderUserApp_secret': $(app_globalReaderUserApp_secret) + 'app_globalWriterUserApp_id': $(app_globalWriterUserApp_id) + 'app_globalWriterUserApp_secret': $(app_globalWriterUserApp_secret) diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E.Common/TestApplications.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E.Common/TestApplications.cs index 571c4ccc9f..f5f1aaedd2 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.E2E.Common/TestApplications.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E.Common/TestApplications.cs @@ -14,13 +14,13 @@ namespace Microsoft.Health.Fhir.Tests.E2E.Common public static class TestApplications { - public static TestApplication AdminUser { get; } = new TestApplication("globalAdminUser"); + public static TestApplication AdminUser { get; } = new TestApplication("globalAdminUserApp"); - public static TestApplication 
BulkImportUser { get; } = new TestApplication("globalImporterUser"); + public static TestApplication BulkImportUser { get; } = new TestApplication("globalImporterUserApp"); - public static TestApplication ConvertDataUser { get; } = new TestApplication("globalConverterUser"); + public static TestApplication ConvertDataUser { get; } = new TestApplication("globalConverterUserApp"); - public static TestApplication ExportUser { get; } = new TestApplication("globalExporterUser"); + public static TestApplication ExportUser { get; } = new TestApplication("globalExporterUserApp"); public static TestApplication GlobalAdminServicePrincipal { get; } = new TestApplication("globalAdminServicePrincipal"); @@ -28,9 +28,9 @@ public static class TestApplications public static TestApplication NativeClient { get; } = new TestApplication("nativeClient"); - public static TestApplication ReadOnlyUser { get; } = new TestApplication("globalReaderUser"); + public static TestApplication ReadOnlyUser { get; } = new TestApplication("globalReaderUserApp"); - public static TestApplication ReadWriteUser { get; } = new TestApplication("globalWriterUser"); + public static TestApplication ReadWriteUser { get; } = new TestApplication("globalWriterUserApp"); public static TestApplication SmartUserClient { get; } = new TestApplication("smartUserClient"); diff --git a/testauthenvironment.json b/testauthenvironment.json index ac0cb13388..70a9ed8e44 100644 --- a/testauthenvironment.json +++ b/testauthenvironment.json @@ -3,37 +3,37 @@ ], "clientApplications": [ { - "id": "globalReaderUser", + "id": "globalReaderUserApp", "roles": [ "globalReader" ] }, { - "id": "globalWriterUser", + "id": "globalWriterUserApp", "roles": [ "globalWriter" ] }, { - "id": "globalExporterUser", + "id": "globalExporterUserApp", "roles": [ "globalExporter" ] }, { - "id": "globalConverterUser", + "id": "globalConverterUserApp", "roles": [ "globalConverter" ] }, { - "id": "globalImporterUser", + "id": "globalImporterUserApp", 
"roles": [ "globalImporter" ] }, { - "id": "globalAdminUser", + "id": "globalAdminUserApp", "roles": [ "globalAdmin" ] From 06fa67cb73820b8f49d5b8ef4b84f1f3fd9943ec Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Apr 2024 09:20:09 -0700 Subject: [PATCH 140/155] Bump Azure.Storage.Blobs from 12.17.0 to 12.19.1 (#3766) --- Directory.Packages.props | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Directory.Packages.props b/Directory.Packages.props index c587b2ce30..459f52eacc 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -34,7 +34,7 @@ - + From e150df6c9c7a9541e2acfc6ba76e1af93d598ec2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Apr 2024 09:57:04 -0700 Subject: [PATCH 141/155] Bump Microsoft.Extensions.TimeProvider.Testing from 8.0.0 to 8.3.0 (#3794) --- Directory.Packages.props | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Directory.Packages.props b/Directory.Packages.props index 459f52eacc..7950b2442b 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -76,7 +76,7 @@ - + From fad0977dfbb01e27861ae35f65a853f38bfc75db Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Apr 2024 09:13:55 -0700 Subject: [PATCH 142/155] Bump coverlet.collector from 6.0.0 to 6.0.2 (#3793) --- Directory.Packages.props | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Directory.Packages.props b/Directory.Packages.props index 7950b2442b..3ca4834dc0 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -35,7 +35,7 @@ - + From 10c082779ad5ae794e4e4e95ba6a07667b38de5d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Apr 2024 10:38:42 -0700 Subject: [PATCH 143/155] Bump xunit.runner.visualstudio from 2.5.0 to 2.5.7 (#3779) 
--- Directory.Packages.props | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Directory.Packages.props b/Directory.Packages.props index 3ca4834dc0..8a2782bba0 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -117,7 +117,7 @@ - + From f2fac4ebd17c262c2170a92a15d85d17c82c21ad Mon Sep 17 00:00:00 2001 From: SergeyGaluzo <95932081+SergeyGaluzo@users.noreply.github.com> Date: Mon, 8 Apr 2024 09:36:37 -0700 Subject: [PATCH 144/155] Import error handling (#3796) * Adding ability to raise SQL exceptions in E2E tests * error handling * removed console * job hosting tests * limit to in proc * no retriable in export * correct wait * retriable back in export * rest * minus using * remove pragma * Remove retriable from export * tests * retriable obsolete * polly retries and simpler tests * Addressed comments * job conflict exception back * do not show error message for InternalServerError * job conflict exception test * removed generic exception from tests * removed not used vars * better tests * test for exution timeout --- .../Export/ExportProcessingJobTests.cs | 4 +- .../Import/GetImportRequestHandlerTests.cs | 2 +- .../BulkDelete/BulkDeleteProcessingJob.cs | 4 +- .../Operations/Export/ExportProcessingJob.cs | 15 +- .../Import/GetImportRequestHandler.cs | 11 +- .../Operations/Import/ImportErrorStore.cs | 17 ++- .../Import/ImportFileEtagNotMatchException.cs | 24 ---- ...ErrorResult.cs => ImportJobErrorResult.cs} | 7 +- .../Import/ImportProcessingException.cs | 24 ---- .../Import/ImportProcessingJobErrorResult.cs | 14 -- .../Storage/Queues/CosmosQueueClient.cs | 10 +- .../FhirClient.cs | 7 +- .../Import/ImportOrchestratorJobTests.cs | 117 ++++------------ .../Import/ImportProcessingJobTests.cs | 4 +- .../Import/ImportOrchestratorJob.cs | 96 +++++-------- .../Operations/Import/ImportProcessingJob.cs | 46 +++---- .../Features/Storage/SqlQueueClient.cs | 51 ++----- .../JobHostingTests.cs | 50 +------ .../JobExecutionException.cs | 2 - 
.../JobHosting.cs | 9 +- .../RetriableJobException.cs | 1 + .../Rest/Import/ImportRebuildIndexesTests.cs | 2 +- .../Rest/Import/ImportTestFixture.cs | 2 + .../Rest/Import/ImportTestHelper.cs | 2 +- .../Rest/Import/ImportTests.cs | 130 ++++++++++++++++-- .../Rest/InProcTestFhirServer.cs | 5 + 26 files changed, 258 insertions(+), 398 deletions(-) delete mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportFileEtagNotMatchException.cs rename src/Microsoft.Health.Fhir.Core/Features/Operations/Import/{ImportOrchestratorJobErrorResult.cs => ImportJobErrorResult.cs} (79%) delete mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingException.cs delete mode 100644 src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingJobErrorResult.cs diff --git a/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Export/ExportProcessingJobTests.cs b/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Export/ExportProcessingJobTests.cs index b856c4cf40..4daac4101a 100644 --- a/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Export/ExportProcessingJobTests.cs +++ b/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Export/ExportProcessingJobTests.cs @@ -56,7 +56,7 @@ public async Task GivenAnExportJob_WhenItIsCancelled_ThenAnExceptionIsThrown() var expectedResults = GenerateJobRecord(OperationStatus.Canceled); var processingJob = new ExportProcessingJob(new Func(MakeMockJob), new TestQueueClient(), new NullLogger()); - await Assert.ThrowsAsync(() => processingJob.ExecuteAsync(GenerateJobInfo(expectedResults), CancellationToken.None)); + await Assert.ThrowsAsync(() => processingJob.ExecuteAsync(GenerateJobInfo(expectedResults), CancellationToken.None)); } [Theory] @@ -67,7 +67,7 @@ public async Task GivenAnExportJob_WhenItFinishesInANonTerminalState_ThenAnExcep var expectedResults = GenerateJobRecord(status); var processingJob = new ExportProcessingJob(new 
Func(MakeMockJobThatReturnsImmediately), new TestQueueClient(), new NullLogger()); - await Assert.ThrowsAsync(() => processingJob.ExecuteAsync(GenerateJobInfo(expectedResults), CancellationToken.None)); + await Assert.ThrowsAsync(() => processingJob.ExecuteAsync(GenerateJobInfo(expectedResults), CancellationToken.None)); } [Fact] diff --git a/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/GetImportRequestHandlerTests.cs b/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/GetImportRequestHandlerTests.cs index c8dfbc2818..34ec51f998 100644 --- a/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/GetImportRequestHandlerTests.cs +++ b/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/GetImportRequestHandlerTests.cs @@ -85,7 +85,7 @@ public async Task GivenAFhirMediator_WhenGettingAnExistingBulkImportJobWithCompl [Fact] public async Task GivenAFhirMediator_WhenGettingAnCompletedImportJobWithFailure_ThenHttpResponseCodeShouldBeExpected() { - var orchestratorJobResult = new ImportOrchestratorJobErrorResult() + var orchestratorJobResult = new ImportJobErrorResult() { HttpStatusCode = HttpStatusCode.BadRequest, ErrorMessage = "error", diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/BulkDelete/BulkDeleteProcessingJob.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/BulkDelete/BulkDeleteProcessingJob.cs index 1361b87022..a92c531d70 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/BulkDelete/BulkDeleteProcessingJob.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/BulkDelete/BulkDeleteProcessingJob.cs @@ -104,9 +104,7 @@ public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancel if (exception != null) { - var jobException = new JobExecutionException($"Exception encounted while deleting resources: {result.Issues.First()}", result, exception); - jobException.RequestCancellationOnFailure = true; - throw jobException; + throw new 
JobExecutionException($"Exception encounted while deleting resources: {result.Issues.First()}", result, exception); } if (types.Count > 1) diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/ExportProcessingJob.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/ExportProcessingJob.cs index 9eb4ad86e5..9e348eef40 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/ExportProcessingJob.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Export/ExportProcessingJob.cs @@ -64,19 +64,16 @@ public Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancellation return JsonConvert.SerializeObject(record); case OperationStatus.Failed: - var exception = new JobExecutionException(record.FailureDetails.FailureReason, record); - exception.RequestCancellationOnFailure = true; - throw exception; + throw new JobExecutionException(record.FailureDetails.FailureReason, record); case OperationStatus.Canceled: - // This throws a RetriableJobException so the job handler doesn't change the job status. The job will not be retried as cancelled jobs are ignored. - throw new RetriableJobException($"[GroupId:{jobInfo.GroupId}/JobId:{jobInfo.Id}] Export job cancelled."); + throw new OperationCanceledException($"[GroupId:{jobInfo.GroupId}/JobId:{jobInfo.Id}] Export job cancelled."); case OperationStatus.Queued: case OperationStatus.Running: - throw new RetriableJobException($"[GroupId:{jobInfo.GroupId}/JobId:{jobInfo.Id}] Export job finished in non-terminal state. See logs from ExportJobTask."); + // If code works as designed, this exception shouldn't be reached + throw new JobExecutionException($"[GroupId:{jobInfo.GroupId}/JobId:{jobInfo.Id}] Export job finished in non-terminal state. See logs from ExportJobTask.", record); default: -#pragma warning disable CA2201 // Do not raise reserved exception types. This exception shouldn't be reached, but a switch statement needs a default condition. Nothing really fits here. 
- throw new Exception($"[GroupId:{jobInfo.GroupId}/JobId:{jobInfo.Id}] Job status not set."); -#pragma warning restore CA2201 // Do not raise reserved exception types + // If code works as designed, this exception shouldn't be reached + throw new JobExecutionException($"[GroupId:{jobInfo.GroupId}/JobId:{jobInfo.Id}] Job status not set."); } }, cancellationToken, diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/GetImportRequestHandler.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/GetImportRequestHandler.cs index 82c7bd2bff..b6341d78f4 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/GetImportRequestHandler.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/GetImportRequestHandler.cs @@ -96,13 +96,10 @@ public async Task Handle(GetImportRequest request, Cancellati } else if (coordInfo.Status == JobStatus.Failed) { - ImportOrchestratorJobErrorResult errorResult = JsonConvert.DeserializeObject(coordInfo.Result); - - string failureReason = errorResult.ErrorMessage; - HttpStatusCode failureStatusCode = errorResult.HttpStatusCode; - - throw new OperationFailedException( - string.Format(Core.Resources.OperationFailed, OperationsConstants.Import, failureReason), failureStatusCode); + var errorResult = JsonConvert.DeserializeObject(coordInfo.Result); + //// do not show error message for InternalServerError + var failureReason = errorResult.HttpStatusCode == HttpStatusCode.InternalServerError ? 
HttpStatusCode.InternalServerError.ToString() : errorResult.ErrorMessage; + throw new OperationFailedException(string.Format(Core.Resources.OperationFailed, OperationsConstants.Import, failureReason), errorResult.HttpStatusCode); } else if (coordInfo.Status == JobStatus.Cancelled) { diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportErrorStore.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportErrorStore.cs index 48bec833c3..4a3133e341 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportErrorStore.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportErrorStore.cs @@ -5,12 +5,14 @@ using System; using System.IO; +using System.Security.Cryptography; using System.Threading; using System.Threading.Tasks; using EnsureThat; using Microsoft.Extensions.Logging; using Microsoft.Health.JobManagement; using Microsoft.IO; +using Polly; namespace Microsoft.Health.Fhir.Core.Features.Operations.Import { @@ -20,6 +22,9 @@ public class ImportErrorStore : IImportErrorStore private Uri _fileUri; private RecyclableMemoryStreamManager _recyclableMemoryStreamManager; private ILogger _logger; + private static readonly AsyncPolicy _retries = Policy + .Handle() + .WaitAndRetryAsync(3, _ => TimeSpan.FromMilliseconds(RandomNumberGenerator.GetInt32(1000, 5000))); public ImportErrorStore(IIntegrationDataStoreClient integrationDataStoreClient, Uri fileUri, ILogger logger) { @@ -41,7 +46,6 @@ public ImportErrorStore(IIntegrationDataStoreClient integrationDataStoreClient, /// /// New import errors /// Cancellaltion Token - [System.Diagnostics.CodeAnalysis.SuppressMessage("Reliability", "CA2016:Forward the 'CancellationToken' parameter to methods", Justification = ".NET 6/8 compat")] public async Task UploadErrorsAsync(string[] importErrors, CancellationToken cancellationToken) { if (importErrors == null || importErrors.Length == 0) @@ -49,7 +53,7 @@ public async Task UploadErrorsAsync(string[] importErrors, 
CancellationToken can return; } - try + await _retries.ExecuteAsync(async () => { using var stream = new RecyclableMemoryStream(_recyclableMemoryStreamManager, tag: nameof(ImportErrorStore)); using StreamWriter writer = new StreamWriter(stream); @@ -64,13 +68,8 @@ public async Task UploadErrorsAsync(string[] importErrors, CancellationToken can string blockId = Convert.ToBase64String(Guid.NewGuid().ToByteArray()); await _integrationDataStoreClient.UploadBlockAsync(_fileUri, stream, blockId, cancellationToken); - await _integrationDataStoreClient.AppendCommitAsync(_fileUri, new string[] { blockId }, cancellationToken); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to upload import error log."); - throw new RetriableJobException(ex.Message, ex); - } + await _integrationDataStoreClient.AppendCommitAsync(_fileUri, [blockId], cancellationToken); + }); } } } diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportFileEtagNotMatchException.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportFileEtagNotMatchException.cs deleted file mode 100644 index c278a4a0bb..0000000000 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportFileEtagNotMatchException.cs +++ /dev/null @@ -1,24 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-// ------------------------------------------------------------------------------------------------- - -using System; -using System.Diagnostics; - -namespace Microsoft.Health.Fhir.Core.Features.Operations.Import -{ - public class ImportFileEtagNotMatchException : Exception - { - public ImportFileEtagNotMatchException(string message) - : base(message, null) - { - } - - public ImportFileEtagNotMatchException(string message, Exception innerException) - : base(message, innerException) - { - Debug.Assert(!string.IsNullOrEmpty(message), "Exception message should not be empty."); - } - } -} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorJobErrorResult.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportJobErrorResult.cs similarity index 79% rename from src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorJobErrorResult.cs rename to src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportJobErrorResult.cs index 34e5be28cd..f28b4f1ac2 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportOrchestratorJobErrorResult.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportJobErrorResult.cs @@ -7,7 +7,7 @@ namespace Microsoft.Health.Fhir.Core.Features.Operations.Import { - public class ImportOrchestratorJobErrorResult + public class ImportJobErrorResult { /// /// Err http status code @@ -19,11 +19,6 @@ public class ImportOrchestratorJobErrorResult /// public string ErrorMessage { get; set; } - /// - /// Inner error if there're multiple errors - /// - public ImportOrchestratorJobErrorResult InnerError { get; set; } - /// /// Details /// diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingException.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingException.cs deleted file mode 100644 index 1862f9fbbe..0000000000 --- 
a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingException.cs +++ /dev/null @@ -1,24 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. -// ------------------------------------------------------------------------------------------------- - -using System; -using System.Diagnostics; - -namespace Microsoft.Health.Fhir.Core.Features.Operations.Import -{ - public class ImportProcessingException : Exception - { - public ImportProcessingException(string message) - : this(message, null) - { - } - - public ImportProcessingException(string message, Exception innerException) - : base(message, innerException) - { - Debug.Assert(!string.IsNullOrEmpty(message), "Exception message should not be empty."); - } - } -} diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingJobErrorResult.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingJobErrorResult.cs deleted file mode 100644 index 19d7345608..0000000000 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingJobErrorResult.cs +++ /dev/null @@ -1,14 +0,0 @@ -// ------------------------------------------------------------------------------------------------- -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
-// ------------------------------------------------------------------------------------------------- - -namespace Microsoft.Health.Fhir.Core.Features.Operations.Import -{ - public class ImportProcessingJobErrorResult - { - public string Message { get; set; } - - public string Details { get; set; } - } -} diff --git a/src/Microsoft.Health.Fhir.CosmosDb/Features/Storage/Queues/CosmosQueueClient.cs b/src/Microsoft.Health.Fhir.CosmosDb/Features/Storage/Queues/CosmosQueueClient.cs index 82ec650a76..ee361c8490 100644 --- a/src/Microsoft.Health.Fhir.CosmosDb/Features/Storage/Queues/CosmosQueueClient.cs +++ b/src/Microsoft.Health.Fhir.CosmosDb/Features/Storage/Queues/CosmosQueueClient.cs @@ -30,7 +30,7 @@ public class CosmosQueueClient : IQueueClient private readonly ICosmosQueryFactory _queryFactory; private readonly ICosmosDbDistributedLockFactory _distributedLockFactory; private static readonly AsyncPolicy _retryPolicy = Policy - .Handle() + .Handle(ex => ex.StatusCode == HttpStatusCode.PreconditionFailed) .Or(ex => ex.StatusCode == HttpStatusCode.TooManyRequests) .Or() .WaitAndRetryAsync(5, _ => TimeSpan.FromMilliseconds(RandomNumberGenerator.GetInt32(100, 1000))); @@ -558,14 +558,6 @@ await container.Value.UpsertItemAsync( ignoreEtag ? 
new() : new() { IfMatchEtag = definition.ETag }, cancellationToken: cancellationToken)); } - catch (CosmosException ex) when (ex.StatusCode == HttpStatusCode.PreconditionFailed) - { - throw new RetriableJobException("Job precondition failed.", ex); - } - catch (CosmosException ex) when (ex.StatusCode == HttpStatusCode.TooManyRequests) - { - throw new RetriableJobException("Service too busy.", ex); - } catch (CosmosException ex) when (ex.StatusCode == HttpStatusCode.RequestEntityTooLarge) { throw new JobExecutionException("Job data too large.", ex); diff --git a/src/Microsoft.Health.Fhir.Shared.Client/FhirClient.cs b/src/Microsoft.Health.Fhir.Shared.Client/FhirClient.cs index 32d5436a51..04455d4094 100644 --- a/src/Microsoft.Health.Fhir.Shared.Client/FhirClient.cs +++ b/src/Microsoft.Health.Fhir.Shared.Client/FhirClient.cs @@ -470,14 +470,17 @@ public async Task CancelImport(Uri contentLocation, Cancell return await HttpClient.SendAsync(message, cancellationToken); } - public async Task CheckImportAsync(Uri contentLocation, CancellationToken cancellationToken = default) + public async Task CheckImportAsync(Uri contentLocation, bool checkSuccessStatus = true, CancellationToken cancellationToken = default) { using var message = new HttpRequestMessage(HttpMethod.Get, contentLocation); message.Headers.Add("Prefer", "respond-async"); var response = await HttpClient.SendAsync(message, cancellationToken); - await EnsureSuccessStatusCodeAsync(response); + if (checkSuccessStatus) + { + await EnsureSuccessStatusCodeAsync(response); + } return response; } diff --git a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportOrchestratorJobTests.cs b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportOrchestratorJobTests.cs index d6b3e0b399..4da43f4307 100644 --- a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportOrchestratorJobTests.cs +++ 
b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportOrchestratorJobTests.cs @@ -62,13 +62,13 @@ public async Task GivenAnOrchestratorJob_WhenResumeFromFailureSomeJobStillRunnin } [Fact] - public async Task GivenAnOrchestratorJob_WhenSomeJobsCancelled_ThenOperationCanceledExceptionShouldBeThrowAndWaitForOtherSubJobsCompleted() + public async Task GivenAnOrchestratorJob_WhenSomeJobsCancelled_ThenOperationCanceledExceptionShouldBeThrownAndWaitForOtherSubJobsCompleted() { await VerifyJobStatusChangedAsync(100, JobStatus.Cancelled, 20, 20); } [Fact] - public async Task GivenAnOrchestratorJob_WhenSomeJobsFailed_ThenImportProcessingExceptionShouldBeThrowAndWaitForOtherSubJobsCompleted() + public async Task GivenAnOrchestratorJob_WhenSomeJobsFailed_ThenExceptionWithBadRequestShouldBeThrownAndWaitForOtherSubJobsCompleted() { await VerifyJobStatusChangedAsync(100, JobStatus.Failed, 14, 14); } @@ -76,7 +76,7 @@ public async Task GivenAnOrchestratorJob_WhenSomeJobsFailed_ThenImportProcessing [InlineData(ImportMode.InitialLoad)] [InlineData(ImportMode.IncrementalLoad)] [Theory] - public async Task GivenAnOrchestratorJobAndWrongEtag_WhenOrchestratorJobStart_ThenJobShouldFailedWithDetails(ImportMode importMode) + public async Task GivenAnOrchestratorJobAndWrongEtag_WhenOrchestratorJobStart_ThenJobShouldFailWithDetails(ImportMode importMode) { RequestContextAccessor contextAccessor = Substitute.For>(); ILoggerFactory loggerFactory = new NullLoggerFactory(); @@ -116,7 +116,7 @@ public async Task GivenAnOrchestratorJobAndWrongEtag_WhenOrchestratorJobStart_Th auditLogger); JobExecutionException jobExecutionException = await Assert.ThrowsAsync(async () => await orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - ImportOrchestratorJobErrorResult resultDetails = (ImportOrchestratorJobErrorResult)jobExecutionException.Error; + ImportJobErrorResult resultDetails = (ImportJobErrorResult)jobExecutionException.Error; 
Assert.Equal(HttpStatusCode.BadRequest, resultDetails.HttpStatusCode); Assert.NotEmpty(resultDetails.ErrorMessage); @@ -173,7 +173,7 @@ public async Task GivenAnOrchestratorJobAndWrongEtag_WhenOrchestratorJobStart_Th [InlineData(ImportMode.InitialLoad)] [InlineData(ImportMode.IncrementalLoad)] [Theory] - public async Task GivenAnOrchestratorJob_WhenIntegrationExceptionThrow_ThenJobShouldFailedWithDetails(ImportMode importMode) + public async Task GivenAnOrchestratorJob_WhenIntegrationExceptionThrown_ThenJobShouldFailWithDetails(ImportMode importMode) { RequestContextAccessor contextAccessor = Substitute.For>(); ILoggerFactory loggerFactory = new NullLoggerFactory(); @@ -209,7 +209,7 @@ public async Task GivenAnOrchestratorJob_WhenIntegrationExceptionThrow_ThenJobSh auditLogger); JobExecutionException jobExecutionException = await Assert.ThrowsAsync(async () => await orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - ImportOrchestratorJobErrorResult resultDetails = (ImportOrchestratorJobErrorResult)jobExecutionException.Error; + ImportJobErrorResult resultDetails = (ImportJobErrorResult)jobExecutionException.Error; Assert.Equal(HttpStatusCode.Unauthorized, resultDetails.HttpStatusCode); Assert.NotEmpty(resultDetails.ErrorMessage); @@ -266,66 +266,7 @@ public async Task GivenAnOrchestratorJob_WhenIntegrationExceptionThrow_ThenJobSh [InlineData(ImportMode.InitialLoad)] [InlineData(ImportMode.IncrementalLoad)] [Theory] - public async Task GivenAnOrchestratorJob_WhenRetriableExceptionThrow_ThenJobExecutionShuldFailedWithRetriableException(ImportMode importMode) - { - RequestContextAccessor contextAccessor = Substitute.For>(); - ILoggerFactory loggerFactory = new NullLoggerFactory(); - IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For(); - IMediator mediator = Substitute.For(); - ImportOrchestratorJobDefinition importOrchestratorInputData = new ImportOrchestratorJobDefinition(); - List<(long begin, long end)> 
surrogatedIdRanges = new List<(long begin, long end)>(); - IAuditLogger auditLogger = Substitute.For(); - - importOrchestratorInputData.BaseUri = new Uri("http://dummy"); - var inputs = new[] { new InputResource() { Type = "Resource", Url = new Uri($"http://dummy") } }; - importOrchestratorInputData.Input = inputs; - importOrchestratorInputData.InputFormat = "ndjson"; - importOrchestratorInputData.InputSource = new Uri("http://dummy"); - importOrchestratorInputData.RequestUri = new Uri("http://dummy"); - importOrchestratorInputData.ImportMode = importMode; - - integrationDataStoreClient.GetPropertiesAsync(Arg.Any(), Arg.Any()) - .Returns(callInfo => - { - throw new RetriableJobException("test"); - #pragma warning disable CS0162 // Unreachable code detected - return new Dictionary(); - }); - - TestQueueClient testQueueClient = new TestQueueClient(); - JobInfo orchestratorJobInfo = (await testQueueClient.EnqueueAsync(0, new string[] { JsonConvert.SerializeObject(importOrchestratorInputData) }, 1, false, false, CancellationToken.None)).First(); - - ImportOrchestratorJob orchestratorJob = new ImportOrchestratorJob( - mediator, - contextAccessor, - integrationDataStoreClient, - testQueueClient, - Options.Create(new ImportTaskConfiguration()), - loggerFactory, - auditLogger); - orchestratorJob.PollingPeriodSec = 0; - - await Assert.ThrowsAnyAsync(() => orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - - _ = mediator.DidNotReceive().Publish( - Arg.Any(), - Arg.Any()); - - auditLogger.DidNotReceiveWithAnyArgs().LogAudit( - auditAction: default, - operation: default, - resourceType: default, - requestUri: default, - statusCode: default, - correlationId: default, - callerIpAddress: default, - callerClaims: default); - } - - [InlineData(ImportMode.InitialLoad)] - [InlineData(ImportMode.IncrementalLoad)] - [Theory] - public async Task 
GivenAnOrchestratorJob_WhenLastSubJobFailed_ThenImportProcessingExceptionShouldBeThrowAndWaitForOtherSubJobsCancelledAndCompleted(ImportMode importMode) + public async Task GivenAnOrchestratorJob_WhenLastSubJobFailed_ThenExceptionWithBadRequestShouldThrownAndWaitForOtherSubJobsCancelledAndCompleted(ImportMode importMode) { RequestContextAccessor contextAccessor = Substitute.For>(); ILoggerFactory loggerFactory = new NullLoggerFactory(); @@ -343,7 +284,7 @@ public async Task GivenAnOrchestratorJob_WhenLastSubJobFailed_ThenImportProcessi if (jobInfo.Id == 3) { jobInfo.Status = JobStatus.Failed; - jobInfo.Result = JsonConvert.SerializeObject(new ImportProcessingJobErrorResult() { Message = "Job Failed" }); + jobInfo.Result = JsonConvert.SerializeObject(new ImportJobErrorResult() { ErrorMessage = "Job Failed", HttpStatusCode = HttpStatusCode.BadRequest }); } return jobInfo; @@ -397,7 +338,7 @@ public async Task GivenAnOrchestratorJob_WhenLastSubJobFailed_ThenImportProcessi orchestratorJob.PollingPeriodSec = 0; var jobExecutionException = await Assert.ThrowsAnyAsync(() => orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - ImportOrchestratorJobErrorResult resultDetails = (ImportOrchestratorJobErrorResult)jobExecutionException.Error; + ImportJobErrorResult resultDetails = (ImportJobErrorResult)jobExecutionException.Error; Assert.Equal(HttpStatusCode.BadRequest, resultDetails.HttpStatusCode); Assert.Equal(1, testQueueClient.JobInfos.Count(t => t.Status == JobStatus.Failed)); Assert.Equal(2, testQueueClient.JobInfos.Count(t => t.Status == JobStatus.Cancelled)); @@ -446,7 +387,7 @@ public async Task GivenAnOrchestratorJob_WhenLastSubJobFailed_ThenImportProcessi [InlineData(ImportMode.InitialLoad)] [InlineData(ImportMode.IncrementalLoad)] [Theory] - public async Task GivenAnOrchestratorJob_WhenSubJobFailedAndOthersRunning_ThenImportProcessingExceptionShouldBeThrowAndContextUpdated(ImportMode importMode) + public async Task 
GivenAnOrchestratorJob_WhenSubJobFailedAndOthersRunning_ThenExceptionWithBadRequestShouldThrownAndContextUpdated(ImportMode importMode) { RequestContextAccessor contextAccessor = Substitute.For>(); ILoggerFactory loggerFactory = new NullLoggerFactory(); @@ -464,7 +405,7 @@ public async Task GivenAnOrchestratorJob_WhenSubJobFailedAndOthersRunning_ThenIm { Id = id, Status = JobManagement.JobStatus.Failed, - Result = JsonConvert.SerializeObject(new ImportProcessingJobErrorResult() { Message = "error" }), + Result = JsonConvert.SerializeObject(new ImportJobErrorResult() { ErrorMessage = "error", HttpStatusCode = HttpStatusCode.BadRequest }), }; } @@ -530,7 +471,7 @@ public async Task GivenAnOrchestratorJob_WhenSubJobFailedAndOthersRunning_ThenIm orchestratorJob.PollingPeriodSec = 0; var jobExecutionException = await Assert.ThrowsAnyAsync(() => orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - ImportOrchestratorJobErrorResult resultDetails = (ImportOrchestratorJobErrorResult)jobExecutionException.Error; + ImportJobErrorResult resultDetails = (ImportJobErrorResult)jobExecutionException.Error; Assert.Equal(HttpStatusCode.BadRequest, resultDetails.HttpStatusCode); @@ -580,7 +521,7 @@ public async Task GivenAnOrchestratorJob_WhenSubJobFailedAndOthersRunning_ThenIm [InlineData(ImportMode.InitialLoad)] [InlineData(ImportMode.IncrementalLoad)] [Theory] - public async Task GivenAnOrchestratorJob_WhneSubJobCancelledAfterThreeCalls_ThenOperationCanceledExceptionShouldBeThrowAndContextUpdate(ImportMode importMode) + public async Task GivenAnOrchestratorJob_WhneSubJobCancelledAfterThreeCalls_ThenOperationCanceledExceptionShouldBeThrownAndContextUpdate(ImportMode importMode) { RequestContextAccessor contextAccessor = Substitute.For>(); ILoggerFactory loggerFactory = new NullLoggerFactory(); @@ -596,7 +537,7 @@ public async Task GivenAnOrchestratorJob_WhneSubJobCancelledAfterThreeCalls_Then if (++callTime > 3) { jobInfo.Status = JobStatus.Cancelled; - 
jobInfo.Result = JsonConvert.SerializeObject(new ImportProcessingJobErrorResult() { Message = "Job Cancelled" }); + jobInfo.Result = JsonConvert.SerializeObject(new ImportJobErrorResult() { ErrorMessage = "Job Cancelled" }); } return jobInfo; @@ -633,7 +574,7 @@ public async Task GivenAnOrchestratorJob_WhneSubJobCancelledAfterThreeCalls_Then orchestratorJob.PollingPeriodSec = 0; var jobExecutionException = await Assert.ThrowsAnyAsync(() => orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - ImportOrchestratorJobErrorResult resultDetails = (ImportOrchestratorJobErrorResult)jobExecutionException.Error; + ImportJobErrorResult resultDetails = (ImportJobErrorResult)jobExecutionException.Error; Assert.Equal(HttpStatusCode.BadRequest, resultDetails.HttpStatusCode); @@ -683,7 +624,7 @@ public async Task GivenAnOrchestratorJob_WhneSubJobCancelledAfterThreeCalls_Then [InlineData(ImportMode.InitialLoad)] [InlineData(ImportMode.IncrementalLoad)] [Theory] - public async Task GivenAnOrchestratorJob_WhenSubJobFailedAfterThreeCalls_ThenImportProcessingExceptionShouldBeThrowAndContextUpdated(ImportMode importMode) + public async Task GivenAnOrchestratorJob_WhenSubJobFailedAfterThreeCalls_ThenExceptionWithBadRequestShouldThrownAndContextUpdated(ImportMode importMode) { RequestContextAccessor contextAccessor = Substitute.For>(); ILoggerFactory loggerFactory = new NullLoggerFactory(); @@ -699,7 +640,7 @@ public async Task GivenAnOrchestratorJob_WhenSubJobFailedAfterThreeCalls_ThenImp if (++callTime > 3) { jobInfo.Status = JobStatus.Failed; - jobInfo.Result = JsonConvert.SerializeObject(new ImportProcessingJobErrorResult() { Message = "error" }); + jobInfo.Result = JsonConvert.SerializeObject(new ImportJobErrorResult() { ErrorMessage = "error", HttpStatusCode = HttpStatusCode.BadRequest }); } return jobInfo; @@ -736,7 +677,7 @@ public async Task GivenAnOrchestratorJob_WhenSubJobFailedAfterThreeCalls_ThenImp orchestratorJob.PollingPeriodSec = 0; var 
jobExecutionException = await Assert.ThrowsAnyAsync(() => orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - ImportOrchestratorJobErrorResult resultDetails = (ImportOrchestratorJobErrorResult)jobExecutionException.Error; + ImportJobErrorResult resultDetails = (ImportJobErrorResult)jobExecutionException.Error; Assert.Equal(HttpStatusCode.BadRequest, resultDetails.HttpStatusCode); Assert.Equal("error", resultDetails.ErrorMessage); @@ -787,7 +728,7 @@ public async Task GivenAnOrchestratorJob_WhenSubJobFailedAfterThreeCalls_ThenImp [InlineData(ImportMode.InitialLoad)] [InlineData(ImportMode.IncrementalLoad)] [Theory] - public async Task GivenAnOrchestratorJob_WhenSubJobCancelled_ThenOperationCancelledExceptionShouldBeThrowAndContextUpdated(ImportMode importMode) + public async Task GivenAnOrchestratorJob_WhenSubJobCancelled_ThenOperationCancelledExceptionShouldBeThrownAndContextUpdated(ImportMode importMode) { RequestContextAccessor contextAccessor = Substitute.For>(); ILoggerFactory loggerFactory = new NullLoggerFactory(); @@ -801,7 +742,7 @@ public async Task GivenAnOrchestratorJob_WhenSubJobCancelled_ThenOperationCancel JobInfo jobInfo = new JobInfo() { Status = JobManagement.JobStatus.Cancelled, - Result = JsonConvert.SerializeObject(new ImportProcessingJobErrorResult() { Message = "error" }), + Result = JsonConvert.SerializeObject(new ImportJobErrorResult() { ErrorMessage = "error" }), }; return jobInfo; @@ -840,7 +781,7 @@ public async Task GivenAnOrchestratorJob_WhenSubJobCancelled_ThenOperationCancel orchestratorJob.PollingPeriodSec = 0; var jobExecutionException = await Assert.ThrowsAnyAsync(() => orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - ImportOrchestratorJobErrorResult resultDetails = (ImportOrchestratorJobErrorResult)jobExecutionException.Error; + ImportJobErrorResult resultDetails = (ImportJobErrorResult)jobExecutionException.Error; Assert.Equal(HttpStatusCode.BadRequest, 
resultDetails.HttpStatusCode); @@ -890,7 +831,7 @@ public async Task GivenAnOrchestratorJob_WhenSubJobCancelled_ThenOperationCancel [InlineData(ImportMode.InitialLoad)] [InlineData(ImportMode.IncrementalLoad)] [Theory] - public async Task GivenAnOrchestratorJob_WhenSubJobFailed_ThenImportProcessingExceptionShouldBeThrowAndContextUpdated(ImportMode importMode) + public async Task GivenAnOrchestratorJob_WhenSubJobFailed_ThenExceptionWithBadRequestShouldThrownAndContextUpdated(ImportMode importMode) { RequestContextAccessor contextAccessor = Substitute.For>(); ILoggerFactory loggerFactory = new NullLoggerFactory(); @@ -904,7 +845,7 @@ public async Task GivenAnOrchestratorJob_WhenSubJobFailed_ThenImportProcessingEx JobInfo jobInfo = new JobInfo() { Status = JobManagement.JobStatus.Failed, - Result = JsonConvert.SerializeObject(new ImportProcessingJobErrorResult() { Message = "error" }), + Result = JsonConvert.SerializeObject(new ImportJobErrorResult() { ErrorMessage = "error", HttpStatusCode = HttpStatusCode.BadRequest }), }; return jobInfo; @@ -943,7 +884,7 @@ public async Task GivenAnOrchestratorJob_WhenSubJobFailed_ThenImportProcessingEx orchestratorJob.PollingPeriodSec = 0; var jobExecutionException = await Assert.ThrowsAnyAsync(() => orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - ImportOrchestratorJobErrorResult resultDetails = (ImportOrchestratorJobErrorResult)jobExecutionException.Error; + ImportJobErrorResult resultDetails = (ImportJobErrorResult)jobExecutionException.Error; Assert.Equal(HttpStatusCode.BadRequest, resultDetails.HttpStatusCode); Assert.Equal("error", resultDetails.ErrorMessage); @@ -1079,7 +1020,7 @@ private static async Task VerifyJobStatusChangedAsync(int inputFileCount, JobSta return null; } - if (jobInfo.Status == JobManagement.JobStatus.Completed) + if (jobInfo.Status == JobStatus.Completed) { return jobInfo; } @@ -1090,7 +1031,7 @@ private static async Task VerifyJobStatusChangedAsync(int inputFileCount, 
JobSta { Id = jobInfo.Id, Status = jobStatus, - Result = JsonConvert.SerializeObject(new ImportProcessingJobErrorResult() { Message = "error" }), + Result = JsonConvert.SerializeObject(new ImportJobErrorResult() { ErrorMessage = "error", HttpStatusCode = HttpStatusCode.BadRequest }), }; } @@ -1101,7 +1042,7 @@ private static async Task VerifyJobStatusChangedAsync(int inputFileCount, JobSta processingResult.ErrorLogLocation = "http://dummy/error"; jobInfo.Result = JsonConvert.SerializeObject(processingResult); - jobInfo.Status = JobManagement.JobStatus.Completed; + jobInfo.Status = JobStatus.Completed; return jobInfo; }; @@ -1151,7 +1092,7 @@ private static async Task VerifyJobStatusChangedAsync(int inputFileCount, JobSta importOrchestratorJobInputData.InputFormat = "ndjson"; importOrchestratorJobInputData.InputSource = new Uri("http://dummy"); importOrchestratorJobInputData.RequestUri = new Uri("http://dummy"); - JobInfo orchestratorJobInfo = (await testQueueClient.EnqueueAsync(0, new string[] { JsonConvert.SerializeObject(importOrchestratorJobInputData) }, 1, false, false, CancellationToken.None)).First(); + JobInfo orchestratorJobInfo = (await testQueueClient.EnqueueAsync(0, [JsonConvert.SerializeObject(importOrchestratorJobInputData)], 1, false, false, CancellationToken.None)).First(); integrationDataStoreClient.GetPropertiesAsync(Arg.Any(), Arg.Any()) .Returns(callInfo => @@ -1172,7 +1113,7 @@ private static async Task VerifyJobStatusChangedAsync(int inputFileCount, JobSta auditLogger); orchestratorJob.PollingPeriodSec = 0; var jobExecutionException = await Assert.ThrowsAnyAsync(() => orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - ImportOrchestratorJobErrorResult resultDetails = (ImportOrchestratorJobErrorResult)jobExecutionException.Error; + ImportJobErrorResult resultDetails = (ImportJobErrorResult)jobExecutionException.Error; Assert.Equal(HttpStatusCode.BadRequest, resultDetails.HttpStatusCode); _ = mediator.Received().Publish( 
diff --git a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportProcessingJobTests.cs b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportProcessingJobTests.cs index 38c1c0f0be..8c410bd7b9 100644 --- a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportProcessingJobTests.cs +++ b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportProcessingJobTests.cs @@ -94,7 +94,7 @@ public async Task GivenImportInput_WhenStartFromClean_ThenAllResoruceShouldBeImp } [Fact] - public async Task GivenImportInput_WhenExceptionThrowForLoad_ThenRetriableExceptionShouldBeThrow() + public async Task GivenImportInput_WhenExceptionThrowForLoad_ThenJobExecutionExceptionShouldBeThrown() { ImportProcessingJobDefinition inputData = GetInputData(); ImportProcessingJobResult result = new ImportProcessingJobResult(); @@ -140,7 +140,7 @@ public async Task GivenImportInput_WhenExceptionThrowForLoad_ThenRetriableExcept contextAccessor, loggerFactory); - await Assert.ThrowsAsync(() => job.ExecuteAsync(GetJobInfo(inputData, result), CancellationToken.None)); + await Assert.ThrowsAsync(() => job.ExecuteAsync(GetJobInfo(inputData, result), CancellationToken.None)); } [Fact] diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportOrchestratorJob.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportOrchestratorJob.cs index 5b69861548..8a61fa0d0c 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportOrchestratorJob.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportOrchestratorJob.cs @@ -31,6 +31,7 @@ using Microsoft.Health.Fhir.Core.Features.Operations.Import.Models; using Microsoft.Health.JobManagement; using Newtonsoft.Json; +using Polly; using JobStatus = Microsoft.Health.JobManagement.JobStatus; namespace Microsoft.Health.Fhir.SqlServer.Features.Operations.Import @@ -48,6 +49,9 @@ public 
class ImportOrchestratorJob : IJob private IIntegrationDataStoreClient _integrationDataStoreClient; private readonly IAuditLogger _auditLogger; internal const string DefaultCallerAgent = "Microsoft.Health.Fhir.Server"; + private static readonly AsyncPolicy _timeoutRetries = Policy + .Handle(ex => ex.IsExecutionTimeout()) + .WaitAndRetryAsync(3, _ => TimeSpan.FromMilliseconds(RandomNumberGenerator.GetInt32(1000, 5000))); public ImportOrchestratorJob( IMediator mediator, @@ -99,7 +103,7 @@ public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancel currentResult.Request = inputData.RequestUri.ToString(); - ImportOrchestratorJobErrorResult errorResult = null; + ImportJobErrorResult errorResult = null; try { @@ -114,88 +118,51 @@ public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancel catch (TaskCanceledException taskCanceledEx) { _logger.LogJobInformation(taskCanceledEx, jobInfo, "Import job canceled. {Message}", taskCanceledEx.Message); - - errorResult = new ImportOrchestratorJobErrorResult() + errorResult = new ImportJobErrorResult() { HttpStatusCode = HttpStatusCode.BadRequest, ErrorMessage = taskCanceledEx.Message, }; - - // Processing jobs has been cancelled by CancelImportRequestHandler await WaitCancelledJobCompletedAsync(jobInfo); await SendImportMetricsNotification(JobStatus.Cancelled, jobInfo, currentResult, inputData.ImportMode, fhirRequestContext); } catch (OperationCanceledException canceledEx) { _logger.LogJobInformation(canceledEx, jobInfo, "Import job canceled. 
{Message}", canceledEx.Message); - - errorResult = new ImportOrchestratorJobErrorResult() + errorResult = new ImportJobErrorResult() { HttpStatusCode = HttpStatusCode.BadRequest, ErrorMessage = canceledEx.Message, }; - - // Processing jobs has been cancelled by CancelImportRequestHandler await WaitCancelledJobCompletedAsync(jobInfo); await SendImportMetricsNotification(JobStatus.Cancelled, jobInfo, currentResult, inputData.ImportMode, fhirRequestContext); } catch (IntegrationDataStoreException integrationDataStoreEx) { _logger.LogJobInformation(integrationDataStoreEx, jobInfo, "Failed to access input files."); - - errorResult = new ImportOrchestratorJobErrorResult() + errorResult = new ImportJobErrorResult() { HttpStatusCode = integrationDataStoreEx.StatusCode, ErrorMessage = integrationDataStoreEx.Message, }; - await SendImportMetricsNotification(JobStatus.Failed, jobInfo, currentResult, inputData.ImportMode, fhirRequestContext); } - catch (ImportFileEtagNotMatchException eTagEx) + catch (JobExecutionException ex) { - _logger.LogJobInformation(eTagEx, jobInfo, "Import file etag not match."); - - errorResult = new ImportOrchestratorJobErrorResult() + _logger.LogJobInformation(ex, jobInfo, "Failed to process input resources."); + errorResult = ex.Error != null ? 
(ImportJobErrorResult)ex.Error : new ImportJobErrorResult() { ErrorMessage = ex.Message, ErrorDetails = ex.ToString() }; + if (errorResult.HttpStatusCode == 0) { - HttpStatusCode = HttpStatusCode.BadRequest, - ErrorMessage = eTagEx.Message, - }; - - await SendImportMetricsNotification(JobStatus.Failed, jobInfo, currentResult, inputData.ImportMode, fhirRequestContext); - } - catch (ImportProcessingException processingEx) - { - _logger.LogJobInformation(processingEx, jobInfo, "Failed to process input resources."); - - errorResult = new ImportOrchestratorJobErrorResult() - { - HttpStatusCode = HttpStatusCode.BadRequest, - ErrorMessage = processingEx.Message, - ErrorDetails = processingEx.ToString(), - }; + errorResult.HttpStatusCode = HttpStatusCode.InternalServerError; + } - // Cancel other processing jobs await CancelProcessingJobsAsync(jobInfo); await SendImportMetricsNotification(JobStatus.Failed, jobInfo, currentResult, inputData.ImportMode, fhirRequestContext); } - catch (RetriableJobException ex) - { - _logger.LogJobInformation(ex, jobInfo, "Failed with RetriableJobException."); - - throw; - } catch (Exception ex) { _logger.LogJobInformation(ex, jobInfo, "Failed to import data."); - - errorResult = new ImportOrchestratorJobErrorResult() - { - HttpStatusCode = HttpStatusCode.InternalServerError, - ErrorMessage = ex.Message, - ErrorDetails = ex.ToString(), - }; - - // Cancel processing jobs for critical error in orchestrator job + errorResult = new ImportJobErrorResult() { ErrorMessage = ex.Message, ErrorDetails = ex.ToString(), HttpStatusCode = HttpStatusCode.InternalServerError }; await CancelProcessingJobsAsync(jobInfo); await SendImportMetricsNotification(JobStatus.Failed, jobInfo, currentResult, inputData.ImportMode, fhirRequestContext); } @@ -218,7 +185,9 @@ private async Task ValidateResourcesAsync(ImportOrchestratorJobDefinition inputD { if (!input.Etag.Equals(properties[IntegrationDataStoreClientConstants.BlobPropertyETag])) { - throw new 
ImportFileEtagNotMatchException(string.Format("Input file Etag not match. {0}", input.Url)); + var errorMessage = string.Format("Input file Etag not match. {0}", input.Url); + var errorResult = new ImportJobErrorResult { ErrorMessage = errorMessage, HttpStatusCode = HttpStatusCode.BadRequest }; + throw new JobExecutionException(errorMessage, errorResult); } } }); @@ -320,13 +289,13 @@ private async Task WaitCompletion(JobInfo orchestratorInfo, IList jobIds, try { var start = Stopwatch.StartNew(); - jobInfos.AddRange(await _queueClient.GetJobsByIdsAsync(QueueType.Import, jobIdsToCheck.ToArray(), false, cancellationToken)); + jobInfos.AddRange(await _timeoutRetries.ExecuteAsync(async () => await _queueClient.GetJobsByIdsAsync(QueueType.Import, jobIdsToCheck.ToArray(), false, cancellationToken))); duration = start.Elapsed.TotalSeconds; } - catch (Exception ex) + catch (SqlException ex) { _logger.LogJobError(ex, orchestratorInfo, "Failed to get running jobs."); - throw new RetriableJobException(ex.Message, ex); + throw new JobExecutionException(ex.Message, ex); } foreach (var jobInfo in jobInfos) @@ -342,9 +311,9 @@ private async Task WaitCompletion(JobInfo orchestratorInfo, IList jobIds, } else if (jobInfo.Status == JobStatus.Failed) { - var procesingJobResult = jobInfo.DeserializeResult(); - _logger.LogJobError(jobInfo, "Job is set to 'Failed'. Message: {Message}.", procesingJobResult.Message); - throw new ImportProcessingException(procesingJobResult.Message); + var procesingJobResult = jobInfo.DeserializeResult(); + _logger.LogJobError(jobInfo, "Job is set to 'Failed'. 
Message: {Message}.", procesingJobResult.ErrorMessage); + throw new JobExecutionException(procesingJobResult.ErrorMessage, procesingJobResult); } else if (jobInfo.Status == JobStatus.Cancelled) { @@ -405,19 +374,20 @@ private async Task> EnqueueProcessingJobsAsync(IEnumerable x.Id).OrderBy(x => x).ToList(); + var jobIds = await _timeoutRetries.ExecuteAsync(async () => (await _queueClient.EnqueueAsync(QueueType.Import, cancellationToken, groupId: groupId, definitions: definitions.ToArray())).Select(x => x.Id).OrderBy(x => x).ToList()); return jobIds; } catch (SqlException ex) when (ex.Number == 2627) { const string message = "Duplicate file detected in list of files to import."; _logger.LogJobError(ex, orchestratorInfo, message); - throw new JobExecutionException(message, ex); + var error = new ImportJobErrorResult() { ErrorMessage = ex.Message, ErrorDetails = ex.ToString(), HttpStatusCode = HttpStatusCode.BadRequest }; + throw new JobExecutionException(message, error, ex); } catch (Exception ex) { - _logger.LogJobError(ex, orchestratorInfo, "Failed to enqueue job."); - throw new RetriableJobException(ex.Message, ex); + _logger.LogJobError(ex, orchestratorInfo, "Failed to enqueue jobs."); + throw new JobExecutionException("Failed to enqueue jobs.", ex); } } @@ -444,18 +414,16 @@ private async Task WaitCancelledJobCompletedAsync(JobInfo jobInfo) try { _logger.LogJobInformation(jobInfo, nameof(WaitCancelledJobCompletedAsync)); - - IEnumerable jobInfos = await _queueClient.GetJobByGroupIdAsync(QueueType.Import, jobInfo.GroupId, false, CancellationToken.None); - + var jobInfos = await _timeoutRetries.ExecuteAsync(async () => await _queueClient.GetJobByGroupIdAsync(QueueType.Import, jobInfo.GroupId, false, CancellationToken.None)); if (jobInfos.All(t => (t.Status != JobStatus.Created && t.Status != JobStatus.Running) || !t.CancelRequested || t.Id == jobInfo.Id)) { break; } } - catch (Exception ex) + catch (SqlException ex) { _logger.LogJobWarning(ex, jobInfo, "Failed to 
get jobs by groupId {GroupId}.", jobInfo.GroupId); - throw new RetriableJobException(ex.Message, ex); + throw new JobExecutionException(ex.Message, ex); } await Task.Delay(TimeSpan.FromSeconds(5)); diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportProcessingJob.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportProcessingJob.cs index 86e96e11d0..f6756faed2 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportProcessingJob.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportProcessingJob.cs @@ -79,22 +79,14 @@ public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancel (Channel importResourceChannel, Task loadTask) = _importResourceLoader.LoadResources(definition.ResourceLocation, definition.Offset, definition.BytesToRead, definition.ResourceType, definition.ImportMode, cancellationToken); // Import to data store - try - { - var importProgress = await _importer.Import(importResourceChannel, importErrorStore, definition.ImportMode, cancellationToken); + var importProgress = await _importer.Import(importResourceChannel, importErrorStore, definition.ImportMode, cancellationToken); - currentResult.SucceededResources = importProgress.SucceededResources; - currentResult.FailedResources = importProgress.FailedResources; - currentResult.ErrorLogLocation = importErrorStore.ErrorFileLocation; - currentResult.ProcessedBytes = importProgress.ProcessedBytes; + currentResult.SucceededResources = importProgress.SucceededResources; + currentResult.FailedResources = importProgress.FailedResources; + currentResult.ErrorLogLocation = importErrorStore.ErrorFileLocation; + currentResult.ProcessedBytes = importProgress.ProcessedBytes; - _logger.LogJobInformation(jobInfo, "Import Job {JobId} progress: succeed {SucceedCount}, failed: {FailedCount}", jobInfo.Id, currentResult.SucceededResources, currentResult.FailedResources); - } - catch (Exception ex) - { - 
_logger.LogJobError(ex, jobInfo, "Failed to import data."); - throw; - } + _logger.LogJobInformation(jobInfo, "Import Job {JobId} progress: succeed {SucceedCount}, failed: {FailedCount}", jobInfo.Id, currentResult.SucceededResources, currentResult.FailedResources); try { @@ -113,19 +105,26 @@ public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancel catch (RequestFailedException ex) when (ex.Status == (int)HttpStatusCode.Forbidden || ex.Status == (int)HttpStatusCode.Unauthorized) { _logger.LogJobInformation(ex, jobInfo, "Due to unauthorized request, import processing operation failed."); - var error = new ImportProcessingJobErrorResult() { Message = "Due to unauthorized request, import processing operation failed." }; + var error = new ImportJobErrorResult() { ErrorMessage = "Due to unauthorized request, import processing operation failed.", HttpStatusCode = HttpStatusCode.BadRequest }; throw new JobExecutionException(ex.Message, error, ex); } catch (RequestFailedException ex) when (ex.Status == (int)HttpStatusCode.NotFound) { _logger.LogJobInformation(ex, jobInfo, "Input file deleted, renamed, or moved during job. Import processing operation failed."); - var error = new ImportProcessingJobErrorResult() { Message = "Input file deleted, renamed, or moved during job. Import processing operation failed." }; + var error = new ImportJobErrorResult() { ErrorMessage = "Input file deleted, renamed, or moved during job. Import processing operation failed.", HttpStatusCode = HttpStatusCode.BadRequest }; + throw new JobExecutionException(ex.Message, error, ex); + } + catch (IntegrationDataStoreException ex) + { + _logger.LogJobInformation(ex, jobInfo, "Failed to access input files."); + var error = new ImportJobErrorResult() { ErrorMessage = ex.Message, HttpStatusCode = ex.StatusCode }; throw new JobExecutionException(ex.Message, error, ex); } catch (Exception ex) { - _logger.LogJobError(ex, jobInfo, "RetriableJobException. Generic exception. 
Failed to load data."); - throw new RetriableJobException("Failed to load data", ex); + _logger.LogJobError(ex, jobInfo, "Generic exception. Failed to load data."); + var error = new ImportJobErrorResult() { ErrorMessage = "Generic exception. Failed to load data." }; + throw new JobExecutionException(ex.Message, error, ex); } jobInfo.Data = currentResult.SucceededResources + currentResult.FailedResources; @@ -134,24 +133,19 @@ public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancel catch (TaskCanceledException canceledEx) { _logger.LogJobInformation(canceledEx, jobInfo, CancelledErrorMessage); - var error = new ImportProcessingJobErrorResult() { Message = CancelledErrorMessage }; + var error = new ImportJobErrorResult() { ErrorMessage = CancelledErrorMessage }; throw new JobExecutionException(canceledEx.Message, error, canceledEx); } catch (OperationCanceledException canceledEx) { _logger.LogJobInformation(canceledEx, jobInfo, "Import processing operation is canceled."); - var error = new ImportProcessingJobErrorResult() { Message = CancelledErrorMessage }; + var error = new ImportJobErrorResult() { ErrorMessage = CancelledErrorMessage }; throw new JobExecutionException(canceledEx.Message, error, canceledEx); } - catch (RetriableJobException retriableEx) - { - _logger.LogJobInformation(retriableEx, jobInfo, "Error in import processing job."); - throw; - } catch (Exception ex) { _logger.LogJobInformation(ex, jobInfo, "Critical error in import processing job."); - var error = new ImportProcessingJobErrorResult() { Message = ex.Message, Details = ex.ToString() }; + var error = new ImportJobErrorResult() { ErrorMessage = ex.Message, ErrorDetails = ex.ToString() }; throw new JobExecutionException(ex.Message, error, ex); } } diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlQueueClient.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlQueueClient.cs index 7754aca653..7983c105a7 100644 --- 
a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlQueueClient.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlQueueClient.cs @@ -41,44 +41,18 @@ public SqlQueueClient(ISqlRetryService sqlRetryService, ILogger public async Task CancelJobByGroupIdAsync(byte queueType, long groupId, CancellationToken cancellationToken) { - try - { - using var cmd = new SqlCommand("dbo.PutJobCancelation") { CommandType = CommandType.StoredProcedure }; - cmd.Parameters.AddWithValue("@QueueType", queueType); - cmd.Parameters.AddWithValue("@GroupId", groupId); - await cmd.ExecuteNonQueryAsync(_sqlRetryService, _logger, cancellationToken); - } - catch (Exception ex) - { - _logger.LogError(ex, "CancelJobByGroupIdAsync failed."); - if (ex.IsRetriable()) - { - throw new RetriableJobException(ex.Message, ex); - } - - throw; - } + using var cmd = new SqlCommand("dbo.PutJobCancelation") { CommandType = CommandType.StoredProcedure }; + cmd.Parameters.AddWithValue("@QueueType", queueType); + cmd.Parameters.AddWithValue("@GroupId", groupId); + await cmd.ExecuteNonQueryAsync(_sqlRetryService, _logger, cancellationToken); } public async Task CancelJobByIdAsync(byte queueType, long jobId, CancellationToken cancellationToken) { - try - { - using var cmd = new SqlCommand("dbo.PutJobCancelation") { CommandType = CommandType.StoredProcedure }; - cmd.Parameters.AddWithValue("@QueueType", queueType); - cmd.Parameters.AddWithValue("@JobId", jobId); - await cmd.ExecuteNonQueryAsync(_sqlRetryService, _logger, cancellationToken); - } - catch (Exception ex) - { - _logger.LogError(ex, "CancelJobByIdAsync failed."); - if (ex.IsRetriable()) - { - throw new RetriableJobException(ex.Message, ex); - } - - throw; - } + using var cmd = new SqlCommand("dbo.PutJobCancelation") { CommandType = CommandType.StoredProcedure }; + cmd.Parameters.AddWithValue("@QueueType", queueType); + cmd.Parameters.AddWithValue("@JobId", jobId); + await cmd.ExecuteNonQueryAsync(_sqlRetryService, _logger, 
cancellationToken); } public virtual async Task CompleteJobAsync(JobInfo jobInfo, bool requestCancellationOnFailure, CancellationToken cancellationToken) @@ -176,14 +150,9 @@ public async Task> EnqueueAsync(byte queueType, string[] { return await sqlCommand.ExecuteReaderAsync(_sqlRetryService, JobInfoExtensions.LoadJobInfo, _logger, cancellationToken); } - catch (SqlException sqlEx) + catch (SqlException sqlEx) when (forceOneActiveJobGroup && sqlEx.State == 127) { - if (sqlEx.State == 127) - { - throw new JobManagement.JobConflictException(sqlEx.Message); - } - - throw; + throw new JobConflictException(sqlEx.Message); } } diff --git a/src/Microsoft.Health.TaskManagement.UnitTests/JobHostingTests.cs b/src/Microsoft.Health.TaskManagement.UnitTests/JobHostingTests.cs index a2ab3b0139..85810a8b8d 100644 --- a/src/Microsoft.Health.TaskManagement.UnitTests/JobHostingTests.cs +++ b/src/Microsoft.Health.TaskManagement.UnitTests/JobHostingTests.cs @@ -83,11 +83,10 @@ public async Task GivenJobWithCriticalException_WhenJobHostingStart_ThenJobShoul string groupDefinition2 = "groupDefinition2"; TestQueueClient queueClient = new TestQueueClient(); - JobInfo job1 = (await queueClient.EnqueueAsync(0, new string[] { definition1 }, 1, false, false, CancellationToken.None)).First(); - JobInfo job2 = (await queueClient.EnqueueAsync(0, new string[] { definition2 }, 2, false, false, CancellationToken.None)).First(); - - JobInfo jobGroup1 = (await queueClient.EnqueueAsync(0, new string[] { groupDefinition1 }, 1, false, false, CancellationToken.None)).First(); - JobInfo jobGroup2 = (await queueClient.EnqueueAsync(0, new string[] { groupDefinition2 }, 2, false, false, CancellationToken.None)).First(); + JobInfo jobGroup1 = (await queueClient.EnqueueAsync(0, [groupDefinition1], 1, false, false, CancellationToken.None)).First(); + JobInfo job1 = (await queueClient.EnqueueAsync(0, [definition1], 1, false, false, CancellationToken.None)).First(); + JobInfo jobGroup2 = (await 
queueClient.EnqueueAsync(0, [groupDefinition2], 2, false, false, CancellationToken.None)).First(); + JobInfo job2 = (await queueClient.EnqueueAsync(0, [definition2], 2, false, false, CancellationToken.None)).First(); int executeCount = 0; TestJobFactory factory = new TestJobFactory(t => @@ -98,7 +97,6 @@ public async Task GivenJobWithCriticalException_WhenJobHostingStart_ThenJobShoul (token) => { Interlocked.Increment(ref executeCount); - throw new JobExecutionException(errorMessage, error); }); } @@ -138,9 +136,9 @@ public async Task GivenJobWithCriticalException_WhenJobHostingStart_ThenJobShoul Assert.Equal(JobStatus.Failed, job2.Status); - // Job2's error includes the stack trace with can't be easily added to the expected value, so we just look for the message. + // Job2's error includes the stack trace whitch can't be easily added to the expected value, so we just look for the message. Assert.Contains(errorMessage, job2.Result); - Assert.Equal(JobStatus.Cancelled, jobGroup2.Status); + Assert.Equal(JobStatus.Completed, jobGroup2.Status); } [Fact] @@ -216,42 +214,6 @@ public async Task GivenAnLongRunningJob_WhenJobHostingStop_ThenJobShouldBeComple Assert.Equal(1, executeCount0); } - [Fact] - public async Task GivenJobWithRetriableException_WhenJobHostingStart_ThenJobShouldBeRetry() - { - int executeCount0 = 0; - TestJobFactory factory = new TestJobFactory(t => - { - return new TestJob( - (token) => - { - Interlocked.Increment(ref executeCount0); - if (executeCount0 <= 1) - { - throw new RetriableJobException("test"); - } - - return Task.FromResult(t.Result); - }); - }); - - TestQueueClient queueClient = new TestQueueClient(); - JobInfo job1 = (await queueClient.EnqueueAsync(0, new string[] { "task1" }, null, false, false, CancellationToken.None)).First(); - - JobHosting jobHosting = new JobHosting(queueClient, factory, _logger); - jobHosting.PollingFrequencyInSeconds = 0; - jobHosting.MaxRunningJobCount = 1; - jobHosting.JobHeartbeatTimeoutThresholdInSeconds = 1; 
- - CancellationTokenSource tokenSource = new CancellationTokenSource(); - - tokenSource.CancelAfter(TimeSpan.FromSeconds(2)); - await jobHosting.ExecuteAsync(0, "test", tokenSource); - - Assert.Equal(JobStatus.Completed, job1.Status); - Assert.Equal(2, executeCount0); - } - [Fact] public async Task GivenJobWithInvalidOperationException_WhenJobHostingStart_ThenJobFail() { diff --git a/src/Microsoft.Health.TaskManagement/JobExecutionException.cs b/src/Microsoft.Health.TaskManagement/JobExecutionException.cs index 5f42e94c66..03e6870c02 100644 --- a/src/Microsoft.Health.TaskManagement/JobExecutionException.cs +++ b/src/Microsoft.Health.TaskManagement/JobExecutionException.cs @@ -39,7 +39,5 @@ public JobExecutionException(string message, object error, Exception innerExcept } public object Error { get; private set; } - - public bool RequestCancellationOnFailure { get; set; } } } diff --git a/src/Microsoft.Health.TaskManagement/JobHosting.cs b/src/Microsoft.Health.TaskManagement/JobHosting.cs index afda8ecb0c..bc6babd0dc 100644 --- a/src/Microsoft.Health.TaskManagement/JobHosting.cs +++ b/src/Microsoft.Health.TaskManagement/JobHosting.cs @@ -163,13 +163,6 @@ private async Task ExecuteJobAsync(JobInfo jobInfo) jobInfo.Result = await runningJob; } - catch (RetriableJobException ex) - { - _logger.LogJobError(ex, jobInfo, "Job with id: {JobId} and group id: {GroupId} of type: {JobType} failed with retriable exception.", jobInfo.Id, jobInfo.GroupId, jobInfo.QueueType); - - // Not complete the job for retriable exception. 
- return; - } catch (JobExecutionException ex) { _logger.LogJobError(ex, jobInfo, "Job with id: {JobId} and group id: {GroupId} of type: {JobType} failed.", jobInfo.Id, jobInfo.GroupId, jobInfo.QueueType); @@ -178,7 +171,7 @@ private async Task ExecuteJobAsync(JobInfo jobInfo) try { - await _queueClient.CompleteJobAsync(jobInfo, ex.RequestCancellationOnFailure, CancellationToken.None); + await _queueClient.CompleteJobAsync(jobInfo, true, CancellationToken.None); } catch (Exception completeEx) { diff --git a/src/Microsoft.Health.TaskManagement/RetriableJobException.cs b/src/Microsoft.Health.TaskManagement/RetriableJobException.cs index d7d5d78003..c9c38e5554 100644 --- a/src/Microsoft.Health.TaskManagement/RetriableJobException.cs +++ b/src/Microsoft.Health.TaskManagement/RetriableJobException.cs @@ -8,6 +8,7 @@ namespace Microsoft.Health.JobManagement { + [Obsolete("RetriableJobException class is deprecated. Replace by in-place retries.")] public class RetriableJobException : Exception { public RetriableJobException(string message) diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportRebuildIndexesTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportRebuildIndexesTests.cs index 433ce239a4..cff1c3059e 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportRebuildIndexesTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportRebuildIndexesTests.cs @@ -88,7 +88,7 @@ private async Task ImportCheckAsync(ImportRequest request, TestFhirClient c Uri checkLocation = await ImportTestHelper.CreateImportTaskAsync(client, request); HttpResponseMessage response; - while ((response = await client.CheckImportAsync(checkLocation, CancellationToken.None)).StatusCode == System.Net.HttpStatusCode.Accepted) + while ((response = await client.CheckImportAsync(checkLocation)).StatusCode == System.Net.HttpStatusCode.Accepted) { await Task.Delay(TimeSpan.FromSeconds(5)); } diff --git 
a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTestFixture.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTestFixture.cs index d193da0fa5..950f5839d9 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTestFixture.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTestFixture.cs @@ -27,5 +27,7 @@ public MetricHandler MetricHandler } public ImportTestStorageAccount StorageAccount { get; private set; } + + internal string ConnectionString => (TestFhirServer as InProcTestFhirServer)?.ConnectionString; } } diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTestHelper.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTestHelper.cs index 9f7064d722..48b289d4e9 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTestHelper.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTestHelper.cs @@ -201,7 +201,7 @@ private static async Task ImportCheckAsync(TestFhirClient testFhirClient, Import { Uri checkLocation = await CreateImportTaskAsync(testFhirClient, request); - while ((await testFhirClient.CheckImportAsync(checkLocation, CancellationToken.None)).StatusCode == System.Net.HttpStatusCode.Accepted) + while ((await testFhirClient.CheckImportAsync(checkLocation)).StatusCode == System.Net.HttpStatusCode.Accepted) { await Task.Delay(TimeSpan.FromSeconds(5)); } diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTests.cs index cadd4be422..892e3ab26e 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTests.cs @@ -15,8 +15,10 @@ using Hl7.Fhir.Serialization; using IdentityServer4.Models; using MediatR; +using Microsoft.Data.SqlClient; using Microsoft.Health.Fhir.Api.Features.Operations.Import; using 
Microsoft.Health.Fhir.Client; +using Microsoft.Health.Fhir.Core.Features.Operations; using Microsoft.Health.Fhir.Core.Features.Operations.Import; using Microsoft.Health.Fhir.Core.Features.Operations.Import.Models; using Microsoft.Health.Fhir.Tests.Common; @@ -50,6 +52,115 @@ public ImportTests(ImportTestFixture fixture) _fixture = fixture; } + [Fact] + public async Task GivenIncrementalLoad_WithNotRetriableSqlExceptionForOrchestrator_ImportShouldFail() + { + if (!_fixture.IsUsingInProcTestServer) + { + return; + } + + ExecuteSql("IF object_id('JobQueue_Trigger') IS NOT NULL DROP TRIGGER JobQueue_Trigger"); + try + { + var registration = await RegisterImport(); + ExecuteSql(@" +CREATE TRIGGER JobQueue_Trigger ON JobQueue FOR INSERT +AS +RAISERROR('TestError',18,127) + "); + var message = await ImportWaitAsync(registration.CheckLocation, false); + Assert.Equal(HttpStatusCode.InternalServerError, message.StatusCode); + Assert.True(!message.ReasonPhrase.Contains("TestError")); // message provided to customer should not contain internal details + var result = (string)ExecuteSql($"SELECT Result FROM dbo.JobQueue WHERE QueueType = 2 AND Status = 3 AND JobId = {registration.JobId}"); + Assert.Contains("TestError", result); // job result should contain all details + } + finally + { + ExecuteSql("IF object_id('JobQueue_Trigger') IS NOT NULL DROP TRIGGER JobQueue_Trigger"); + } + } + + [Fact] + public async Task GivenIncrementalLoad_WithNotRetriableSqlExceptionForWorker_ImportShouldFail() + { + if (!_fixture.IsUsingInProcTestServer) + { + return; + } + + ExecuteSql("IF object_id('Transactions_Trigger') IS NOT NULL DROP TRIGGER Transactions_Trigger"); + try + { + ExecuteSql(@" +CREATE TRIGGER Transactions_Trigger ON Transactions FOR UPDATE +AS +RAISERROR('TestError',18,127) + "); + var registration = await RegisterImport(); + var message = await ImportWaitAsync(registration.CheckLocation, false); + Assert.Equal(HttpStatusCode.InternalServerError, message.StatusCode); + 
Assert.True(!message.ReasonPhrase.Contains("TestError")); // message provided to customer should not contain internal details + var result = (string)ExecuteSql($"SELECT Result FROM dbo.JobQueue WHERE QueueType = 2 AND Status = 3 AND GroupId = {registration.JobId} AND GroupId <> JobId"); + Assert.Contains("TestError", result); // job result should contain all details + } + finally + { + ExecuteSql("IF object_id('Transactions_Trigger') IS NOT NULL DROP TRIGGER Transactions_Trigger"); + } + } + + [Theory] + [InlineData(3)] // import should succeed + [InlineData(6)] // import shoul fail + public async Task GivenIncrementalLoad_WithExecutionTimeoutExceptionForWorker_ImportShouldReturnCorrectly(int requestedExceptions) + { + if (!_fixture.IsUsingInProcTestServer) + { + return; + } + + ExecuteSql("IF object_id('Transactions_Trigger') IS NOT NULL DROP TRIGGER Transactions_Trigger"); + try + { + ExecuteSql("TRUNCATE TABLE EventLog"); + ExecuteSql("TRUNCATE TABLE Transactions"); + ExecuteSql(@$" +CREATE TRIGGER Transactions_Trigger ON Transactions FOR UPDATE +AS +IF (SELECT count(*) FROM EventLog WHERE Process = 'MergeResourcesCommitTransaction' AND Status = 'Error') < {requestedExceptions} + RAISERROR('execution timeout expired',18,127) + "); + var registration = await RegisterImport(); + var message = await ImportWaitAsync(registration.CheckLocation, false); + Assert.Equal(requestedExceptions == 6 ? HttpStatusCode.InternalServerError : HttpStatusCode.OK, message.StatusCode); + var retries = (int)ExecuteSql("SELECT count(*) FROM EventLog WHERE Process = 'MergeResourcesCommitTransaction' AND Status = 'Error'"); + Assert.Equal(requestedExceptions == 6 ? 
5 : 3, retries); + } + finally + { + ExecuteSql("IF object_id('Transactions_Trigger') IS NOT NULL DROP TRIGGER Transactions_Trigger"); + } + } + + private object ExecuteSql(string sql) + { + using var conn = new SqlConnection(_fixture.ConnectionString); + conn.Open(); + using var cmd = new SqlCommand(sql, conn); + return cmd.ExecuteScalar(); + } + + private async Task<(Uri CheckLocation, long JobId)> RegisterImport() + { + var ndJson = PrepareResource(Guid.NewGuid().ToString("N"), null, null); // do not specify (version/last updated) to run without transaction + var location = (await ImportTestHelper.UploadFileAsync(ndJson, _fixture.StorageAccount)).location; + var request = CreateImportRequest(location, ImportMode.IncrementalLoad); + var checkLocation = await ImportTestHelper.CreateImportTaskAsync(_client, request); + var id = long.Parse(checkLocation.LocalPath.Split('/').Last()); + return (checkLocation, id); + } + [Fact] public async Task GivenIncrementalLoad_MultipleInputVersionsOutOfOrderSomeNotExplicit_ResourceNotExisting_NoGap() { @@ -233,7 +344,7 @@ public async Task GivenIncrementalImportInvalidResource_WhenImportData_ThenError Uri checkLocation = await ImportTestHelper.CreateImportTaskAsync(_client, request); HttpResponseMessage response; - while ((response = await _client.CheckImportAsync(checkLocation, CancellationToken.None)).StatusCode == System.Net.HttpStatusCode.Accepted) + while ((response = await _client.CheckImportAsync(checkLocation)).StatusCode == System.Net.HttpStatusCode.Accepted) { await Task.Delay(TimeSpan.FromSeconds(5)); } @@ -669,7 +780,7 @@ public async Task GivenImportResourceWithWrongType_ThenErrorLogShouldBeUploaded( Uri checkLocation = await ImportTestHelper.CreateImportTaskAsync(_client, request); HttpResponseMessage response; - while ((response = await _client.CheckImportAsync(checkLocation, CancellationToken.None)).StatusCode == System.Net.HttpStatusCode.Accepted) + while ((response = await 
_client.CheckImportAsync(checkLocation)).StatusCode == System.Net.HttpStatusCode.Accepted) { await Task.Delay(TimeSpan.FromSeconds(5)); } @@ -771,7 +882,7 @@ public async Task GivenImportInvalidResource_ThenErrorLogsShouldBeOutput() Uri checkLocation = await ImportTestHelper.CreateImportTaskAsync(_client, request); HttpResponseMessage response; - while ((response = await _client.CheckImportAsync(checkLocation, CancellationToken.None)).StatusCode == System.Net.HttpStatusCode.Accepted) + while ((response = await _client.CheckImportAsync(checkLocation)).StatusCode == System.Net.HttpStatusCode.Accepted) { await Task.Delay(TimeSpan.FromSeconds(5)); } @@ -925,8 +1036,7 @@ public async Task GivenImportOperationEnabled_WhenImportInvalidResourceUrl_ThenB FhirClientException fhirException = await Assert.ThrowsAsync( async () => { - HttpResponseMessage response; - while ((response = await _client.CheckImportAsync(checkLocation, CancellationToken.None)).StatusCode == System.Net.HttpStatusCode.Accepted) + while ((await _client.CheckImportAsync(checkLocation)).StatusCode == HttpStatusCode.Accepted) { await Task.Delay(TimeSpan.FromSeconds(5)); } @@ -976,8 +1086,7 @@ public async Task GivenImportInvalidETag_ThenBadRequestShouldBeReturned() FhirClientException fhirException = await Assert.ThrowsAsync( async () => { - HttpResponseMessage response; - while ((response = await _client.CheckImportAsync(checkLocation, CancellationToken.None)).StatusCode == System.Net.HttpStatusCode.Accepted) + while ((await _client.CheckImportAsync(checkLocation)).StatusCode == HttpStatusCode.Accepted) { await Task.Delay(TimeSpan.FromSeconds(5)); } @@ -1031,7 +1140,7 @@ private async Task ImportCheckAsync(ImportRequest request, TestFhirClient c client = client ?? 
_client; Uri checkLocation = await ImportTestHelper.CreateImportTaskAsync(client, request); - var response = await ImportWaitAsync(checkLocation, client); + var response = await ImportWaitAsync(checkLocation); Assert.Equal(System.Net.HttpStatusCode.OK, response.StatusCode); ImportJobResult result = JsonConvert.DeserializeObject(await response.Content.ReadAsStringAsync()); @@ -1050,11 +1159,10 @@ private async Task ImportCheckAsync(ImportRequest request, TestFhirClient c return checkLocation; } - private async Task ImportWaitAsync(Uri checkLocation, TestFhirClient client = null) + private async Task ImportWaitAsync(Uri checkLocation, bool checkSuccessStatus = true) { - client = client ?? _client; HttpResponseMessage response; - while ((response = await client.CheckImportAsync(checkLocation, CancellationToken.None)).StatusCode == HttpStatusCode.Accepted) + while ((response = await _client.CheckImportAsync(checkLocation, checkSuccessStatus)).StatusCode == HttpStatusCode.Accepted) { await Task.Delay(TimeSpan.FromSeconds(2)); } diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/InProcTestFhirServer.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/InProcTestFhirServer.cs index 5da6c73159..844d7e22b7 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/InProcTestFhirServer.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/InProcTestFhirServer.cs @@ -80,6 +80,9 @@ public InProcTestFhirServer(DataStore dataStore, Type startupType) { var connectionStringBuilder = new SqlConnectionStringBuilder(configuration["SqlServer:ConnectionString"]); var databaseName = connectionStringBuilder.InitialCatalog += "_" + startupType.Name; + var temp = new SqlConnectionStringBuilder(configuration["SqlServer:ConnectionString"]); + temp.InitialCatalog = databaseName; + ConnectionString = temp.ToString(); configuration["SqlServer:ConnectionString"] = connectionStringBuilder.ToString(); configuration["TaskHosting:PollingFrequencyInSeconds"] = "1"; @@ -148,6 +151,8 
@@ string ValueOrFallback(string configKey, string fallbackValue) _builtConfiguration = Server.Services.GetRequiredService(); } + internal string ConnectionString { get; private set; } + public TestServer Server { get; } internal override HttpMessageHandler CreateMessageHandler() From a54cfefb5739172cf756038a28ab1abe616782e3 Mon Sep 17 00:00:00 2001 From: apurvabhaleMS <86023331+apurvabhaleMS@users.noreply.github.com> Date: Tue, 9 Apr 2024 10:23:58 -0700 Subject: [PATCH 145/155] Pass in the cancellation token to BatchAndTransactions call (#3802) --- .../Controllers/FhirController.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Microsoft.Health.Fhir.Shared.Api/Controllers/FhirController.cs b/src/Microsoft.Health.Fhir.Shared.Api/Controllers/FhirController.cs index bec6efc0f7..18cc6096cc 100644 --- a/src/Microsoft.Health.Fhir.Shared.Api/Controllers/FhirController.cs +++ b/src/Microsoft.Health.Fhir.Shared.Api/Controllers/FhirController.cs @@ -665,7 +665,7 @@ public async Task Versions() [AuditEventType(AuditEventSubType.BundlePost)] public async Task BatchAndTransactions([FromBody] Resource bundle) { - ResourceElement bundleResponse = await _mediator.PostBundle(bundle.ToResourceElement()); + ResourceElement bundleResponse = await _mediator.PostBundle(bundle.ToResourceElement(), HttpContext.RequestAborted); return FhirResult.Create(bundleResponse); } From 7dcf8bfc2beba51ffdb55da1a81e1a1d409be843 Mon Sep 17 00:00:00 2001 From: SergeyGaluzo <95932081+SergeyGaluzo@users.noreply.github.com> Date: Tue, 9 Apr 2024 16:41:45 -0700 Subject: [PATCH 146/155] Import self orchestration (#3799) * start * Fix bytes * dump resource ids before * do not go to the database * SuccessOnRetry * fixed tests * Moved wait logic request handler * Removed cancel request from job exec exception * plus * test correction * Adding ability to raise SQL exceptions in E2E tests * Temp * Users/sergal/importtests (#3797) * Adding ability to raise SQL exceptions in E2E tests * 
error handling * removed console * job hosting tests * limit to in proc * no retriable in export * correct wait * retriable back in export * rest * minus using * remove pragma * Remove retriable from export * tests * retriable obsolete * polly retries and simpler tests * Addressed comments * After merge fixes plus get import request handler tests * removed not applicable orch tests * second place to hide message * time wait limit * new retriable error * Fixes after merge * using * using * test adjusted for grouped * [] * Removed task cancelled exception logic --- .../Import/GetImportRequestHandlerTests.cs | 152 +-- .../Import/GetImportRequestHandler.cs | 119 +- .../Import/ImportProcessingJobResult.cs | 5 - .../Import/ImportOrchestratorJobTests.cs | 1030 ----------------- .../Import/ImportProcessingJobTests.cs | 24 +- .../Features/ExceptionExtension.cs | 3 +- .../Import/ImportOrchestratorJob.cs | 231 +--- .../Operations/Import/ImportProcessingJob.cs | 60 +- .../Features/Operations/Import/SqlImporter.cs | 17 +- .../Storage/SqlRetry/SqlRetryService.cs | 2 +- .../Storage/SqlServerFhirDataStore.cs | 5 + .../JobHosting.cs | 2 +- .../Rest/Import/ImportTests.cs | 17 +- tools/PerfTester/Program.cs | 4 +- 14 files changed, 255 insertions(+), 1416 deletions(-) diff --git a/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/GetImportRequestHandlerTests.cs b/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/GetImportRequestHandlerTests.cs index 34ec51f998..058601c674 100644 --- a/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/GetImportRequestHandlerTests.cs +++ b/src/Microsoft.Health.Fhir.Core.UnitTests/Features/Operations/Import/GetImportRequestHandlerTests.cs @@ -43,151 +43,127 @@ public GetImportRequestHandlerTests() } [Fact] - public async Task GivenAFhirMediator_WhenGettingAnExistingBulkImportJobWithCompletedStatus_ThenHttpResponseCodeShouldBeOk() + public async Task 
WhenGettingCompletedJob_ThenResponseCodeShouldBeOk() { - var coordResult = new ImportOrchestratorJobResult() - { - Request = "Request", - }; - - var orchestratorJob = new JobInfo() - { - Id = 0, - GroupId = 0, - Status = JobStatus.Completed, - Result = JsonConvert.SerializeObject(coordResult), - Definition = JsonConvert.SerializeObject(new ImportOrchestratorJobDefinition()), - }; + var coordResult = new ImportOrchestratorJobResult() { Request = "Request" }; + var coord = new JobInfo() { Status = JobStatus.Completed, Result = JsonConvert.SerializeObject(coordResult), Definition = JsonConvert.SerializeObject(new ImportOrchestratorJobDefinition()) }; + var workerResult = new ImportProcessingJobResult() { SucceededResources = 1, FailedResources = 1, ErrorLogLocation = "http://xyz" }; + var worker = new JobInfo() { Id = 1, Status = JobStatus.Completed, Result = JsonConvert.SerializeObject(workerResult), Definition = JsonConvert.SerializeObject(new ImportProcessingJobDefinition() { ResourceLocation = "http://xyz" }) }; - var processingJobResult = new ImportProcessingJobResult() - { - SucceededResources = 1, - FailedResources = 1, - ErrorLogLocation = "http://ResourceErrorLogLocation", - }; - - var processingJob = new JobInfo() - { - Id = 1, - GroupId = 0, - Status = JobStatus.Completed, - Result = JsonConvert.SerializeObject(processingJobResult), - Definition = JsonConvert.SerializeObject(new ImportProcessingJobDefinition() { ResourceLocation = "http://ResourceLocation" }), - }; - - GetImportResponse result = await SetupAndExecuteGetBulkImportJobByIdAsync(orchestratorJob, new List() { processingJob }); + var result = await SetupAndExecuteGetBulkImportJobByIdAsync(coord, [worker]); Assert.Equal(HttpStatusCode.OK, result.StatusCode); Assert.Single(result.JobResult.Output); Assert.Single(result.JobResult.Error); } + [Theory] + [InlineData(HttpStatusCode.BadRequest)] + [InlineData(HttpStatusCode.InternalServerError)] + [InlineData((HttpStatusCode)0)] + public async Task 
WhenGettingFailedJob_ThenExecptionIsTrownWithCorrectResponseCode(HttpStatusCode statusCode) + { + var coord = new JobInfo() { Status = JobStatus.Completed }; + var workerResult = new ImportJobErrorResult() { ErrorMessage = "Error", HttpStatusCode = statusCode }; + var worker = new JobInfo() { Id = 1, Status = JobStatus.Failed, Result = JsonConvert.SerializeObject(workerResult) }; + + var ofe = await Assert.ThrowsAsync(() => SetupAndExecuteGetBulkImportJobByIdAsync(coord, [worker])); + + Assert.Equal(statusCode == 0 ? HttpStatusCode.InternalServerError : statusCode, ofe.ResponseStatusCode); + Assert.Equal(string.Format(Core.Resources.OperationFailed, OperationsConstants.Import, ofe.ResponseStatusCode == HttpStatusCode.InternalServerError ? HttpStatusCode.InternalServerError : "Error"), ofe.Message); + } + [Fact] - public async Task GivenAFhirMediator_WhenGettingAnCompletedImportJobWithFailure_ThenHttpResponseCodeShouldBeExpected() + public async Task WhenGettingFailedJob_WithGenericException_ThenExecptionIsTrownWithCorrectResponseCode() { - var orchestratorJobResult = new ImportJobErrorResult() - { - HttpStatusCode = HttpStatusCode.BadRequest, - ErrorMessage = "error", - }; + var coord = new JobInfo() { Status = JobStatus.Completed }; + object workerResult = new { message = "Error", stackTrace = "Trace" }; + var worker = new JobInfo() { Id = 1, Status = JobStatus.Failed, Result = JsonConvert.SerializeObject(workerResult) }; - var orchestratorJob = new JobInfo() - { - Status = JobStatus.Failed, - Result = JsonConvert.SerializeObject(orchestratorJobResult), - }; + var ofe = await Assert.ThrowsAsync(() => SetupAndExecuteGetBulkImportJobByIdAsync(coord, [worker])); - OperationFailedException ofe = await Assert.ThrowsAsync(() => SetupAndExecuteGetBulkImportJobByIdAsync(orchestratorJob, new List())); + Assert.Equal(HttpStatusCode.InternalServerError, ofe.ResponseStatusCode); + Assert.Equal(string.Format(Core.Resources.OperationFailed, OperationsConstants.Import, 
HttpStatusCode.InternalServerError), ofe.Message); + } + [Fact] + public async Task WhenGettingImpprtWithCancelledOrchestratorJob_ThenExceptionIsThrownWithBadResponseCode() + { + var coord = new JobInfo() { Status = JobStatus.Cancelled }; + var ofe = await Assert.ThrowsAsync(() => SetupAndExecuteGetBulkImportJobByIdAsync(coord, [])); Assert.Equal(HttpStatusCode.BadRequest, ofe.ResponseStatusCode); - Assert.NotNull(ofe.Message); } [Fact] - public async Task GivenAFhirMediator_WhenGettingAnExistingBulkImportJobThatWasCanceled_ThenOperationFailedExceptionIsThrownWithBadRequestHttpResponseCode() + public async Task WhenGettingImportWithCancelledWorkerJob_ThenExceptionIsThrownWithBadResponseCode() { - var orchestratorJob = new JobInfo() - { - Status = JobStatus.Cancelled, - }; - OperationFailedException ofe = await Assert.ThrowsAsync(() => SetupAndExecuteGetBulkImportJobByIdAsync(orchestratorJob, new List())); - + var coord = new JobInfo() { Status = JobStatus.Completed }; + var worker = new JobInfo() { Id = 1, Status = JobStatus.Cancelled }; + var ofe = await Assert.ThrowsAsync(() => SetupAndExecuteGetBulkImportJobByIdAsync(coord, [worker])); Assert.Equal(HttpStatusCode.BadRequest, ofe.ResponseStatusCode); } [Fact] - public async Task GivenAFhirMediator_WhenGettingAnExistingBulkImportJobWithNotCompletedStatus_ThenHttpResponseCodeShouldBeAccepted() + public async Task WhenGettingInFlightJob_ThenResponseCodeShouldBeAccepted() { - var orchestratorJobResult = new ImportOrchestratorJobResult() - { - Request = "Request", - }; + var coordResult = new ImportOrchestratorJobResult() { Request = "Request" }; + var coord = new JobInfo() { Status = JobStatus.Completed, Result = JsonConvert.SerializeObject(coordResult), Definition = JsonConvert.SerializeObject(new ImportOrchestratorJobDefinition()) }; - var orchestratorJob = new JobInfo() - { - Id = 1, - GroupId = 1, - Status = JobStatus.Running, - Result = JsonConvert.SerializeObject(orchestratorJobResult), - Definition = 
JsonConvert.SerializeObject(new ImportOrchestratorJobDefinition()), - }; + var workerResult = new ImportProcessingJobResult() { SucceededResources = 1, FailedResources = 1, ErrorLogLocation = "http://xyz" }; - var processingJobResult = new ImportProcessingJobResult() + // jobs 1 and 2 are created for the same input file, they are grouped together in the results + var worker1 = new JobInfo() { - SucceededResources = 1, - FailedResources = 1, - ErrorLogLocation = "http://ResourceErrorLogLocation", + Id = 1, + Status = JobStatus.Completed, + Result = JsonConvert.SerializeObject(workerResult), + Definition = JsonConvert.SerializeObject(new ImportProcessingJobDefinition() { ResourceLocation = "http://xyz" }), }; - var processingJob1 = new JobInfo() + var worker2 = new JobInfo() { Id = 2, - GroupId = 1, Status = JobStatus.Completed, - Result = JsonConvert.SerializeObject(processingJobResult), - Definition = JsonConvert.SerializeObject(new ImportProcessingJobDefinition() { ResourceLocation = "http://ResourceLocation" }), + Result = JsonConvert.SerializeObject(workerResult), + Definition = JsonConvert.SerializeObject(new ImportProcessingJobDefinition() { ResourceLocation = "http://xyz" }), }; - var processingJob2 = new JobInfo() + var worker3 = new JobInfo() { Id = 3, - GroupId = 1, Status = JobStatus.Completed, - Result = JsonConvert.SerializeObject(processingJobResult), - Definition = JsonConvert.SerializeObject(new ImportProcessingJobDefinition() { ResourceLocation = "http://ResourceLocation" }), + Result = JsonConvert.SerializeObject(workerResult), + Definition = JsonConvert.SerializeObject(new ImportProcessingJobDefinition() { ResourceLocation = "http://xyz2" }), }; - var processingJob3 = new JobInfo() + var worker4 = new JobInfo() { Id = 4, - GroupId = 1, Status = JobStatus.Running, - Result = JsonConvert.SerializeObject(processingJobResult), - Definition = JsonConvert.SerializeObject(new ImportProcessingJobDefinition() { ResourceLocation = "http://ResourceLocation" 
}), }; - GetImportResponse result = await SetupAndExecuteGetBulkImportJobByIdAsync(orchestratorJob, new List() { processingJob1, processingJob2, processingJob3 }); + var result = await SetupAndExecuteGetBulkImportJobByIdAsync(coord, [worker1, worker2, worker3, worker4]); Assert.Equal(HttpStatusCode.Accepted, result.StatusCode); Assert.Equal(2, result.JobResult.Output.Count); - Assert.Equal(2, result.JobResult.Error.Count); + Assert.Equal(3, result.JobResult.Error.Count); } [Fact] - public async Task GivenAFhirMediator_WhenGettingWithNotExistJob_ThenNotFoundShouldBeReturned() + public async Task WhenGettingANotExistingJob_ThenNotFoundShouldBeReturned() { await Assert.ThrowsAsync(async () => await _mediator.GetImportStatusAsync(1, CancellationToken.None)); } - private async Task SetupAndExecuteGetBulkImportJobByIdAsync(JobInfo orchestratorJobInfo, List processingJobInfos) + private async Task SetupAndExecuteGetBulkImportJobByIdAsync(JobInfo coord, List workers) { - _queueClient.GetJobByIdAsync(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any()).Returns(orchestratorJobInfo); + _queueClient.GetJobByIdAsync(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any()).Returns(coord); - var allJobs = new List(processingJobInfos); - allJobs.Add(orchestratorJobInfo); + var allJobs = new List(workers); + allJobs.Add(coord); _queueClient.GetJobByGroupIdAsync(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any()).Returns(allJobs); - return await _mediator.GetImportStatusAsync(orchestratorJobInfo.Id, CancellationToken.None); + return await _mediator.GetImportStatusAsync(coord.Id, CancellationToken.None); } } } diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/GetImportRequestHandler.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/GetImportRequestHandler.cs index b6341d78f4..a08a147300 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/GetImportRequestHandler.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/GetImportRequestHandler.cs @@ 
-45,93 +45,90 @@ public async Task Handle(GetImportRequest request, Cancellati throw new UnauthorizedFhirActionException(); } - JobInfo coordInfo = await _queueClient.GetJobByIdAsync(QueueType.Import, request.JobId, false, cancellationToken); - if (coordInfo == null || coordInfo.Status == JobStatus.Archived) + var coord = await _queueClient.GetJobByIdAsync(QueueType.Import, request.JobId, false, cancellationToken); + if (coord == null || coord.Status == JobStatus.Archived) { throw new ResourceNotFoundException(string.Format(Core.Resources.ImportJobNotFound, request.JobId)); } - - if (coordInfo.Status == JobStatus.Created) + else if (coord.Status == JobStatus.Created || coord.Status == JobStatus.Running) { return new GetImportResponse(HttpStatusCode.Accepted); } - else if (coordInfo.Status == JobStatus.Running) + else if (coord.Status == JobStatus.Cancelled) { - if (string.IsNullOrEmpty(coordInfo.Result)) - { - return new GetImportResponse(HttpStatusCode.Accepted); - } - - ImportOrchestratorJobResult orchestratorJobResult = JsonConvert.DeserializeObject(coordInfo.Result); - - (List completedOperationOutcome, List failedOperationOutcome) - = await GetProcessingResultAsync(coordInfo.GroupId, cancellationToken); - - var result = new ImportJobResult() - { - Request = orchestratorJobResult.Request, - TransactionTime = coordInfo.CreateDate, - Output = completedOperationOutcome, - Error = failedOperationOutcome, - }; - - return new GetImportResponse(HttpStatusCode.Accepted, result); + throw new OperationFailedException(Core.Resources.UserRequestedCancellation, HttpStatusCode.BadRequest); } - else if (coordInfo.Status == JobStatus.Completed) + else if (coord.Status == JobStatus.Failed) { - ImportOrchestratorJobResult orchestratorJobResult = JsonConvert.DeserializeObject(coordInfo.Result); - - (List completedOperationOutcome, List failedOperationOutcome) - = await GetProcessingResultAsync(coordInfo.GroupId, cancellationToken); - - var result = new ImportJobResult() + var 
errorResult = JsonConvert.DeserializeObject(coord.Result); + if (errorResult.HttpStatusCode == 0) { - Request = orchestratorJobResult.Request, - TransactionTime = coordInfo.CreateDate, - Output = completedOperationOutcome, - Error = failedOperationOutcome, - }; + errorResult.HttpStatusCode = HttpStatusCode.InternalServerError; + } - return new GetImportResponse(HttpStatusCode.OK, result); - } - else if (coordInfo.Status == JobStatus.Failed) - { - var errorResult = JsonConvert.DeserializeObject(coordInfo.Result); - //// do not show error message for InternalServerError + // hide error message for InternalServerError var failureReason = errorResult.HttpStatusCode == HttpStatusCode.InternalServerError ? HttpStatusCode.InternalServerError.ToString() : errorResult.ErrorMessage; throw new OperationFailedException(string.Format(Core.Resources.OperationFailed, OperationsConstants.Import, failureReason), errorResult.HttpStatusCode); } - else if (coordInfo.Status == JobStatus.Cancelled) + else if (coord.Status == JobStatus.Completed) { - throw new OperationFailedException(Core.Resources.UserRequestedCancellation, HttpStatusCode.BadRequest); + var start = Stopwatch.StartNew(); + var jobs = (await _queueClient.GetJobByGroupIdAsync(QueueType.Import, coord.GroupId, true, cancellationToken)).Where(x => x.Id != coord.Id).ToList(); + var results = GetProcessingResultAsync(jobs); + await Task.Delay(TimeSpan.FromSeconds(start.Elapsed.TotalSeconds > 6 ? 60 : start.Elapsed.TotalSeconds * 10), cancellationToken); // throttle to avoid misuse. 
+ var inFlightJobsExist = jobs.Any(x => x.Status == JobStatus.Running || x.Status == JobStatus.Created); + var cancelledJobsExist = jobs.Any(x => x.Status == JobStatus.Cancelled || (x.Status == JobStatus.Running && x.CancelRequested)); + var failedJobsExist = jobs.Any(x => x.Status == JobStatus.Failed); + + if (cancelledJobsExist && !failedJobsExist) + { + throw new OperationFailedException(Core.Resources.UserRequestedCancellation, HttpStatusCode.BadRequest); + } + else if (failedJobsExist) + { + var failed = jobs.First(x => x.Status == JobStatus.Failed); + var errorResult = JsonConvert.DeserializeObject(failed.Result); + if (errorResult.HttpStatusCode == 0) + { + errorResult.HttpStatusCode = HttpStatusCode.InternalServerError; + } + + // hide error message for InternalServerError + var failureReason = errorResult.HttpStatusCode == HttpStatusCode.InternalServerError ? HttpStatusCode.InternalServerError.ToString() : errorResult.ErrorMessage; + throw new OperationFailedException(string.Format(Core.Resources.OperationFailed, OperationsConstants.Import, failureReason), errorResult.HttpStatusCode); + } + else // no failures here + { + var coordResult = JsonConvert.DeserializeObject(coord.Result); + var result = new ImportJobResult() { Request = coordResult.Request, TransactionTime = coord.CreateDate, Output = results.Completed, Error = results.Failed }; + return new GetImportResponse(!inFlightJobsExist ? 
HttpStatusCode.OK : HttpStatusCode.Accepted, result); + } } else { throw new OperationFailedException(Core.Resources.UnknownError, HttpStatusCode.InternalServerError); } - } - private async Task<(List completedOperationOutcome, List failedOperationOutcome)> GetProcessingResultAsync(long groupId, CancellationToken cancellationToken) - { - var start = Stopwatch.StartNew(); - var jobs = await _queueClient.GetJobByGroupIdAsync(QueueType.Import, groupId, true, cancellationToken); - var duration = start.Elapsed.TotalSeconds; - var completedOperationOutcome = new List(); - var failedOperationOutcome = new List(); - foreach (var job in jobs.Where(_ => _.Id != groupId && _.Status == JobStatus.Completed)) // ignore coordinator && not completed + static (List Completed, List Failed) GetProcessingResultAsync(IList jobs) { - var definition = JsonConvert.DeserializeObject(job.Definition); - var result = JsonConvert.DeserializeObject(job.Result); - completedOperationOutcome.Add(new ImportOperationOutcome() { Type = definition.ResourceType, Count = result.SucceededResources, InputUrl = new Uri(definition.ResourceLocation) }); - if (result.FailedResources > 0) + var completed = new List(); + var failed = new List(); + foreach (var job in jobs.Where(_ => _.Status == JobStatus.Completed)) { - failedOperationOutcome.Add(new ImportFailedOperationOutcome() { Type = definition.ResourceType, Count = result.FailedResources, InputUrl = new Uri(definition.ResourceLocation), Url = result.ErrorLogLocation }); + var definition = JsonConvert.DeserializeObject(job.Definition); + var result = JsonConvert.DeserializeObject(job.Result); + completed.Add(new ImportOperationOutcome() { Type = definition.ResourceType, Count = result.SucceededResources, InputUrl = new Uri(definition.ResourceLocation) }); + if (result.FailedResources > 0) + { + failed.Add(new ImportFailedOperationOutcome() { Type = definition.ResourceType, Count = result.FailedResources, InputUrl = new Uri(definition.ResourceLocation), 
Url = result.ErrorLogLocation }); + } } - } - await Task.Delay(TimeSpan.FromSeconds(duration * 10), cancellationToken); // throttle to avoid misuse. + // group success results by url + var groupped = completed.GroupBy(o => o.InputUrl).Select(g => new ImportOperationOutcome() { Type = g.First().Type, Count = g.Sum(_ => _.Count), InputUrl = g.Key }).ToList(); - return (completedOperationOutcome, failedOperationOutcome); + return (groupped, failed); + } } } } diff --git a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingJobResult.cs b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingJobResult.cs index 81a86ba85b..1bd337687f 100644 --- a/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingJobResult.cs +++ b/src/Microsoft.Health.Fhir.Core/Features/Operations/Import/ImportProcessingJobResult.cs @@ -31,10 +31,5 @@ public class ImportProcessingJobResult /// Critical error during data processing. /// public string ErrorDetails { get; set; } - - /// - /// Current index for last checkpoint - /// - public long CurrentIndex { get; set; } } } diff --git a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportOrchestratorJobTests.cs b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportOrchestratorJobTests.cs index 4da43f4307..9c18ff502d 100644 --- a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportOrchestratorJobTests.cs +++ b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportOrchestratorJobTests.cs @@ -37,42 +37,6 @@ namespace Microsoft.Health.Fhir.SqlServer.UnitTests.Features.Operations.Import [Trait(Traits.Category, Categories.Import)] public class ImportOrchestratorJobTests { - [Fact] - public async Task GivenAnOrchestratorJob_WhenProcessingInputFiles_ThenJobShouldBeCompleted() - { - await VerifyCommonOrchestratorJobAsync(105); - } - - [Fact] - public async Task 
GivenAnOrchestratorJob_WhenResumeFromFailure_ThenJobShouldBeCompleted() - { - await VerifyCommonOrchestratorJobAsync(105, 10); - } - - [Fact] - public async Task GivenAnOrchestratorJob_WhenAllResumeFromFailure_ThenJobShouldBeCompleted() - { - await VerifyCommonOrchestratorJobAsync(105, 105); - } - - [Fact] - public async Task GivenAnOrchestratorJob_WhenResumeFromFailureSomeJobStillRunning_ThenJobShouldBeCompleted() - { - await VerifyCommonOrchestratorJobAsync(105, 10, 5); - } - - [Fact] - public async Task GivenAnOrchestratorJob_WhenSomeJobsCancelled_ThenOperationCanceledExceptionShouldBeThrownAndWaitForOtherSubJobsCompleted() - { - await VerifyJobStatusChangedAsync(100, JobStatus.Cancelled, 20, 20); - } - - [Fact] - public async Task GivenAnOrchestratorJob_WhenSomeJobsFailed_ThenExceptionWithBadRequestShouldBeThrownAndWaitForOtherSubJobsCompleted() - { - await VerifyJobStatusChangedAsync(100, JobStatus.Failed, 14, 14); - } - [InlineData(ImportMode.InitialLoad)] [InlineData(ImportMode.IncrementalLoad)] [Theory] @@ -262,999 +226,5 @@ public async Task GivenAnOrchestratorJob_WhenIntegrationExceptionThrown_ThenJobS callerClaims: default); } } - - [InlineData(ImportMode.InitialLoad)] - [InlineData(ImportMode.IncrementalLoad)] - [Theory] - public async Task GivenAnOrchestratorJob_WhenLastSubJobFailed_ThenExceptionWithBadRequestShouldThrownAndWaitForOtherSubJobsCancelledAndCompleted(ImportMode importMode) - { - RequestContextAccessor contextAccessor = Substitute.For>(); - ILoggerFactory loggerFactory = new NullLoggerFactory(); - IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For(); - IMediator mediator = Substitute.For(); - ImportOrchestratorJobDefinition importOrchestratorInputData = new ImportOrchestratorJobDefinition(); - ImportOrchestratorJobResult importOrchestratorJobResult = new ImportOrchestratorJobResult(); - TestQueueClient testQueueClient = new TestQueueClient(); - IAuditLogger auditLogger = Substitute.For(); - bool 
getJobByGroupIdCalledTime = false; - - testQueueClient.GetJobByIdFunc = (queueClient, id, _) => - { - JobInfo jobInfo = queueClient.JobInfos.First(t => t.Id == id); - if (jobInfo.Id == 3) - { - jobInfo.Status = JobStatus.Failed; - jobInfo.Result = JsonConvert.SerializeObject(new ImportJobErrorResult() { ErrorMessage = "Job Failed", HttpStatusCode = HttpStatusCode.BadRequest }); - } - - return jobInfo; - }; - testQueueClient.GetJobByGroupIdFunc = (queueClient, groupId, _) => - { - IEnumerable jobInfos = queueClient.JobInfos.Where(t => t.GroupId == groupId); - if (!getJobByGroupIdCalledTime) - { - foreach (JobInfo jobInfo in jobInfos) - { - if (jobInfo.Status == JobStatus.Running) - { - jobInfo.Status = JobStatus.Completed; - } - } - } - - getJobByGroupIdCalledTime = true; - return jobInfos.ToList(); - }; - importOrchestratorInputData.BaseUri = new Uri("http://dummy"); - var inputs = new List(); - - inputs.Add(new InputResource() { Type = "Resource", Url = new Uri($"http://dummy/3") }); - inputs.Add(new InputResource() { Type = "Resource", Url = new Uri($"http://dummy/4") }); - importOrchestratorInputData.Input = inputs; - importOrchestratorInputData.InputFormat = "ndjson"; - importOrchestratorInputData.InputSource = new Uri("http://dummy"); - importOrchestratorInputData.RequestUri = new Uri("http://dummy"); - importOrchestratorInputData.ImportMode = importMode; - JobInfo orchestratorJobInfo = (await testQueueClient.EnqueueAsync(0, new string[] { JsonConvert.SerializeObject(importOrchestratorInputData) }, 1, false, false, CancellationToken.None)).First(); - - integrationDataStoreClient.GetPropertiesAsync(Arg.Any(), Arg.Any()) - .Returns(callInfo => - { - Dictionary properties = new Dictionary(); - properties[IntegrationDataStoreClientConstants.BlobPropertyETag] = "test"; - properties[IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L; - return properties; - }); - - ImportOrchestratorJob orchestratorJob = new ImportOrchestratorJob( - mediator, - 
contextAccessor, - integrationDataStoreClient, - testQueueClient, - Options.Create(new ImportTaskConfiguration()), - loggerFactory, - auditLogger); - orchestratorJob.PollingPeriodSec = 0; - var jobExecutionException = await Assert.ThrowsAnyAsync(() => orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - - ImportJobErrorResult resultDetails = (ImportJobErrorResult)jobExecutionException.Error; - Assert.Equal(HttpStatusCode.BadRequest, resultDetails.HttpStatusCode); - Assert.Equal(1, testQueueClient.JobInfos.Count(t => t.Status == JobStatus.Failed)); - Assert.Equal(2, testQueueClient.JobInfos.Count(t => t.Status == JobStatus.Cancelled)); - - _ = mediator.Received().Publish( - Arg.Is( - notification => notification.Id == orchestratorJobInfo.Id.ToString() && - notification.Status == JobStatus.Failed.ToString() && - notification.CreateTime == orchestratorJobInfo.CreateDate), - Arg.Any()); - - if (importMode == ImportMode.IncrementalLoad) - { - auditLogger.Received(1); - auditLogger.Received().LogAudit( - auditAction: Arg.Any(), - operation: Arg.Any(), - resourceType: Arg.Any(), - requestUri: Arg.Any(), - statusCode: Arg.Any(), - correlationId: Arg.Any(), - callerIpAddress: Arg.Any(), - callerClaims: Arg.Any>>(), - customHeaders: Arg.Any>(), - operationType: Arg.Any(), - callerAgent: Arg.Any(), - additionalProperties: Arg.Is>(dict => - dict.ContainsKey("JobId") && dict["JobId"].Equals(orchestratorJobInfo.Id.ToString()) && - dict.ContainsKey("SucceededResources") && dict["SucceededResources"].Equals("0") && - dict.ContainsKey("FailedResources") && dict["FailedResources"].Equals("0"))); - } - else if (importMode == ImportMode.InitialLoad) - { - auditLogger.DidNotReceiveWithAnyArgs().LogAudit( - auditAction: default, - operation: default, - resourceType: default, - requestUri: default, - statusCode: default, - correlationId: default, - callerIpAddress: default, - callerClaims: default); - } - } - - [InlineData(ImportMode.InitialLoad)] - 
[InlineData(ImportMode.IncrementalLoad)] - [Theory] - public async Task GivenAnOrchestratorJob_WhenSubJobFailedAndOthersRunning_ThenExceptionWithBadRequestShouldThrownAndContextUpdated(ImportMode importMode) - { - RequestContextAccessor contextAccessor = Substitute.For>(); - ILoggerFactory loggerFactory = new NullLoggerFactory(); - IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For(); - IMediator mediator = Substitute.For(); - ImportOrchestratorJobDefinition importOrchestratorInputData = new ImportOrchestratorJobDefinition(); - TestQueueClient testQueueClient = new TestQueueClient(); - IAuditLogger auditLogger = Substitute.For(); - bool getJobByGroupIdCalledTime = false; - testQueueClient.GetJobByIdFunc = (queueClient, id, _) => - { - if (id > 10) - { - return new JobInfo() - { - Id = id, - Status = JobManagement.JobStatus.Failed, - Result = JsonConvert.SerializeObject(new ImportJobErrorResult() { ErrorMessage = "error", HttpStatusCode = HttpStatusCode.BadRequest }), - }; - } - - JobInfo jobInfo = testQueueClient.JobInfos.First(t => t.Id == id); - if (jobInfo.Status == JobStatus.Created) - { - jobInfo.Status = JobStatus.Running; - return jobInfo; - } - - return jobInfo; - }; - testQueueClient.GetJobByGroupIdFunc = (queueClient, groupId, _) => - { - IEnumerable jobInfos = queueClient.JobInfos.Where(t => t.GroupId == groupId); - if (!getJobByGroupIdCalledTime) - { - foreach (JobInfo jobInfo in jobInfos) - { - if (jobInfo.Status == JobStatus.Running) - { - jobInfo.Status = JobStatus.Completed; - } - } - } - - getJobByGroupIdCalledTime = true; - return jobInfos.ToList(); - }; - - importOrchestratorInputData.BaseUri = new Uri("http://dummy"); - - var inputs = new List(); - for (int i = 0; i < 100; i++) - { - inputs.Add(new InputResource() { Type = "Resource", Url = new Uri($"http://dummy/{i}") }); - } - - importOrchestratorInputData.Input = inputs; - importOrchestratorInputData.InputFormat = "ndjson"; - importOrchestratorInputData.InputSource = new 
Uri("http://dummy"); - importOrchestratorInputData.RequestUri = new Uri("http://dummy"); - importOrchestratorInputData.ImportMode = importMode; - JobInfo orchestratorJobInfo = (await testQueueClient.EnqueueAsync(0, new string[] { JsonConvert.SerializeObject(importOrchestratorInputData) }, 1, false, false, CancellationToken.None)).First(); - - integrationDataStoreClient.GetPropertiesAsync(Arg.Any(), Arg.Any()) - .Returns(callInfo => - { - Dictionary properties = new Dictionary(); - properties[IntegrationDataStoreClientConstants.BlobPropertyETag] = "test"; - properties[IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L; - return properties; - }); - - ImportOrchestratorJob orchestratorJob = new ImportOrchestratorJob( - mediator, - contextAccessor, - integrationDataStoreClient, - testQueueClient, - Options.Create(new ImportTaskConfiguration()), - loggerFactory, - auditLogger); - orchestratorJob.PollingPeriodSec = 0; - - var jobExecutionException = await Assert.ThrowsAnyAsync(() => orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - ImportJobErrorResult resultDetails = (ImportJobErrorResult)jobExecutionException.Error; - - Assert.Equal(HttpStatusCode.BadRequest, resultDetails.HttpStatusCode); - - _ = mediator.Received().Publish( - Arg.Is( - notification => notification.Id == orchestratorJobInfo.Id.ToString() && - notification.Status == JobStatus.Failed.ToString() && - notification.CreateTime == orchestratorJobInfo.CreateDate && - notification.SucceededCount == 0 && - notification.FailedCount == 0), - Arg.Any()); - - if (importMode == ImportMode.IncrementalLoad) - { - auditLogger.Received(1); - auditLogger.Received().LogAudit( - auditAction: Arg.Any(), - operation: Arg.Any(), - resourceType: Arg.Any(), - requestUri: Arg.Any(), - statusCode: Arg.Any(), - correlationId: Arg.Any(), - callerIpAddress: Arg.Any(), - callerClaims: Arg.Any>>(), - customHeaders: Arg.Any>(), - operationType: Arg.Any(), - callerAgent: Arg.Any(), - 
additionalProperties: Arg.Is>(dict => - dict.ContainsKey("JobId") && dict["JobId"].Equals(orchestratorJobInfo.Id.ToString()) && - dict.ContainsKey("SucceededResources") && dict["SucceededResources"].Equals("0") && - dict.ContainsKey("FailedResources") && dict["FailedResources"].Equals("0"))); - } - else if (importMode == ImportMode.InitialLoad) - { - auditLogger.DidNotReceiveWithAnyArgs().LogAudit( - auditAction: default, - operation: default, - resourceType: default, - requestUri: default, - statusCode: default, - correlationId: default, - callerIpAddress: default, - callerClaims: default); - } - } - - [InlineData(ImportMode.InitialLoad)] - [InlineData(ImportMode.IncrementalLoad)] - [Theory] - public async Task GivenAnOrchestratorJob_WhneSubJobCancelledAfterThreeCalls_ThenOperationCanceledExceptionShouldBeThrownAndContextUpdate(ImportMode importMode) - { - RequestContextAccessor contextAccessor = Substitute.For>(); - ILoggerFactory loggerFactory = new NullLoggerFactory(); - IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For(); - IMediator mediator = Substitute.For(); - ImportOrchestratorJobDefinition importOrchestratorJobInputData = new ImportOrchestratorJobDefinition(); - TestQueueClient testQueueClient = new TestQueueClient(); - IAuditLogger auditLogger = Substitute.For(); - int callTime = 0; - testQueueClient.GetJobByIdFunc = (queueClient, id, _) => - { - JobInfo jobInfo = queueClient.JobInfos.First(t => t.Id == id); - if (++callTime > 3) - { - jobInfo.Status = JobStatus.Cancelled; - jobInfo.Result = JsonConvert.SerializeObject(new ImportJobErrorResult() { ErrorMessage = "Job Cancelled" }); - } - - return jobInfo; - }; - importOrchestratorJobInputData.BaseUri = new Uri("http://dummy"); - - var inputs = new List(); - inputs.Add(new InputResource() { Type = "Resource", Url = new Uri($"http://dummy") }); - - importOrchestratorJobInputData.Input = inputs; - importOrchestratorJobInputData.InputFormat = "ndjson"; - 
importOrchestratorJobInputData.InputSource = new Uri("http://dummy"); - importOrchestratorJobInputData.RequestUri = new Uri("http://dummy"); - importOrchestratorJobInputData.ImportMode = importMode; - - integrationDataStoreClient.GetPropertiesAsync(Arg.Any(), Arg.Any()) - .Returns(callInfo => - { - Dictionary properties = new Dictionary(); - properties[IntegrationDataStoreClientConstants.BlobPropertyETag] = "test"; - properties[IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L; - return properties; - }); - - JobInfo orchestratorJobInfo = (await testQueueClient.EnqueueAsync(0, new string[] { JsonConvert.SerializeObject(importOrchestratorJobInputData) }, 1, false, false, CancellationToken.None)).First(); - ImportOrchestratorJob orchestratorJob = new ImportOrchestratorJob( - mediator, - contextAccessor, - integrationDataStoreClient, - testQueueClient, - Options.Create(new ImportTaskConfiguration()), - loggerFactory, - auditLogger); - orchestratorJob.PollingPeriodSec = 0; - - var jobExecutionException = await Assert.ThrowsAnyAsync(() => orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - ImportJobErrorResult resultDetails = (ImportJobErrorResult)jobExecutionException.Error; - - Assert.Equal(HttpStatusCode.BadRequest, resultDetails.HttpStatusCode); - - _ = mediator.Received().Publish( - Arg.Is( - notification => notification.Id == orchestratorJobInfo.Id.ToString() && - notification.Status == JobStatus.Cancelled.ToString() && - notification.CreateTime == orchestratorJobInfo.CreateDate && - notification.SucceededCount == 0 && - notification.FailedCount == 0), - Arg.Any()); - - if (importMode == ImportMode.IncrementalLoad) - { - auditLogger.Received(1); - auditLogger.Received().LogAudit( - auditAction: Arg.Any(), - operation: Arg.Any(), - resourceType: Arg.Any(), - requestUri: Arg.Any(), - statusCode: Arg.Any(), - correlationId: Arg.Any(), - callerIpAddress: Arg.Any(), - callerClaims: Arg.Any>>(), - customHeaders: Arg.Any>(), - 
operationType: Arg.Any(), - callerAgent: Arg.Any(), - additionalProperties: Arg.Is>(dict => - dict.ContainsKey("JobId") && dict["JobId"].Equals(orchestratorJobInfo.Id.ToString()) && - dict.ContainsKey("SucceededResources") && dict["SucceededResources"].Equals("0") && - dict.ContainsKey("FailedResources") && dict["FailedResources"].Equals("0"))); - } - else if (importMode == ImportMode.InitialLoad) - { - auditLogger.DidNotReceiveWithAnyArgs().LogAudit( - auditAction: default, - operation: default, - resourceType: default, - requestUri: default, - statusCode: default, - correlationId: default, - callerIpAddress: default, - callerClaims: default); - } - } - - [InlineData(ImportMode.InitialLoad)] - [InlineData(ImportMode.IncrementalLoad)] - [Theory] - public async Task GivenAnOrchestratorJob_WhenSubJobFailedAfterThreeCalls_ThenExceptionWithBadRequestShouldThrownAndContextUpdated(ImportMode importMode) - { - RequestContextAccessor contextAccessor = Substitute.For>(); - ILoggerFactory loggerFactory = new NullLoggerFactory(); - IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For(); - IMediator mediator = Substitute.For(); - ImportOrchestratorJobDefinition importOrchestratorJobInputData = new ImportOrchestratorJobDefinition(); - TestQueueClient testQueueClient = new TestQueueClient(); - IAuditLogger auditLogger = Substitute.For(); - int callTime = 0; - testQueueClient.GetJobByIdFunc = (queueClient, id, _) => - { - JobInfo jobInfo = queueClient.JobInfos.First(t => t.Id == id); - if (++callTime > 3) - { - jobInfo.Status = JobStatus.Failed; - jobInfo.Result = JsonConvert.SerializeObject(new ImportJobErrorResult() { ErrorMessage = "error", HttpStatusCode = HttpStatusCode.BadRequest }); - } - - return jobInfo; - }; - importOrchestratorJobInputData.BaseUri = new Uri("http://dummy"); - - var inputs = new List(); - inputs.Add(new InputResource() { Type = "Resource", Url = new Uri($"http://dummy") }); - - importOrchestratorJobInputData.Input = inputs; - 
importOrchestratorJobInputData.InputFormat = "ndjson"; - importOrchestratorJobInputData.InputSource = new Uri("http://dummy"); - importOrchestratorJobInputData.RequestUri = new Uri("http://dummy"); - importOrchestratorJobInputData.ImportMode = importMode; - - integrationDataStoreClient.GetPropertiesAsync(Arg.Any(), Arg.Any()) - .Returns(callInfo => - { - Dictionary properties = new Dictionary(); - properties[IntegrationDataStoreClientConstants.BlobPropertyETag] = "test"; - properties[IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L; - return properties; - }); - - JobInfo orchestratorJobInfo = (await testQueueClient.EnqueueAsync(0, new string[] { JsonConvert.SerializeObject(importOrchestratorJobInputData) }, 1, false, false, CancellationToken.None)).First(); - ImportOrchestratorJob orchestratorJob = new ImportOrchestratorJob( - mediator, - contextAccessor, - integrationDataStoreClient, - testQueueClient, - Options.Create(new ImportTaskConfiguration()), - loggerFactory, - auditLogger); - orchestratorJob.PollingPeriodSec = 0; - - var jobExecutionException = await Assert.ThrowsAnyAsync(() => orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - ImportJobErrorResult resultDetails = (ImportJobErrorResult)jobExecutionException.Error; - - Assert.Equal(HttpStatusCode.BadRequest, resultDetails.HttpStatusCode); - Assert.Equal("error", resultDetails.ErrorMessage); - - _ = mediator.Received().Publish( - Arg.Is( - notification => notification.Id == orchestratorJobInfo.Id.ToString() && - notification.Status == JobStatus.Failed.ToString() && - notification.CreateTime == orchestratorJobInfo.CreateDate && - notification.SucceededCount == 0 && - notification.FailedCount == 0), - Arg.Any()); - - if (importMode == ImportMode.IncrementalLoad) - { - auditLogger.Received(1); - auditLogger.Received().LogAudit( - auditAction: Arg.Any(), - operation: Arg.Any(), - resourceType: Arg.Any(), - requestUri: Arg.Any(), - statusCode: Arg.Any(), - correlationId: 
Arg.Any(), - callerIpAddress: Arg.Any(), - callerClaims: Arg.Any>>(), - customHeaders: Arg.Any>(), - operationType: Arg.Any(), - callerAgent: Arg.Any(), - additionalProperties: Arg.Is>(dict => - dict.ContainsKey("JobId") && dict["JobId"].Equals(orchestratorJobInfo.Id.ToString()) && - dict.ContainsKey("SucceededResources") && dict["SucceededResources"].Equals("0") && - dict.ContainsKey("FailedResources") && dict["FailedResources"].Equals("0"))); - } - else if (importMode == ImportMode.InitialLoad) - { - auditLogger.DidNotReceiveWithAnyArgs().LogAudit( - auditAction: default, - operation: default, - resourceType: default, - requestUri: default, - statusCode: default, - correlationId: default, - callerIpAddress: default, - callerClaims: default); - } - } - - [InlineData(ImportMode.InitialLoad)] - [InlineData(ImportMode.IncrementalLoad)] - [Theory] - public async Task GivenAnOrchestratorJob_WhenSubJobCancelled_ThenOperationCancelledExceptionShouldBeThrownAndContextUpdated(ImportMode importMode) - { - RequestContextAccessor contextAccessor = Substitute.For>(); - ILoggerFactory loggerFactory = new NullLoggerFactory(); - IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For(); - IMediator mediator = Substitute.For(); - ImportOrchestratorJobDefinition importOrchestratorInputData = new ImportOrchestratorJobDefinition(); - TestQueueClient testQueueClient = new TestQueueClient(); - IAuditLogger auditLogger = Substitute.For(); - testQueueClient.GetJobByIdFunc = (queueClient, id, _) => - { - JobInfo jobInfo = new JobInfo() - { - Status = JobManagement.JobStatus.Cancelled, - Result = JsonConvert.SerializeObject(new ImportJobErrorResult() { ErrorMessage = "error" }), - }; - - return jobInfo; - }; - - importOrchestratorInputData.BaseUri = new Uri("http://dummy"); - - var inputs = new List(); - inputs.Add(new InputResource() { Type = "Resource", Url = new Uri($"http://dummy") }); - - importOrchestratorInputData.Input = inputs; - 
importOrchestratorInputData.InputFormat = "ndjson"; - importOrchestratorInputData.InputSource = new Uri("http://dummy"); - importOrchestratorInputData.RequestUri = new Uri("http://dummy"); - importOrchestratorInputData.ImportMode = importMode; - - integrationDataStoreClient.GetPropertiesAsync(Arg.Any(), Arg.Any()) - .Returns(callInfo => - { - Dictionary properties = new Dictionary(); - properties[IntegrationDataStoreClientConstants.BlobPropertyETag] = "test"; - properties[IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L; - return properties; - }); - - JobInfo orchestratorJobInfo = (await testQueueClient.EnqueueAsync(0, new string[] { JsonConvert.SerializeObject(importOrchestratorInputData) }, 1, false, false, CancellationToken.None)).First(); - - ImportOrchestratorJob orchestratorJob = new ImportOrchestratorJob( - mediator, - contextAccessor, - integrationDataStoreClient, - testQueueClient, - Options.Create(new ImportTaskConfiguration()), - loggerFactory, - auditLogger); - orchestratorJob.PollingPeriodSec = 0; - - var jobExecutionException = await Assert.ThrowsAnyAsync(() => orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - ImportJobErrorResult resultDetails = (ImportJobErrorResult)jobExecutionException.Error; - - Assert.Equal(HttpStatusCode.BadRequest, resultDetails.HttpStatusCode); - - _ = mediator.Received().Publish( - Arg.Is( - notification => notification.Id == orchestratorJobInfo.Id.ToString() && - notification.Status == JobStatus.Cancelled.ToString() && - notification.CreateTime == orchestratorJobInfo.CreateDate && - notification.SucceededCount == 0 && - notification.FailedCount == 0), - Arg.Any()); - - if (importMode == ImportMode.IncrementalLoad) - { - auditLogger.Received(1); - auditLogger.Received().LogAudit( - auditAction: Arg.Any(), - operation: Arg.Any(), - resourceType: Arg.Any(), - requestUri: Arg.Any(), - statusCode: Arg.Any(), - correlationId: Arg.Any(), - callerIpAddress: Arg.Any(), - callerClaims: 
Arg.Any>>(), - customHeaders: Arg.Any>(), - operationType: Arg.Any(), - callerAgent: Arg.Any(), - additionalProperties: Arg.Is>(dict => - dict.ContainsKey("JobId") && dict["JobId"].Equals(orchestratorJobInfo.Id.ToString()) && - dict.ContainsKey("SucceededResources") && dict["SucceededResources"].Equals("0") && - dict.ContainsKey("FailedResources") && dict["FailedResources"].Equals("0"))); - } - else if (importMode == ImportMode.InitialLoad) - { - auditLogger.DidNotReceiveWithAnyArgs().LogAudit( - auditAction: default, - operation: default, - resourceType: default, - requestUri: default, - statusCode: default, - correlationId: default, - callerIpAddress: default, - callerClaims: default); - } - } - - [InlineData(ImportMode.InitialLoad)] - [InlineData(ImportMode.IncrementalLoad)] - [Theory] - public async Task GivenAnOrchestratorJob_WhenSubJobFailed_ThenExceptionWithBadRequestShouldThrownAndContextUpdated(ImportMode importMode) - { - RequestContextAccessor contextAccessor = Substitute.For>(); - ILoggerFactory loggerFactory = new NullLoggerFactory(); - IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For(); - IMediator mediator = Substitute.For(); - ImportOrchestratorJobDefinition importOrchestratorInputData = new ImportOrchestratorJobDefinition(); - TestQueueClient testQueueClient = new TestQueueClient(); - IAuditLogger auditLogger = Substitute.For(); - testQueueClient.GetJobByIdFunc = (queueClient, id, _) => - { - JobInfo jobInfo = new JobInfo() - { - Status = JobManagement.JobStatus.Failed, - Result = JsonConvert.SerializeObject(new ImportJobErrorResult() { ErrorMessage = "error", HttpStatusCode = HttpStatusCode.BadRequest }), - }; - - return jobInfo; - }; - - importOrchestratorInputData.BaseUri = new Uri("http://dummy"); - - var inputs = new List(); - inputs.Add(new InputResource() { Type = "Resource", Url = new Uri($"http://dummy") }); - - importOrchestratorInputData.Input = inputs; - importOrchestratorInputData.InputFormat = "ndjson"; - 
importOrchestratorInputData.InputSource = new Uri("http://dummy"); - importOrchestratorInputData.RequestUri = new Uri("http://dummy"); - importOrchestratorInputData.ImportMode = importMode; - - integrationDataStoreClient.GetPropertiesAsync(Arg.Any(), Arg.Any()) - .Returns(callInfo => - { - Dictionary properties = new Dictionary(); - properties[IntegrationDataStoreClientConstants.BlobPropertyETag] = "test"; - properties[IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L; - return properties; - }); - - JobInfo orchestratorJobInfo = (await testQueueClient.EnqueueAsync(0, new string[] { JsonConvert.SerializeObject(importOrchestratorInputData) }, 1, false, false, CancellationToken.None)).First(); - - ImportOrchestratorJob orchestratorJob = new ImportOrchestratorJob( - mediator, - contextAccessor, - integrationDataStoreClient, - testQueueClient, - Options.Create(new Core.Configs.ImportTaskConfiguration()), - loggerFactory, - auditLogger); - orchestratorJob.PollingPeriodSec = 0; - - var jobExecutionException = await Assert.ThrowsAnyAsync(() => orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - ImportJobErrorResult resultDetails = (ImportJobErrorResult)jobExecutionException.Error; - - Assert.Equal(HttpStatusCode.BadRequest, resultDetails.HttpStatusCode); - Assert.Equal("error", resultDetails.ErrorMessage); - - Assert.True(testQueueClient.JobInfos.All(t => t.Status == JobStatus.Cancelled)); - - _ = mediator.Received().Publish( - Arg.Is( - notification => notification.Id == orchestratorJobInfo.Id.ToString() && - notification.Status == JobStatus.Failed.ToString() && - notification.CreateTime == orchestratorJobInfo.CreateDate && - notification.SucceededCount == 0 && - notification.FailedCount == 0), - Arg.Any()); - - if (importMode == ImportMode.IncrementalLoad) - { - auditLogger.Received(1); - auditLogger.Received().LogAudit( - auditAction: Arg.Any(), - operation: Arg.Any(), - resourceType: Arg.Any(), - requestUri: Arg.Any(), - 
statusCode: Arg.Any(), - correlationId: Arg.Any(), - callerIpAddress: Arg.Any(), - callerClaims: Arg.Any>>(), - customHeaders: Arg.Any>(), - operationType: Arg.Any(), - callerAgent: Arg.Any(), - additionalProperties: Arg.Is>(dict => - dict.ContainsKey("JobId") && dict["JobId"].Equals(orchestratorJobInfo.Id.ToString()) && - dict.ContainsKey("SucceededResources") && dict["SucceededResources"].Equals("0") && - dict.ContainsKey("FailedResources") && dict["FailedResources"].Equals("0"))); - } - else if (importMode == ImportMode.InitialLoad) - { - auditLogger.DidNotReceiveWithAnyArgs().LogAudit( - auditAction: default, - operation: default, - resourceType: default, - requestUri: default, - statusCode: default, - correlationId: default, - callerIpAddress: default, - callerClaims: default); - } - } - - [Fact] - public async Task GivenAnOrchestratorJob_WhenCancelledBeforeCompleted_ThenProcessingJobsShouldNotBeCancelled() - { - RequestContextAccessor contextAccessor = Substitute.For>(); - ILoggerFactory loggerFactory = new NullLoggerFactory(); - IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For(); - IMediator mediator = Substitute.For(); - ImportOrchestratorJobDefinition importOrchestratorJobInputData = new ImportOrchestratorJobDefinition(); - List<(long begin, long end)> surrogatedIdRanges = new List<(long begin, long end)>(); - TestQueueClient testQueueClient = new TestQueueClient(); - IAuditLogger auditLogger = Substitute.For(); - testQueueClient.GetJobByIdFunc = (testQueueClient, id, cancellationToken) => - { - JobInfo jobInfo = testQueueClient.JobInfos.First(t => t.Id == id); - - if (jobInfo == null) - { - return null; - } - - if (jobInfo.Status == JobManagement.JobStatus.Completed) - { - return jobInfo; - } - - jobInfo.Status = JobStatus.Running; - return jobInfo; - }; - - importOrchestratorJobInputData.BaseUri = new Uri("http://dummy"); - var inputs = new List(); - inputs.Add(new InputResource() { Type = "Resource", Url = new Uri($"http://dummy") 
}); - - importOrchestratorJobInputData.Input = inputs; - importOrchestratorJobInputData.InputFormat = "ndjson"; - importOrchestratorJobInputData.InputSource = new Uri("http://dummy"); - importOrchestratorJobInputData.RequestUri = new Uri("http://dummy"); - - integrationDataStoreClient.GetPropertiesAsync(Arg.Any(), Arg.Any()) - .Returns(callInfo => - { - Dictionary properties = new Dictionary(); - properties[IntegrationDataStoreClientConstants.BlobPropertyETag] = "test"; - properties[IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L; - return properties; - }); - - JobInfo orchestratorJobInfo = (await testQueueClient.EnqueueAsync(0, new string[] { JsonConvert.SerializeObject(importOrchestratorJobInputData) }, 1, false, false, CancellationToken.None)).First(); - - ImportOrchestratorJob orchestratorJob = new ImportOrchestratorJob( - mediator, - contextAccessor, - integrationDataStoreClient, - testQueueClient, - Options.Create(new Core.Configs.ImportTaskConfiguration()), - loggerFactory, - auditLogger); - orchestratorJob.PollingPeriodSec = 0; - - CancellationTokenSource cancellationToken = new CancellationTokenSource(); - cancellationToken.CancelAfter(TimeSpan.FromSeconds(1)); - await Assert.ThrowsAnyAsync(() => orchestratorJob.ExecuteAsync(orchestratorJobInfo, cancellationToken.Token)); - - Assert.True(testQueueClient.JobInfos.All(t => t.Status != JobStatus.Cancelled && !t.CancelRequested)); - } - - private static async Task VerifyJobStatusChangedAsync(int inputFileCount, JobStatus jobStatus, int succeedCount, int failedCount, int resumeFrom = -1, int completedCount = 0) - { - RequestContextAccessor contextAccessor = Substitute.For>(); - ILoggerFactory loggerFactory = new NullLoggerFactory(); - IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For(); - IMediator mediator = Substitute.For(); - ImportOrchestratorJobDefinition importOrchestratorJobInputData = new ImportOrchestratorJobDefinition(); - ImportOrchestratorJobResult 
importOrchestratorJobResult = new ImportOrchestratorJobResult(); - - TestQueueClient testQueueClient = new TestQueueClient(); - IAuditLogger auditLogger = Substitute.For(); - testQueueClient.GetJobByIdFunc = (testQueueClient, id, _) => - { - JobInfo jobInfo = testQueueClient.JobInfos.First(t => t.Id == id); - - if (jobInfo == null) - { - return null; - } - - if (jobInfo.Status == JobStatus.Completed) - { - return jobInfo; - } - - if (jobInfo.Id > succeedCount + 1) - { - return new JobInfo() - { - Id = jobInfo.Id, - Status = jobStatus, - Result = JsonConvert.SerializeObject(new ImportJobErrorResult() { ErrorMessage = "error", HttpStatusCode = HttpStatusCode.BadRequest }), - }; - } - - ImportProcessingJobDefinition processingInput = jobInfo.DeserializeDefinition(); - ImportProcessingJobResult processingResult = new ImportProcessingJobResult(); - processingResult.SucceededResources = 1; - processingResult.FailedResources = 1; - processingResult.ErrorLogLocation = "http://dummy/error"; - - jobInfo.Result = JsonConvert.SerializeObject(processingResult); - jobInfo.Status = JobStatus.Completed; - return jobInfo; - }; - - importOrchestratorJobInputData.BaseUri = new Uri("http://dummy"); - var inputs = new List(); - - bool resumeMode = resumeFrom >= 0; - for (int i = 0; i < inputFileCount; ++i) - { - string location = $"http://dummy/{i}"; - inputs.Add(new InputResource() { Type = "Resource", Url = new Uri(location) }); - - if (resumeMode) - { - if (i <= resumeFrom) - { - ImportProcessingJobDefinition processingInput = new ImportProcessingJobDefinition() - { - ResourceLocation = "http://test", - }; - - JobInfo jobInfo = (await testQueueClient.EnqueueAsync(0, new string[] { JsonConvert.SerializeObject(processingInput) }, 1, false, false, CancellationToken.None)).First(); - - ImportProcessingJobResult processingResult = new ImportProcessingJobResult(); - processingResult.SucceededResources = 1; - processingResult.FailedResources = 1; - processingResult.ErrorLogLocation = 
"http://dummy/error"; - - jobInfo.Result = JsonConvert.SerializeObject(processingResult); - if (i < completedCount) - { - jobInfo.Status = JobManagement.JobStatus.Completed; - importOrchestratorJobResult.SucceededResources += 1; - importOrchestratorJobResult.FailedResources += 1; - } - else - { - jobInfo.Status = JobManagement.JobStatus.Running; - } - - importOrchestratorJobResult.CreatedJobs += 1; - } - } - } - - importOrchestratorJobInputData.Input = inputs; - importOrchestratorJobInputData.InputFormat = "ndjson"; - importOrchestratorJobInputData.InputSource = new Uri("http://dummy"); - importOrchestratorJobInputData.RequestUri = new Uri("http://dummy"); - JobInfo orchestratorJobInfo = (await testQueueClient.EnqueueAsync(0, [JsonConvert.SerializeObject(importOrchestratorJobInputData)], 1, false, false, CancellationToken.None)).First(); - - integrationDataStoreClient.GetPropertiesAsync(Arg.Any(), Arg.Any()) - .Returns(callInfo => - { - Dictionary properties = new Dictionary(); - properties[IntegrationDataStoreClientConstants.BlobPropertyETag] = "test"; - properties[IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L; - return properties; - }); - - ImportOrchestratorJob orchestratorJob = new ImportOrchestratorJob( - mediator, - contextAccessor, - integrationDataStoreClient, - testQueueClient, - Options.Create(new ImportTaskConfiguration()), - loggerFactory, - auditLogger); - orchestratorJob.PollingPeriodSec = 0; - var jobExecutionException = await Assert.ThrowsAnyAsync(() => orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None)); - ImportJobErrorResult resultDetails = (ImportJobErrorResult)jobExecutionException.Error; - - Assert.Equal(HttpStatusCode.BadRequest, resultDetails.HttpStatusCode); - _ = mediator.Received().Publish( - Arg.Is( - notification => notification.Id.Equals(orchestratorJobInfo.Id.ToString()) && - notification.Status == jobStatus.ToString() && - notification.CreateTime == orchestratorJobInfo.CreateDate && - 
notification.SucceededCount == succeedCount && - notification.FailedCount == failedCount), - Arg.Any()); - } - - private static async Task VerifyCommonOrchestratorJobAsync(int inputFileCount, int resumeFrom = -1, int completedCount = 0) - { - RequestContextAccessor contextAccessor = Substitute.For>(); - ILoggerFactory loggerFactory = new NullLoggerFactory(); - IIntegrationDataStoreClient integrationDataStoreClient = Substitute.For(); - IMediator mediator = Substitute.For(); - ImportOrchestratorJobDefinition importOrchestratorJobInputData = new ImportOrchestratorJobDefinition(); - ImportOrchestratorJobResult importOrchestratorJobResult = new ImportOrchestratorJobResult(); - - TestQueueClient testQueueClient = new TestQueueClient(); - IAuditLogger auditLogger = Substitute.For(); - testQueueClient.GetJobByIdFunc = (testQueueClient, id, _) => - { - JobInfo jobInfo = testQueueClient.JobInfos.First(t => t.Id == id); - - if (jobInfo == null) - { - return null; - } - - if (jobInfo.Status == JobManagement.JobStatus.Completed) - { - return jobInfo; - } - - ImportProcessingJobDefinition processingInput = jobInfo.DeserializeDefinition(); - ImportProcessingJobResult processingResult = new ImportProcessingJobResult(); - processingResult.SucceededResources = 1; - processingResult.FailedResources = 1; - processingResult.ErrorLogLocation = "http://dummy/error"; - - jobInfo.Result = JsonConvert.SerializeObject(processingResult); - jobInfo.Status = JobManagement.JobStatus.Completed; - return jobInfo; - }; - - importOrchestratorJobInputData.BaseUri = new Uri("http://dummy"); - importOrchestratorJobInputData.RequestUri = importOrchestratorJobInputData.BaseUri; - var inputs = new List(); - - bool resumeMode = resumeFrom >= 0; - for (int i = 0; i < inputFileCount; ++i) - { - string location = $"http://dummy/{i}"; - inputs.Add(new InputResource() { Type = "Resource", Url = new Uri(location) }); - - if (resumeMode) - { - if (i <= resumeFrom) - { - var processingInput = new 
ImportProcessingJobDefinition() - { - TypeId = 1, - ResourceLocation = location, - BytesToRead = ImportOrchestratorJob.BytesToRead, - UriString = importOrchestratorJobInputData.RequestUri.ToString(), - BaseUriString = importOrchestratorJobInputData.BaseUri.ToString(), - ResourceType = "Resource", - GroupId = 1, - }; - - JobInfo jobInfo = (await testQueueClient.EnqueueAsync(1, new string[] { JsonConvert.SerializeObject(processingInput) }, 1, false, false, CancellationToken.None)).First(); - - ImportProcessingJobResult processingResult = new ImportProcessingJobResult(); - processingResult.SucceededResources = 1; - processingResult.FailedResources = 1; - processingResult.ErrorLogLocation = "http://dummy/error"; - - jobInfo.Result = JsonConvert.SerializeObject(processingResult); - if (i < completedCount) - { - jobInfo.Status = JobManagement.JobStatus.Completed; - importOrchestratorJobResult.SucceededResources += 1; - importOrchestratorJobResult.FailedResources += 1; - } - else - { - jobInfo.Status = JobManagement.JobStatus.Running; - } - - importOrchestratorJobResult.CreatedJobs += 1; - } - } - } - - importOrchestratorJobInputData.Input = inputs; - importOrchestratorJobInputData.InputFormat = "ndjson"; - importOrchestratorJobInputData.InputSource = new Uri("http://dummy"); - importOrchestratorJobInputData.RequestUri = new Uri("http://dummy"); - JobInfo orchestratorJobInfo = (await testQueueClient.EnqueueAsync(1, new string[] { JsonConvert.SerializeObject(importOrchestratorJobInputData) }, 1, false, false, CancellationToken.None)).First(); - orchestratorJobInfo.Result = JsonConvert.SerializeObject(importOrchestratorJobResult); - - integrationDataStoreClient.GetPropertiesAsync(Arg.Any(), Arg.Any()) - .Returns(callInfo => - { - var properties = new Dictionary - { - [IntegrationDataStoreClientConstants.BlobPropertyETag] = "test", - [IntegrationDataStoreClientConstants.BlobPropertyLength] = 1000L, - }; - return properties; - }); - - var orchestratorJob = new 
ImportOrchestratorJob( - mediator, - contextAccessor, - integrationDataStoreClient, - testQueueClient, - Options.Create(new Core.Configs.ImportTaskConfiguration()), - loggerFactory, - auditLogger) - { - PollingPeriodSec = 0, - }; - - string result = await orchestratorJob.ExecuteAsync(orchestratorJobInfo, CancellationToken.None); - ImportOrchestratorJobResult resultDetails = JsonConvert.DeserializeObject(result); - Assert.NotEmpty(resultDetails.Request); - - Assert.Equal(inputFileCount, testQueueClient.JobInfos.Count() - 1); - - _ = mediator.Received().Publish( - Arg.Is( - notification => notification.Id.Equals(orchestratorJobInfo.Id.ToString()) && - notification.Status == JobStatus.Completed.ToString() && - notification.CreateTime == orchestratorJobInfo.CreateDate && - notification.SucceededCount == inputFileCount && - notification.FailedCount == inputFileCount), - Arg.Any()); - } } } diff --git a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportProcessingJobTests.cs b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportProcessingJobTests.cs index 8c410bd7b9..3070e3821a 100644 --- a/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportProcessingJobTests.cs +++ b/src/Microsoft.Health.Fhir.SqlServer.UnitTests/Features/Operations/Import/ImportProcessingJobTests.cs @@ -10,10 +10,13 @@ using System.Threading; using System.Threading.Channels; using System.Threading.Tasks; +using MediatR; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; using Microsoft.Health.Core.Features.Context; +using Microsoft.Health.Fhir.Core.Features.Audit; using Microsoft.Health.Fhir.Core.Features.Context; +using Microsoft.Health.Fhir.Core.Features.Operations.BulkDelete; using Microsoft.Health.Fhir.Core.Features.Operations.Import; using Microsoft.Health.Fhir.Core.Features.Persistence; using Microsoft.Health.Fhir.SqlServer.Features.Operations.Import; @@ -105,6 +108,9 @@ public 
async Task GivenImportInput_WhenExceptionThrowForLoad_ThenJobExecutionExc IImportErrorStoreFactory importErrorStoreFactory = Substitute.For(); RequestContextAccessor contextAccessor = Substitute.For>(); ILoggerFactory loggerFactory = new NullLoggerFactory(); + IMediator mediator = Substitute.For(); + IAuditLogger auditLogger = Substitute.For(); + IQueueClient queueClient = Substitute.For(); loader.LoadResources(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any()) .Returns(callInfo => @@ -134,11 +140,14 @@ public async Task GivenImportInput_WhenExceptionThrowForLoad_ThenJobExecutionExc }); ImportProcessingJob job = new ImportProcessingJob( + mediator, + queueClient, loader, importer, importErrorStoreFactory, contextAccessor, - loggerFactory); + loggerFactory, + auditLogger); await Assert.ThrowsAsync(() => job.ExecuteAsync(GetJobInfo(inputData, result), CancellationToken.None)); } @@ -155,6 +164,9 @@ public async Task GivenImportInput_WhenOperationWasCancelledExceptionThrow_ThenJ IImportErrorStoreFactory importErrorStoreFactory = Substitute.For(); RequestContextAccessor contextAccessor = Substitute.For>(); ILoggerFactory loggerFactory = new NullLoggerFactory(); + IMediator mediator = Substitute.For(); + IAuditLogger auditLogger = Substitute.For(); + IQueueClient queueClient = Substitute.For(); importer.Import(Arg.Any>(), Arg.Any(), Arg.Any(), Arg.Any()) .Returns(callInfo => @@ -168,11 +180,14 @@ public async Task GivenImportInput_WhenOperationWasCancelledExceptionThrow_ThenJ }); ImportProcessingJob job = new ImportProcessingJob( + mediator, + queueClient, loader, importer, importErrorStoreFactory, contextAccessor, - loggerFactory); + loggerFactory, + auditLogger); await Assert.ThrowsAsync(() => job.ExecuteAsync(GetJobInfo(inputData, result), CancellationToken.None)); } @@ -188,6 +203,9 @@ private static async Task VerifyCommonImportAsync(ImportProcessingJobDefinition IImportErrorStoreFactory importErrorStoreFactory = Substitute.For(); 
RequestContextAccessor contextAccessor = Substitute.For>(); ILoggerFactory loggerFactory = new NullLoggerFactory(); + IMediator mediator = Substitute.For(); + IAuditLogger auditLogger = Substitute.For(); + IQueueClient queueClient = Substitute.For(); loader.LoadResources(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any()) .Returns(callInfo => @@ -237,7 +255,7 @@ private static async Task VerifyCommonImportAsync(ImportProcessingJobDefinition return progress; }); - var job = new ImportProcessingJob(loader, importer, importErrorStoreFactory, contextAccessor, loggerFactory); + var job = new ImportProcessingJob(mediator, queueClient, loader, importer, importErrorStoreFactory, contextAccessor, loggerFactory, auditLogger); string resultString = await job.ExecuteAsync(GetJobInfo(inputData, currentResult), CancellationToken.None); ImportProcessingJobResult result = JsonConvert.DeserializeObject(resultString); diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/ExceptionExtension.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/ExceptionExtension.cs index a18bcf9969..fe3ac68629 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/ExceptionExtension.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/ExceptionExtension.cs @@ -42,7 +42,8 @@ private static bool HasNetworkErrorPattern(string str) || str.Contains("connection timeout expired", StringComparison.OrdinalIgnoreCase) || str.Contains("existing connection was forcibly closed by the remote host", StringComparison.OrdinalIgnoreCase) || str.Contains("connection was recovered and rowcount in the first query is not available", StringComparison.OrdinalIgnoreCase) - || str.Contains("connection was successfully established with the server, but then an error occurred during the login process", StringComparison.OrdinalIgnoreCase); + || str.Contains("connection was successfully established with the server, but then an error occurred during the login process", StringComparison.OrdinalIgnoreCase) + || 
str.Contains("server provided routing information, but timeout already expired", StringComparison.OrdinalIgnoreCase); ////A severe error occurred on the current command. The results, if any, should be discarded. ////Meaning: diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportOrchestratorJob.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportOrchestratorJob.cs index 8a61fa0d0c..65a9b42a2b 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportOrchestratorJob.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportOrchestratorJob.cs @@ -85,9 +85,7 @@ public ImportOrchestratorJob( public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancellationToken) { - ImportOrchestratorJobDefinition inputData = jobInfo.DeserializeDefinition(); - ImportOrchestratorJobResult currentResult = string.IsNullOrEmpty(jobInfo.Result) ? new ImportOrchestratorJobResult() : jobInfo.DeserializeResult(); - + var inputData = jobInfo.DeserializeDefinition(); var fhirRequestContext = new FhirRequestContext( method: "Import", uriString: inputData.RequestUri.ToString(), @@ -98,73 +96,57 @@ public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancel { IsBackgroundTask = true, }; - _contextAccessor.RequestContext = fhirRequestContext; - currentResult.Request = inputData.RequestUri.ToString(); - + var result = new ImportOrchestratorJobResult(); + result.Request = inputData.RequestUri.ToString(); ImportJobErrorResult errorResult = null; - try { cancellationToken.ThrowIfCancellationRequested(); await ValidateResourcesAsync(inputData, cancellationToken); - _logger.LogJobInformation(jobInfo, "Input Resources Validated."); + _logger.LogJobInformation(jobInfo, "Input resources validated."); - await ExecuteImportProcessingJobAsync(jobInfo, inputData, currentResult, cancellationToken); - _logger.LogJobInformation(jobInfo, "SubJobs Completed."); + await 
EnqueueProcessingJobsAsync(jobInfo, inputData, result, cancellationToken); + _logger.LogJobInformation(jobInfo, "Registration of processing jobs completed."); } - catch (TaskCanceledException taskCanceledEx) + catch (OperationCanceledException ex) { - _logger.LogJobInformation(taskCanceledEx, jobInfo, "Import job canceled. {Message}", taskCanceledEx.Message); - errorResult = new ImportJobErrorResult() - { - HttpStatusCode = HttpStatusCode.BadRequest, - ErrorMessage = taskCanceledEx.Message, - }; - await WaitCancelledJobCompletedAsync(jobInfo); - await SendImportMetricsNotification(JobStatus.Cancelled, jobInfo, currentResult, inputData.ImportMode, fhirRequestContext); - } - catch (OperationCanceledException canceledEx) - { - _logger.LogJobInformation(canceledEx, jobInfo, "Import job canceled. {Message}", canceledEx.Message); - errorResult = new ImportJobErrorResult() - { - HttpStatusCode = HttpStatusCode.BadRequest, - ErrorMessage = canceledEx.Message, - }; - await WaitCancelledJobCompletedAsync(jobInfo); - await SendImportMetricsNotification(JobStatus.Cancelled, jobInfo, currentResult, inputData.ImportMode, fhirRequestContext); + _logger.LogJobInformation(ex, jobInfo, "Import job canceled. 
{Message}", ex.Message); + errorResult = new ImportJobErrorResult() { ErrorMessage = ex.Message, HttpStatusCode = HttpStatusCode.BadRequest }; + await SendNotification(JobStatus.Cancelled, jobInfo, 0, 0, result.TotalBytes, inputData.ImportMode, fhirRequestContext, _logger, _auditLogger, _mediator); } - catch (IntegrationDataStoreException integrationDataStoreEx) + catch (IntegrationDataStoreException ex) { - _logger.LogJobInformation(integrationDataStoreEx, jobInfo, "Failed to access input files."); - errorResult = new ImportJobErrorResult() - { - HttpStatusCode = integrationDataStoreEx.StatusCode, - ErrorMessage = integrationDataStoreEx.Message, - }; - await SendImportMetricsNotification(JobStatus.Failed, jobInfo, currentResult, inputData.ImportMode, fhirRequestContext); + _logger.LogJobInformation(ex, jobInfo, "Failed to access input files."); + errorResult = new ImportJobErrorResult() { ErrorMessage = ex.Message, HttpStatusCode = ex.StatusCode }; + await SendNotification(JobStatus.Failed, jobInfo, 0, 0, result.TotalBytes, inputData.ImportMode, fhirRequestContext, _logger, _auditLogger, _mediator); } catch (JobExecutionException ex) { - _logger.LogJobInformation(ex, jobInfo, "Failed to process input resources."); errorResult = ex.Error != null ? 
(ImportJobErrorResult)ex.Error : new ImportJobErrorResult() { ErrorMessage = ex.Message, ErrorDetails = ex.ToString() }; if (errorResult.HttpStatusCode == 0) { errorResult.HttpStatusCode = HttpStatusCode.InternalServerError; } - await CancelProcessingJobsAsync(jobInfo); - await SendImportMetricsNotification(JobStatus.Failed, jobInfo, currentResult, inputData.ImportMode, fhirRequestContext); + if (errorResult.HttpStatusCode == HttpStatusCode.InternalServerError) + { + _logger.LogJobError(ex, jobInfo, "Failed to register processing jobs."); + } + else + { + _logger.LogJobInformation(ex, jobInfo, "Failed to register processing jobs."); + } + + await SendNotification(JobStatus.Failed, jobInfo, 0, 0, result.TotalBytes, inputData.ImportMode, fhirRequestContext, _logger, _auditLogger, _mediator); } catch (Exception ex) { - _logger.LogJobInformation(ex, jobInfo, "Failed to import data."); + _logger.LogJobError(ex, jobInfo, "Failed to register processing jobs."); errorResult = new ImportJobErrorResult() { ErrorMessage = ex.Message, ErrorDetails = ex.ToString(), HttpStatusCode = HttpStatusCode.InternalServerError }; - await CancelProcessingJobsAsync(jobInfo); - await SendImportMetricsNotification(JobStatus.Failed, jobInfo, currentResult, inputData.ImportMode, fhirRequestContext); + await SendNotification(JobStatus.Failed, jobInfo, 0, 0, result.TotalBytes, inputData.ImportMode, fhirRequestContext, _logger, _auditLogger, _mediator); } if (errorResult != null) @@ -172,8 +154,7 @@ public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancel throw new JobExecutionException(errorResult.ErrorMessage, errorResult); } - await SendImportMetricsNotification(JobStatus.Completed, jobInfo, currentResult, inputData.ImportMode, fhirRequestContext); - return JsonConvert.SerializeObject(currentResult); + return JsonConvert.SerializeObject(result); } private async Task ValidateResourcesAsync(ImportOrchestratorJobDefinition inputData, CancellationToken cancellationToken) @@ -193,24 
+174,24 @@ private async Task ValidateResourcesAsync(ImportOrchestratorJobDefinition inputD }); } - private async Task SendImportMetricsNotification(JobStatus jobStatus, JobInfo jobInfo, ImportOrchestratorJobResult currentResult, ImportMode importMode, FhirRequestContext fhirRequestContext) + internal static async Task SendNotification(JobStatus status, JobInfo info, long succeeded, long failed, long bytes, ImportMode importMode, FhirRequestContext context, ILogger logger, IAuditLogger auditLogger, IMediator mediator) { - _logger.LogJobInformation(jobInfo, "SucceededResources {SucceededResources} and FailedResources {FailedResources} in Import", currentResult.SucceededResources, currentResult.FailedResources); + logger.LogJobInformation(info, "SucceededResources {SucceededResources} and FailedResources {FailedResources} in Import", succeeded, failed); if (importMode == ImportMode.IncrementalLoad) { var incrementalImportProperties = new Dictionary(); - incrementalImportProperties["JobId"] = jobInfo.Id.ToString(); - incrementalImportProperties["SucceededResources"] = currentResult.SucceededResources.ToString(); - incrementalImportProperties["FailedResources"] = currentResult.FailedResources.ToString(); + incrementalImportProperties["JobId"] = info.Id.ToString(); + incrementalImportProperties["SucceededResources"] = succeeded.ToString(); + incrementalImportProperties["FailedResources"] = failed.ToString(); - _auditLogger.LogAudit( + auditLogger.LogAudit( AuditAction.Executed, operation: "import/" + ImportMode.IncrementalLoad.ToString(), resourceType: string.Empty, - requestUri: fhirRequestContext.Uri, + requestUri: context.Uri, statusCode: HttpStatusCode.Accepted, - correlationId: fhirRequestContext.CorrelationId, + correlationId: context.CorrelationId, callerIpAddress: null, callerClaims: null, customHeaders: null, @@ -218,34 +199,30 @@ private async Task SendImportMetricsNotification(JobStatus jobStatus, JobInfo jo callerAgent: DefaultCallerAgent, 
additionalProperties: incrementalImportProperties); - _logger.LogJobInformation(jobInfo, "Audit logs for incremental import are added."); + logger.LogJobInformation(info, "Audit logs for incremental import are added."); } var importJobMetricsNotification = new ImportJobMetricsNotification( - jobInfo.Id.ToString(), - jobStatus.ToString(), - jobInfo.CreateDate, + info.Id.ToString(), + status.ToString(), + info.CreateDate, Clock.UtcNow, - currentResult.TotalBytes, - currentResult.SucceededResources, - currentResult.FailedResources, + bytes, + succeeded, + failed, importMode); - await _mediator.Publish(importJobMetricsNotification, CancellationToken.None); + await mediator.Publish(importJobMetricsNotification, CancellationToken.None); } - private async Task ExecuteImportProcessingJobAsync(JobInfo coord, ImportOrchestratorJobDefinition coordDefinition, ImportOrchestratorJobResult currentResult, CancellationToken cancellationToken) + private async Task EnqueueProcessingJobsAsync(JobInfo coord, ImportOrchestratorJobDefinition coordDefinition, ImportOrchestratorJobResult result, CancellationToken cancellationToken) { - currentResult.TotalBytes = 0; - currentResult.FailedResources = 0; - currentResult.SucceededResources = 0; - // split blobs by size var inputs = new List(); await Parallel.ForEachAsync(coordDefinition.Input, new ParallelOptions { MaxDegreeOfParallelism = 16 }, async (input, cancel) => { var blobLength = (long)(await _integrationDataStoreClient.GetPropertiesAsync(input.Url, cancellationToken))[IntegrationDataStoreClientConstants.BlobPropertyLength]; - currentResult.TotalBytes += blobLength; + result.TotalBytes += blobLength; foreach (var offset in GetOffsets(blobLength, BytesToRead)) { var newInput = input.Clone(); @@ -260,9 +237,7 @@ private async Task ExecuteImportProcessingJobAsync(JobInfo coord, ImportOrchestr var jobIds = await EnqueueProcessingJobsAsync(inputs, coord.GroupId, coordDefinition, cancellationToken); - currentResult.CreatedJobs = 
jobIds.Count; - - await WaitCompletion(coord, jobIds, currentResult, cancellationToken); + result.CreatedJobs = jobIds.Count; } internal static IEnumerable GetOffsets(long blobLength, int bytesToRead) @@ -275,81 +250,6 @@ internal static IEnumerable GetOffsets(long blobLength, int bytesToRead) } } - private async Task WaitCompletion(JobInfo orchestratorInfo, IList jobIds, ImportOrchestratorJobResult currentResult, CancellationToken cancellationToken) - { - _logger.LogJobInformation(orchestratorInfo, "Waiting for other workers to pull work from the queue"); - await Task.Delay(TimeSpan.FromSeconds(PollingPeriodSec), cancellationToken); // there is no sense in checking right away as workers are polling queue on the same interval - - do - { - var completedJobIds = new HashSet(); - var jobIdsToCheck = jobIds.Take(20).ToList(); - var jobInfos = new List(); - double duration; - try - { - var start = Stopwatch.StartNew(); - jobInfos.AddRange(await _timeoutRetries.ExecuteAsync(async () => await _queueClient.GetJobsByIdsAsync(QueueType.Import, jobIdsToCheck.ToArray(), false, cancellationToken))); - duration = start.Elapsed.TotalSeconds; - } - catch (SqlException ex) - { - _logger.LogJobError(ex, orchestratorInfo, "Failed to get running jobs."); - throw new JobExecutionException(ex.Message, ex); - } - - foreach (var jobInfo in jobInfos) - { - if (jobInfo.Status != JobStatus.Created && jobInfo.Status != JobStatus.Running) - { - if (jobInfo.Status == JobStatus.Completed) - { - var procesingJobResult = jobInfo.DeserializeResult(); - currentResult.SucceededResources += procesingJobResult.SucceededResources; - currentResult.FailedResources += procesingJobResult.FailedResources; - currentResult.ProcessedBytes += procesingJobResult.ProcessedBytes; - } - else if (jobInfo.Status == JobStatus.Failed) - { - var procesingJobResult = jobInfo.DeserializeResult(); - _logger.LogJobError(jobInfo, "Job is set to 'Failed'. 
Message: {Message}.", procesingJobResult.ErrorMessage); - throw new JobExecutionException(procesingJobResult.ErrorMessage, procesingJobResult); - } - else if (jobInfo.Status == JobStatus.Cancelled) - { - const string message = "Import operation cancelled by customer."; - _logger.LogJobError(jobInfo, message); - throw new OperationCanceledException(message); - } - - completedJobIds.Add(jobInfo.Id); - _logger.LogJobInformation(jobInfo, "Job with id: {JobId} and group id: {GroupId} completed.", jobInfo.Id, jobInfo.GroupId); - } - } - - if (completedJobIds.Count > 0) - { - foreach (var jobId in completedJobIds) - { - jobIds.Remove(jobId); - } - - currentResult.CompletedJobs += completedJobIds.Count; - orchestratorInfo.Result = JsonConvert.SerializeObject(currentResult); - await _queueClient.PutJobHeartbeatAsync(orchestratorInfo, cancellationToken); // remove when progress is reported by selecting results of children. - - _logger.LogJobInformation(orchestratorInfo, "Throttle to avoid high database utilization."); - await Task.Delay(TimeSpan.FromSeconds(duration), cancellationToken); // throttle to avoid high database utilization. 
- } - else - { - _logger.LogJobInformation(orchestratorInfo, "Waiting for child jobs to finish."); - await Task.Delay(TimeSpan.FromSeconds(PollingPeriodSec), cancellationToken); - } - } - while (jobIds.Count > 0); - } - private async Task> EnqueueProcessingJobsAsync(IEnumerable inputs, long groupId, ImportOrchestratorJobDefinition coordDefinition, CancellationToken cancellationToken) { var definitions = new List(); @@ -390,44 +290,5 @@ private async Task> EnqueueProcessingJobsAsync(IEnumerable await _queueClient.GetJobByGroupIdAsync(QueueType.Import, jobInfo.GroupId, false, CancellationToken.None)); - if (jobInfos.All(t => (t.Status != JobStatus.Created && t.Status != JobStatus.Running) || !t.CancelRequested || t.Id == jobInfo.Id)) - { - break; - } - } - catch (SqlException ex) - { - _logger.LogJobWarning(ex, jobInfo, "Failed to get jobs by groupId {GroupId}.", jobInfo.GroupId); - throw new JobExecutionException(ex.Message, ex); - } - - await Task.Delay(TimeSpan.FromSeconds(5)); - } - } } } diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportProcessingJob.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportProcessingJob.cs index f6756faed2..cc1c524cfc 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportProcessingJob.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportProcessingJob.cs @@ -11,9 +11,13 @@ using System.Threading.Tasks; using Azure; using EnsureThat; +using MediatR; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Primitives; +using Microsoft.Health.Core.Features.Audit; using Microsoft.Health.Core.Features.Context; +using Microsoft.Health.Fhir.Core.Extensions; +using Microsoft.Health.Fhir.Core.Features.Audit; using Microsoft.Health.Fhir.Core.Features.Context; using Microsoft.Health.Fhir.Core.Features.Operations; using Microsoft.Health.Fhir.Core.Features.Operations.Import; @@ -26,25 +30,35 @@ namespace 
Microsoft.Health.Fhir.SqlServer.Features.Operations.Import public class ImportProcessingJob : IJob { private const string CancelledErrorMessage = "Import processing job is canceled."; + internal const string DefaultCallerAgent = "Microsoft.Health.Fhir.Server"; + private readonly IMediator _mediator; + private readonly IQueueClient _queueClient; private readonly IImportResourceLoader _importResourceLoader; private readonly IImporter _importer; private readonly IImportErrorStoreFactory _importErrorStoreFactory; private readonly RequestContextAccessor _contextAccessor; private readonly ILogger _logger; + private readonly IAuditLogger _auditLogger; public ImportProcessingJob( + IMediator mediator, + IQueueClient queueClient, IImportResourceLoader importResourceLoader, IImporter importer, IImportErrorStoreFactory importErrorStoreFactory, RequestContextAccessor contextAccessor, - ILoggerFactory loggerFactory) + ILoggerFactory loggerFactory, + IAuditLogger auditLogger) { + _mediator = EnsureArg.IsNotNull(mediator, nameof(mediator)); + _queueClient = EnsureArg.IsNotNull(queueClient, nameof(queueClient)); _importResourceLoader = EnsureArg.IsNotNull(importResourceLoader, nameof(importResourceLoader)); _importer = EnsureArg.IsNotNull(importer, nameof(importer)); _importErrorStoreFactory = EnsureArg.IsNotNull(importErrorStoreFactory, nameof(importErrorStoreFactory)); _contextAccessor = EnsureArg.IsNotNull(contextAccessor, nameof(contextAccessor)); _logger = EnsureArg.IsNotNull(loggerFactory, nameof(loggerFactory)).CreateLogger(); + _auditLogger = EnsureArg.IsNotNull(auditLogger, nameof(auditLogger)); } public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancellationToken) @@ -52,7 +66,7 @@ public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancel EnsureArg.IsNotNull(jobInfo, nameof(jobInfo)); var definition = jobInfo.DeserializeDefinition(); - var currentResult = new ImportProcessingJobResult(); + var result = new ImportProcessingJobResult(); var 
fhirRequestContext = new FhirRequestContext( method: "Import", @@ -71,32 +85,30 @@ public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancel { cancellationToken.ThrowIfCancellationRequested(); - // Initialize error store + // Design of error writes is too complex. We do not need separate init and writes. Also, it leads to adding duplicate error records on job restart. IImportErrorStore importErrorStore = await _importErrorStoreFactory.InitializeAsync(GetErrorFileName(definition.ResourceType, jobInfo.GroupId, jobInfo.Id), cancellationToken); - currentResult.ErrorLogLocation = importErrorStore.ErrorFileLocation; + result.ErrorLogLocation = importErrorStore.ErrorFileLocation; - // Load and parse resource from bulk resource + // Design of resource loader is too complex. There is no need to have any channel and separate load task. + // This design was driven from assumption that worker/processing job deals with entire large file. + // This is not true anymore, as worker deals with just small portion of file accessing it by offset. + // We should just open reader and walk through all needed records in a single thread. 
(Channel importResourceChannel, Task loadTask) = _importResourceLoader.LoadResources(definition.ResourceLocation, definition.Offset, definition.BytesToRead, definition.ResourceType, definition.ImportMode, cancellationToken); // Import to data store var importProgress = await _importer.Import(importResourceChannel, importErrorStore, definition.ImportMode, cancellationToken); - currentResult.SucceededResources = importProgress.SucceededResources; - currentResult.FailedResources = importProgress.FailedResources; - currentResult.ErrorLogLocation = importErrorStore.ErrorFileLocation; - currentResult.ProcessedBytes = importProgress.ProcessedBytes; + result.SucceededResources = importProgress.SucceededResources; + result.FailedResources = importProgress.FailedResources; + result.ErrorLogLocation = importErrorStore.ErrorFileLocation; + result.ProcessedBytes = importProgress.ProcessedBytes; - _logger.LogJobInformation(jobInfo, "Import Job {JobId} progress: succeed {SucceedCount}, failed: {FailedCount}", jobInfo.Id, currentResult.SucceededResources, currentResult.FailedResources); + _logger.LogJobInformation(jobInfo, "Import Job {JobId} progress: succeed {SucceedCount}, failed: {FailedCount}", jobInfo.Id, result.SucceededResources, result.FailedResources); try { await loadTask; } - catch (TaskCanceledException tce) - { - _logger.LogJobWarning(tce, jobInfo, nameof(TaskCanceledException)); - throw; - } catch (OperationCanceledException oce) { _logger.LogJobWarning(oce, jobInfo, nameof(OperationCanceledException)); @@ -116,7 +128,7 @@ public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancel } catch (IntegrationDataStoreException ex) { - _logger.LogJobInformation(ex, jobInfo, "Failed to access input files."); + _logger.LogJobWarning(ex, jobInfo, "Failed to access input files."); var error = new ImportJobErrorResult() { ErrorMessage = ex.Message, HttpStatusCode = ex.StatusCode }; throw new JobExecutionException(ex.Message, error, ex); } @@ -127,14 +139,12 @@ 
public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancel throw new JobExecutionException(ex.Message, error, ex); } - jobInfo.Data = currentResult.SucceededResources + currentResult.FailedResources; - return JsonConvert.SerializeObject(currentResult); - } - catch (TaskCanceledException canceledEx) - { - _logger.LogJobInformation(canceledEx, jobInfo, CancelledErrorMessage); - var error = new ImportJobErrorResult() { ErrorMessage = CancelledErrorMessage }; - throw new JobExecutionException(canceledEx.Message, error, canceledEx); + jobInfo.Data = result.SucceededResources + result.FailedResources; + + // jobs are small, send on success only + await ImportOrchestratorJob.SendNotification(JobStatus.Completed, jobInfo, result.SucceededResources, result.FailedResources, result.ProcessedBytes, definition.ImportMode, fhirRequestContext, _logger, _auditLogger, _mediator); + + return JsonConvert.SerializeObject(result); } catch (OperationCanceledException canceledEx) { @@ -144,7 +154,7 @@ public async Task ExecuteAsync(JobInfo jobInfo, CancellationToken cancel } catch (Exception ex) { - _logger.LogJobInformation(ex, jobInfo, "Critical error in import processing job."); + _logger.LogJobError(ex, jobInfo, "Critical error in import processing job."); var error = new ImportJobErrorResult() { ErrorMessage = ex.Message, ErrorDetails = ex.ToString() }; throw new JobExecutionException(ex.Message, error, ex); } diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlImporter.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlImporter.cs index c61d065599..895b071186 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlImporter.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/SqlImporter.cs @@ -55,30 +55,30 @@ public async Task Import(Channel input long succeededCount = 0; long processedBytes = 0; long currentIndex = -1; - var importErrorBuffer = new List(); - var resourceBuffer = 
new List(); + var errors = new List(); + var resourceBatch = new List(); await foreach (ImportResource resource in inputChannel.Reader.ReadAllAsync(cancellationToken)) { cancellationToken.ThrowIfCancellationRequested(); currentIndex = resource.Index; - resourceBuffer.Add(resource); - if (resourceBuffer.Count < _importTaskConfiguration.TransactionSize) + resourceBatch.Add(resource); + if (resourceBatch.Count < _importTaskConfiguration.TransactionSize) { continue; } - var resultInt = await ImportResourcesInBuffer(resourceBuffer, importErrorBuffer, importMode, cancellationToken); + var resultInt = await ImportResourcesInBuffer(resourceBatch, errors, importMode, cancellationToken); succeededCount += resultInt.LoadedCount; processedBytes += resultInt.ProcessedBytes; } - var result = await ImportResourcesInBuffer(resourceBuffer, importErrorBuffer, importMode, cancellationToken); + var result = await ImportResourcesInBuffer(resourceBatch, errors, importMode, cancellationToken); succeededCount += result.LoadedCount; processedBytes += result.ProcessedBytes; - return await UploadImportErrorsAsync(importErrorStore, succeededCount, importErrorBuffer.Count, importErrorBuffer.ToArray(), currentIndex, processedBytes, cancellationToken); + return await UploadImportErrorsAsync(importErrorStore, succeededCount, errors.Count, errors.ToArray(), currentIndex, processedBytes, cancellationToken); } finally { @@ -93,8 +93,9 @@ public async Task Import(Channel input var validResources = resources.Where(r => string.IsNullOrEmpty(r.ImportError)).ToList(); var newErrors = await _store.ImportResourcesAsync(validResources, importMode, cancellationToken); errors.AddRange(newErrors); + var totalBytes = resources.Sum(_ => (long)_.Length); resources.Clear(); - return (validResources.Count - newErrors.Count, resources.Sum(_ => (long)_.Length)); + return (validResources.Count - newErrors.Count, totalBytes); } private async Task UploadImportErrorsAsync(IImportErrorStore importErrorStore, long 
succeededCount, long failedCount, string[] importErrors, long lastIndex, long processedBytes, CancellationToken cancellationToken) diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlRetry/SqlRetryService.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlRetry/SqlRetryService.cs index 5894c8dd05..5957d3118d 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlRetry/SqlRetryService.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlRetry/SqlRetryService.cs @@ -275,7 +275,7 @@ public async Task ExecuteSql(SqlCommand sqlCommand, Func 0) { - await TryLogEvent($"Retry:{sqlCommand.CommandText}", "Warn", $"retries={retry} error={lastException}", start, cancellationToken); + await TryLogEvent($"SuccessOnRetry:{sqlCommand.CommandText}", "Warn", $"retries={retry} error={lastException}", start, cancellationToken); } return; diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerFhirDataStore.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerFhirDataStore.cs index 904e3e1213..e65c598ff8 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerFhirDataStore.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlServerFhirDataStore.cs @@ -380,6 +380,11 @@ private async Task> ImportResourcesAsync(IReadOnlyList resources, ImportMode importMode, CancellationToken cancellationToken) { + if (resources.Count == 0) // do not go to the database + { + return new List(); + } + (List Loaded, List Conflicts) results; var retries = 0; while (true) diff --git a/src/Microsoft.Health.TaskManagement/JobHosting.cs b/src/Microsoft.Health.TaskManagement/JobHosting.cs index bc6babd0dc..ec15a3aba0 100644 --- a/src/Microsoft.Health.TaskManagement/JobHosting.cs +++ b/src/Microsoft.Health.TaskManagement/JobHosting.cs @@ -180,7 +180,7 @@ private async Task ExecuteJobAsync(JobInfo jobInfo) return; } - catch (Exception ex) when (ex is OperationCanceledException || ex is 
TaskCanceledException) + catch (OperationCanceledException ex) { _logger.LogWarning(ex, "Job with id: {JobId} and group id: {GroupId} of type: {JobType} canceled.", jobInfo.Id, jobInfo.GroupId, jobInfo.QueueType); jobInfo.Status = JobStatus.Cancelled; diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTests.cs index 892e3ab26e..ed7598f373 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTests.cs @@ -845,12 +845,17 @@ public async Task GivenImportTriggeredWithMultipleFiles_ThenDataShouldBeImported { var resourceCount = Regex.Matches(patientNdJsonResource, "{\"resourceType\":").Count * 2; var notificationList = _metricHandler.NotificationMapping[typeof(ImportJobMetricsNotification)]; - Assert.Single(notificationList); - var notification = notificationList.First() as ImportJobMetricsNotification; - Assert.Equal(JobStatus.Completed.ToString(), notification.Status); - Assert.NotNull(notification.DataSize); - Assert.Equal(resourceCount, notification.SucceededCount); - Assert.Equal(0, notification.FailedCount); + Assert.Equal(2, notificationList.Count); + var succeeded = 0L; + foreach (var notification in notificationList.Select(_ => (ImportJobMetricsNotification)_)) + { + Assert.Equal(JobStatus.Completed.ToString(), notification.Status); + Assert.NotNull(notification.DataSize); + succeeded += notification.SucceededCount.Value; + Assert.Equal(0, notification.FailedCount); + } + + Assert.Equal(resourceCount, succeeded); } } diff --git a/tools/PerfTester/Program.cs b/tools/PerfTester/Program.cs index de5f05390d..f9230a23b5 100644 --- a/tools/PerfTester/Program.cs +++ b/tools/PerfTester/Program.cs @@ -57,6 +57,8 @@ public static void Main() _sqlRetryService = SqlRetryService.GetInstance(iSqlConnectionBuilder); _store = new SqlStoreClient(_sqlRetryService, NullLogger.Instance); + 
DumpResourceIds(); + if (_callType == "GetDate" || _callType == "LogEvent") { Console.WriteLine($"Start at {DateTime.UtcNow.ToString("s")}"); @@ -85,8 +87,6 @@ public static void Main() return; } - DumpResourceIds(); - var resourceIds = GetRandomIds(); SwitchToResourceTable(); ExecuteParallelCalls(resourceIds); // compare this From 00c22e619f721f08b762c9df1d5ec49928dc90c3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 11 Apr 2024 14:18:36 -0700 Subject: [PATCH 147/155] Bump Microsoft.SqlServer.SqlManagementObjects from 170.21.0 to 171.30.0 (#3801) --- Directory.Packages.props | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Directory.Packages.props b/Directory.Packages.props index 8a2782bba0..900bd4df1c 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -97,7 +97,7 @@ - + From 4b94e40d832eac4e0aa186d65f0ddf0797062e21 Mon Sep 17 00:00:00 2001 From: Mikael Weaver Date: Fri, 12 Apr 2024 13:19:47 -0700 Subject: [PATCH 148/155] Update import to return bad requset for duplicate input (#3763) * Changnd import dupliate file to bad request * Added check on input controller for duplicate files. 
* Update tests for handling at controller * Updated error message * Changed unit test to check for input url * removed duplicate check --- .../Resources.Designer.cs | 9 ++++ src/Microsoft.Health.Fhir.Api/Resources.resx | 3 ++ .../Controllers/ImportControllerTests.cs | 41 ++++++++++++++++++- .../Controllers/ImportController.cs | 8 ++++ .../Import/ImportOrchestratorJob.cs | 7 ---- .../Rest/Import/ImportTests.cs | 36 ++++++++++++++++ 6 files changed, 95 insertions(+), 9 deletions(-) diff --git a/src/Microsoft.Health.Fhir.Api/Resources.Designer.cs b/src/Microsoft.Health.Fhir.Api/Resources.Designer.cs index e681303db3..669a490d3d 100644 --- a/src/Microsoft.Health.Fhir.Api/Resources.Designer.cs +++ b/src/Microsoft.Health.Fhir.Api/Resources.Designer.cs @@ -267,6 +267,15 @@ public static string ImportModeIsNotRecognized { } } + /// + /// Looks up a localized string similar to Import request cannot have duplicate files. Found duplicates: {0}. . + /// + public static string ImportRequestDuplicateInputFiles { + get { + return ResourceManager.GetString("ImportRequestDuplicateInputFiles", resourceCulture); + } + } + /// /// Looks up a localized string similar to Import request must be specified as a Paramters. The body provided in this request is not valid. . /// diff --git a/src/Microsoft.Health.Fhir.Api/Resources.resx b/src/Microsoft.Health.Fhir.Api/Resources.resx index 34a5701129..16838bf404 100644 --- a/src/Microsoft.Health.Fhir.Api/Resources.resx +++ b/src/Microsoft.Health.Fhir.Api/Resources.resx @@ -323,6 +323,9 @@ Import request must be specified as a Paramters. The body provided in this request is not valid. + + Import request cannot have duplicate files. Found duplicates: {0}. + The template collection reference '{0}' is invalid. 
{0}: template collection reference diff --git a/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Controllers/ImportControllerTests.cs b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Controllers/ImportControllerTests.cs index 51f7312cb0..c19b5371a1 100644 --- a/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Controllers/ImportControllerTests.cs +++ b/src/Microsoft.Health.Fhir.Shared.Api.UnitTests/Controllers/ImportControllerTests.cs @@ -41,7 +41,7 @@ public class ImportControllerTests GetValidBulkImportRequestConfiguration(), }; - public static TheoryData InValidBody => + public static TheoryData InvalidBody => new TheoryData { GetBulkImportRequestConfigurationWithUnsupportedInputFormat(), @@ -63,7 +63,7 @@ public async Task GivenAnBulkImportRequest_WhenDisabled_ThenRequestNotValidExcep } [Theory] - [MemberData(nameof(InValidBody), MemberType = typeof(ImportControllerTests))] + [MemberData(nameof(InvalidBody), MemberType = typeof(ImportControllerTests))] public async Task GivenAnBulkImportRequest_WhenRequestConfigurationNotValid_ThenRequestNotValidExceptionShouldBeThrown(ImportRequest body) { var bulkImportController = GetController(new ImportTaskConfiguration() { Enabled = true }); @@ -80,6 +80,18 @@ public async Task GivenAnBulkImportRequest_WhenRequestWithNullParameters_ThenReq await Assert.ThrowsAsync(() => bulkImportController.Import(parameters)); } + [Fact] + public async Task GivenAnBulkImportRequest_WhenRequestWithDuplicateFiles_ThenRequestNotValidExceptionShouldBeThrown() + { + var requestWithDuplicateUrls = GetDuplicateFileImportRequest(); + var bulkImportController = GetController(new ImportTaskConfiguration() { Enabled = true }); + + var controllerException = await Assert.ThrowsAsync( + () => bulkImportController.Import(requestWithDuplicateUrls.ToParameters())); + + Assert.Contains(requestWithDuplicateUrls.Input[0].Url.ToString(), controllerException.Message); + } + private ImportController GetController(ImportTaskConfiguration bulkImportConfig) { var 
operationConfig = new OperationsConfiguration() @@ -128,6 +140,31 @@ private static ImportRequest GetValidBulkImportRequestConfiguration() return importRequest; } + private static ImportRequest GetDuplicateFileImportRequest() + { + var input = new List + { + new InputResource + { + Type = "Patient", + Url = new Uri("https://client.example.org/patient_file_2.ndjson"), + }, + new InputResource + { + Type = "Patient", + Url = new Uri("https://client.example.org/patient_file_2.ndjson"), + }, + }; + + var importRequest = new ImportRequest(); + importRequest.InputFormat = "application/fhir+ndjson"; + importRequest.InputSource = new Uri("https://other-server.example.org"); + importRequest.Input = input; + importRequest.StorageDetail = new ImportRequestStorageDetail(); + + return importRequest; + } + private static ImportRequest GetBulkImportRequestConfigurationWithUnsupportedInputFormat() { var input = new List diff --git a/src/Microsoft.Health.Fhir.Shared.Api/Controllers/ImportController.cs b/src/Microsoft.Health.Fhir.Shared.Api/Controllers/ImportController.cs index 7b4467da0c..651ec4b376 100644 --- a/src/Microsoft.Health.Fhir.Shared.Api/Controllers/ImportController.cs +++ b/src/Microsoft.Health.Fhir.Shared.Api/Controllers/ImportController.cs @@ -198,6 +198,14 @@ private void ValidateImportRequestConfiguration(ImportRequest importData) throw new RequestNotValidException(string.Format(Resources.ImportRequestValueNotValid, nameof(input))); } + var duplicateInputUrls = input.GroupBy(item => item.Url).Where(group => group.Count() > 1).Select(group => group.Key); + + if (duplicateInputUrls.Any()) + { + var duplicateUrlString = string.Join(", ", duplicateInputUrls); + throw new RequestNotValidException(string.Format(Resources.ImportRequestDuplicateInputFiles, duplicateUrlString)); + } + foreach (var item in input) { if (!string.IsNullOrEmpty(item.Type) && !Enum.IsDefined(typeof(ResourceType), item.Type)) diff --git 
a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportOrchestratorJob.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportOrchestratorJob.cs index 65a9b42a2b..39b3903362 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportOrchestratorJob.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Operations/Import/ImportOrchestratorJob.cs @@ -277,13 +277,6 @@ private async Task> EnqueueProcessingJobsAsync(IEnumerable (await _queueClient.EnqueueAsync(QueueType.Import, cancellationToken, groupId: groupId, definitions: definitions.ToArray())).Select(x => x.Id).OrderBy(x => x).ToList()); return jobIds; } - catch (SqlException ex) when (ex.Number == 2627) - { - const string message = "Duplicate file detected in list of files to import."; - _logger.LogJobError(ex, orchestratorInfo, message); - var error = new ImportJobErrorResult() { ErrorMessage = ex.Message, ErrorDetails = ex.ToString(), HttpStatusCode = HttpStatusCode.BadRequest }; - throw new JobExecutionException(message, error, ex); - } catch (Exception ex) { _logger.LogJobError(ex, orchestratorInfo, "Failed to enqueue jobs."); diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTests.cs index ed7598f373..2634a0f289 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.E2E/Rest/Import/ImportTests.cs @@ -1140,6 +1140,42 @@ public async Task GivenImportInvalidResourceType_ThenBadRequestShouldBeReturned( Assert.Equal(HttpStatusCode.BadRequest, fhirException.StatusCode); } + [Fact] + public async Task GivenImportRequestWithMultipleSameFile_ThenBadRequestShouldBeReturned() + { + _metricHandler?.ResetCount(); + string patientNdJsonResource = Samples.GetNdJson("Import-SinglePatientTemplate"); + string resourceId1 = Guid.NewGuid().ToString("N"); + string patientNdJsonResource1 = 
patientNdJsonResource.Replace("##PatientID##", resourceId1); + + (Uri location1, string _) = await ImportTestHelper.UploadFileAsync(patientNdJsonResource1, _fixture.StorageAccount); + + var request = new ImportRequest() + { + InputFormat = "application/fhir+ndjson", + InputSource = new Uri("https://other-server.example.org"), + StorageDetail = new ImportRequestStorageDetail() { Type = "azure-blob" }, + Input = new List() + { + new InputResource() + { + Url = location1, + Type = "Patient", + }, + new InputResource() + { + Url = location1, + Type = "Patient", + }, + }, + Mode = ImportMode.InitialLoad.ToString(), + }; + + FhirClientException fhirException = await Assert.ThrowsAsync( + async () => await _client.ImportAsync(request.ToParameters())); + Assert.Equal(HttpStatusCode.BadRequest, fhirException.StatusCode); + } + private async Task ImportCheckAsync(ImportRequest request, TestFhirClient client = null, int? errorCount = null) { client = client ?? _client; From 31aeb3193b08abc314a0465b61952704749a29b2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 12 Apr 2024 16:02:38 -0700 Subject: [PATCH 149/155] Bump dotnet/sdk to 8.0.204-cbl-mariner2.0 in /build/docker (#3807) --- Directory.Packages.props | 4 ++-- build/docker/Dockerfile | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Directory.Packages.props b/Directory.Packages.props index 900bd4df1c..e66667d6e0 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -32,7 +32,7 @@ - + @@ -123,4 +123,4 @@ - + \ No newline at end of file diff --git a/build/docker/Dockerfile b/build/docker/Dockerfile index 364f2a57a0..5e061fcc95 100644 --- a/build/docker/Dockerfile +++ b/build/docker/Dockerfile @@ -1,4 +1,4 @@ -FROM mcr.microsoft.com/dotnet/sdk:8.0.202-cbl-mariner2.0 AS build +FROM mcr.microsoft.com/dotnet/sdk:8.0.204-cbl-mariner2.0 AS build ARG FHIR_VERSION ARG ASSEMBLY_VER @@ -69,7 +69,7 @@ COPY . . 
RUN dotnet publish /repo/src/Microsoft.Health.Fhir.${FHIR_VERSION}.Web/Microsoft.Health.Fhir.${FHIR_VERSION}.Web.csproj -c Release -o "/build" --no-restore -p:AssemblyVersion="${ASSEMBLY_VER}" -p:FileVersion="${ASSEMBLY_VER}" -p:Version="${ASSEMBLY_VER}" -f net8.0 -FROM mcr.microsoft.com/dotnet/aspnet:8.0.3-cbl-mariner2.0 AS runtime +FROM mcr.microsoft.com/dotnet/aspnet:8.0.4-cbl-mariner2.0 AS runtime ARG FHIR_VERSION From dfce3aaed689ff717843de2b3677d9333b060650 Mon Sep 17 00:00:00 2001 From: SergeyGaluzo <95932081+SergeyGaluzo@users.noreply.github.com> Date: Tue, 16 Apr 2024 12:35:53 -0700 Subject: [PATCH 150/155] Added stats usage logging (#3812) --- .../Features/Search/SqlServerSearchService.cs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Search/SqlServerSearchService.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Search/SqlServerSearchService.cs index db74037eef..1744550429 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Search/SqlServerSearchService.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Search/SqlServerSearchService.cs @@ -1260,6 +1260,7 @@ private async Task Create(string tableName, string columnName, short resourceTyp { if (_stats.ContainsKey((tableName, columnName, resourceTypeId, searchParamId))) { + logger.LogInformation("ResourceSearchParamStats.FoundInCache Table={Table} Column={Column} Type={ResourceType} Param={SearchParam}", tableName, columnName, resourceTypeId, searchParamId); return; } From b5aa4da6bc8bfb05df59265b149814a32f3a1a1e Mon Sep 17 00:00:00 2001 From: SergeyGaluzo <95932081+SergeyGaluzo@users.noreply.github.com> Date: Thu, 18 Apr 2024 08:40:03 -0700 Subject: [PATCH 151/155] cleanup signatures (remove heavy heartbeats) (#3817) * extra cleanup * started * 78 --- .../Features/Schema/Migrations/78.diff.sql | 102 + .../Features/Schema/Migrations/78.sql | 5143 +++++++++++++++++ .../Features/Schema/SchemaVersion.cs | 1 + .../Features/Schema/SchemaVersionConstants.cs 
| 2 +- .../TransactionCheckWithInitialiScript.sql | 2 +- .../GetResourcesByTypeAndSurrogateIdRange.sql | 3 +- .../Schema/Sql/Sprocs/PutJobHeartbeat.sql | 33 +- .../Features/Storage/SqlQueueClient.cs | 14 - .../Microsoft.Health.Fhir.SqlServer.csproj | 2 +- .../JobHosting.cs | 21 +- .../Persistence/QueueClientTests.cs | 119 - 11 files changed, 5261 insertions(+), 181 deletions(-) create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/78.diff.sql create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/78.sql diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/78.diff.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/78.diff.sql new file mode 100644 index 0000000000..0745f7bd58 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/78.diff.sql @@ -0,0 +1,102 @@ +ALTER PROCEDURE dbo.PutJobHeartbeat @QueueType tinyint, @JobId bigint, @Version bigint, @Data bigint = NULL, @CancelRequested bit = 0 OUTPUT +AS +set nocount on +DECLARE @SP varchar(100) = 'PutJobHeartbeat' + ,@Mode varchar(100) + ,@st datetime = getUTCdate() + ,@Rows int = 0 + ,@PartitionId tinyint = @JobId % 16 + +SET @Mode = 'Q='+convert(varchar,@QueueType)+' J='+convert(varchar,@JobId)+' P='+convert(varchar,@PartitionId)+' V='+convert(varchar,@Version)+' D='+isnull(convert(varchar,@Data),'NULL') + +BEGIN TRY + UPDATE dbo.JobQueue + SET @CancelRequested = CancelRequested + ,HeartbeatDate = getUTCdate() + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId + AND Status = 1 + AND Version = @Version + SET @Rows = @@rowcount + + IF @Rows = 0 AND NOT EXISTS (SELECT * FROM dbo.JobQueue WHERE QueueType = @QueueType AND PartitionId = @PartitionId AND JobId = @JobId AND Version = @Version AND Status IN (2,3,4)) + BEGIN + IF EXISTS (SELECT * FROM dbo.JobQueue WHERE QueueType = @QueueType AND PartitionId = @PartitionId AND JobId = @JobId) + THROW 50412, 'Precondition 
failed', 1 + ELSE + THROW 50404, 'Job record not found', 1 + END + + EXECUTE dbo.LogEvent @Process=@SP,@Mode=@Mode,@Status='End',@Start=@st,@Rows=@Rows +END TRY +BEGIN CATCH + IF error_number() = 1750 THROW -- Real error is before 1750, cannot trap in SQL. + EXECUTE dbo.LogEvent @Process=@SP,@Mode=@Mode,@Status='Error'; + THROW +END CATCH +GO +ALTER PROCEDURE dbo.GetResourcesByTypeAndSurrogateIdRange @ResourceTypeId smallint, @StartId bigint, @EndId bigint, @GlobalEndId bigint = NULL, @IncludeHistory bit = 0, @IncludeDeleted bit = 0 +AS +set nocount on +DECLARE @SP varchar(100) = 'GetResourcesByTypeAndSurrogateIdRange' + ,@Mode varchar(100) = 'RT='+isnull(convert(varchar,@ResourceTypeId),'NULL') + +' S='+isnull(convert(varchar,@StartId),'NULL') + +' E='+isnull(convert(varchar,@EndId),'NULL') + +' GE='+isnull(convert(varchar,@GlobalEndId),'NULL') + +' HI='+isnull(convert(varchar,@IncludeHistory),'NULL') + +' DE'+isnull(convert(varchar,@IncludeDeleted),'NULL') + ,@st datetime = getUTCdate() + ,@DummyTop bigint = 9223372036854775807 + +BEGIN TRY + DECLARE @ResourceIds TABLE (ResourceId varchar(64) COLLATE Latin1_General_100_CS_AS PRIMARY KEY) + DECLARE @SurrogateIds TABLE (MaxSurrogateId bigint PRIMARY KEY) + + IF @GlobalEndId IS NOT NULL AND @IncludeHistory = 0 -- snapshot view + BEGIN + INSERT INTO @ResourceIds + SELECT DISTINCT ResourceId + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId BETWEEN @StartId AND @EndId + AND IsHistory = 1 + AND (IsDeleted = 0 OR @IncludeDeleted = 1) + OPTION (MAXDOP 1) + + IF @@rowcount > 0 + INSERT INTO @SurrogateIds + SELECT ResourceSurrogateId + FROM (SELECT ResourceId, ResourceSurrogateId, RowId = row_number() OVER (PARTITION BY ResourceId ORDER BY ResourceSurrogateId DESC) + FROM dbo.Resource WITH (INDEX = IX_Resource_ResourceTypeId_ResourceId_Version) -- w/o hint access to Resource table is inefficient when many versions are present. Hint is ignored if Resource is a view. 
+ WHERE ResourceTypeId = @ResourceTypeId + AND ResourceId IN (SELECT TOP (@DummyTop) ResourceId FROM @ResourceIds) + AND ResourceSurrogateId BETWEEN @StartId AND @GlobalEndId + ) A + WHERE RowId = 1 + AND ResourceSurrogateId BETWEEN @StartId AND @EndId + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)) + END + + SELECT ResourceTypeId, ResourceId, Version, IsDeleted, ResourceSurrogateId, RequestMethod, IsMatch = convert(bit,1), IsPartial = convert(bit,0), IsRawResourceMetaSet, SearchParamHash, RawResource + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId BETWEEN @StartId AND @EndId + AND (IsHistory = 0 OR @IncludeHistory = 1) + AND (IsDeleted = 0 OR @IncludeDeleted = 1) + UNION ALL + SELECT ResourceTypeId, ResourceId, Version, IsDeleted, ResourceSurrogateId, RequestMethod, IsMatch = convert(bit,1), IsPartial = convert(bit,0), IsRawResourceMetaSet, SearchParamHash, RawResource + FROM @SurrogateIds + JOIN dbo.Resource ON ResourceTypeId = @ResourceTypeId AND ResourceSurrogateId = MaxSurrogateId + WHERE IsHistory = 1 + AND (IsDeleted = 0 OR @IncludeDeleted = 1) + OPTION (MAXDOP 1) + + EXECUTE dbo.LogEvent @Process=@SP,@Mode=@Mode,@Status='End',@Start=@st,@Rows=@@rowcount +END TRY +BEGIN CATCH + IF error_number() = 1750 THROW -- Real error is before 1750, cannot trap in SQL. + EXECUTE dbo.LogEvent @Process=@SP,@Mode=@Mode,@Status='Error'; + THROW +END CATCH +GO diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/78.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/78.sql new file mode 100644 index 0000000000..40a0f21e8c --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/78.sql @@ -0,0 +1,5143 @@ + +/************************************************************************************************* + Auto-Generated from Sql build task. Do not manually edit it. 
+**************************************************************************************************/ +SET XACT_ABORT ON +BEGIN TRAN +IF EXISTS (SELECT * + FROM sys.tables + WHERE name = 'ClaimType') + BEGIN + ROLLBACK; + RETURN; + END + + +GO +INSERT INTO dbo.SchemaVersion +VALUES (78, 'started'); + +CREATE PARTITION FUNCTION PartitionFunction_ResourceTypeId(SMALLINT) + AS RANGE RIGHT + FOR VALUES (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150); + +CREATE PARTITION SCHEME PartitionScheme_ResourceTypeId + AS PARTITION PartitionFunction_ResourceTypeId + ALL TO ([PRIMARY]); + + +GO +CREATE PARTITION FUNCTION PartitionFunction_ResourceChangeData_Timestamp(DATETIME2 (7)) + AS RANGE RIGHT + FOR VALUES (N'1970-01-01T00:00:00.0000000'); + +CREATE PARTITION SCHEME PartitionScheme_ResourceChangeData_Timestamp + AS PARTITION PartitionFunction_ResourceChangeData_Timestamp + ALL TO ([PRIMARY]); + +DECLARE @numberOfHistoryPartitions AS INT = 48; + +DECLARE @numberOfFuturePartitions AS INT = 720; + +DECLARE @rightPartitionBoundary AS DATETIME2 (7); + +DECLARE @currentDateTime AS DATETIME2 (7) = sysutcdatetime(); + +WHILE @numberOfHistoryPartitions >= -@numberOfFuturePartitions + BEGIN + SET @rightPartitionBoundary = DATEADD(hour, DATEDIFF(hour, 0, @currentDateTime) - @numberOfHistoryPartitions, 0); + ALTER PARTITION SCHEME PartitionScheme_ResourceChangeData_Timestamp NEXT USED [Primary]; + ALTER 
PARTITION FUNCTION PartitionFunction_ResourceChangeData_Timestamp( ) + SPLIT RANGE (@rightPartitionBoundary); + SET @numberOfHistoryPartitions -= 1; + END + +CREATE SEQUENCE dbo.ResourceSurrogateIdUniquifierSequence + AS INT + START WITH 0 + INCREMENT BY 1 + MINVALUE 0 + MAXVALUE 79999 + CYCLE + CACHE 1000000; + +CREATE TYPE dbo.BigintList AS TABLE ( + Id BIGINT NOT NULL PRIMARY KEY); + +CREATE TYPE dbo.DateTimeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + StartDateTime DATETIMEOFFSET (7) NOT NULL, + EndDateTime DATETIMEOFFSET (7) NOT NULL, + IsLongerThanADay BIT NOT NULL, + IsMin BIT NOT NULL, + IsMax BIT NOT NULL UNIQUE (ResourceTypeId, ResourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, IsMin, IsMax)); + +CREATE TYPE dbo.NumberSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SingleValue DECIMAL (36, 18) NULL, + LowValue DECIMAL (36, 18) NULL, + HighValue DECIMAL (36, 18) NULL UNIQUE (ResourceTypeId, ResourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue)); + +CREATE TYPE dbo.QuantitySearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId INT NULL, + QuantityCodeId INT NULL, + SingleValue DECIMAL (36, 18) NULL, + LowValue DECIMAL (36, 18) NULL, + HighValue DECIMAL (36, 18) NULL UNIQUE (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue)); + +CREATE TYPE dbo.ReferenceSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + BaseUri VARCHAR (128) COLLATE Latin1_General_100_CS_AS NULL, + ReferenceResourceTypeId SMALLINT NULL, + ReferenceResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + 
ReferenceResourceVersion INT NULL UNIQUE (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId)); + +CREATE TYPE dbo.ReferenceTokenCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + BaseUri1 VARCHAR (128) COLLATE Latin1_General_100_CS_AS NULL, + ReferenceResourceTypeId1 SMALLINT NULL, + ReferenceResourceId1 VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ReferenceResourceVersion1 INT NULL, + SystemId2 INT NULL, + Code2 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow2 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL); + +CREATE TYPE dbo.ResourceDateKeyList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ResourceSurrogateId BIGINT NOT NULL PRIMARY KEY (ResourceTypeId, ResourceId, ResourceSurrogateId)); + +CREATE TYPE dbo.ResourceKeyList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Version INT NULL UNIQUE (ResourceTypeId, ResourceId, Version)); + +CREATE TYPE dbo.ResourceList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Version INT NOT NULL, + HasVersionToCompare BIT NOT NULL, + IsDeleted BIT NOT NULL, + IsHistory BIT NOT NULL, + KeepHistory BIT NOT NULL, + RawResource VARBINARY (MAX) NOT NULL, + IsRawResourceMetaSet BIT NOT NULL, + RequestMethod VARCHAR (10) NULL, + SearchParamHash VARCHAR (64) NULL PRIMARY KEY (ResourceTypeId, ResourceSurrogateId), + UNIQUE (ResourceTypeId, ResourceId, Version)); + +CREATE TYPE dbo.ResourceWriteClaimList AS TABLE ( + ResourceSurrogateId BIGINT NOT NULL, + ClaimTypeId TINYINT NOT NULL, + ClaimValue NVARCHAR (128) NOT NULL); + +CREATE TYPE dbo.StringList AS TABLE ( + String VARCHAR (MAX)); + +CREATE 
TYPE dbo.StringSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Text NVARCHAR (256) COLLATE Latin1_General_100_CI_AI_SC NOT NULL, + TextOverflow NVARCHAR (MAX) COLLATE Latin1_General_100_CI_AI_SC NULL, + IsMin BIT NOT NULL, + IsMax BIT NOT NULL); + +CREATE TYPE dbo.TokenDateTimeCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + StartDateTime2 DATETIMEOFFSET (7) NOT NULL, + EndDateTime2 DATETIMEOFFSET (7) NOT NULL, + IsLongerThanADay2 BIT NOT NULL); + +CREATE TYPE dbo.TokenNumberNumberCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + SingleValue2 DECIMAL (36, 18) NULL, + LowValue2 DECIMAL (36, 18) NULL, + HighValue2 DECIMAL (36, 18) NULL, + SingleValue3 DECIMAL (36, 18) NULL, + LowValue3 DECIMAL (36, 18) NULL, + HighValue3 DECIMAL (36, 18) NULL, + HasRange BIT NOT NULL); + +CREATE TYPE dbo.TokenQuantityCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + SystemId2 INT NULL, + QuantityCodeId2 INT NULL, + SingleValue2 DECIMAL (36, 18) NULL, + LowValue2 DECIMAL (36, 18) NULL, + HighValue2 DECIMAL (36, 18) NULL); + +CREATE TYPE dbo.TokenSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + 
SearchParamId SMALLINT NOT NULL, + SystemId INT NULL, + Code VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL); + +CREATE TYPE dbo.TokenStringCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + Text2 NVARCHAR (256) COLLATE Latin1_General_100_CI_AI_SC NOT NULL, + TextOverflow2 NVARCHAR (MAX) COLLATE Latin1_General_100_CI_AI_SC NULL); + +CREATE TYPE dbo.TokenTextList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Text NVARCHAR (400) COLLATE Latin1_General_CI_AI NOT NULL); + +CREATE TYPE dbo.TokenTokenCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + SystemId2 INT NULL, + Code2 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow2 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL); + +CREATE TYPE dbo.SearchParamTableType_2 AS TABLE ( + Uri VARCHAR (128) COLLATE Latin1_General_100_CS_AS NOT NULL, + Status VARCHAR (20) NOT NULL, + IsPartiallySupported BIT NOT NULL); + +CREATE TYPE dbo.BulkReindexResourceTableType_1 AS TABLE ( + Offset INT NOT NULL, + ResourceTypeId SMALLINT NOT NULL, + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ETag INT NULL, + SearchParamHash VARCHAR (64) NOT NULL); + +CREATE TYPE dbo.UriSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Uri VARCHAR (256) COLLATE 
Latin1_General_100_CS_AS NOT NULL PRIMARY KEY (ResourceTypeId, ResourceSurrogateId, SearchParamId, Uri)); + +CREATE TABLE dbo.ClaimType ( + ClaimTypeId TINYINT IDENTITY (1, 1) NOT NULL, + Name VARCHAR (128) COLLATE Latin1_General_100_CS_AS NOT NULL, + CONSTRAINT UQ_ClaimType_ClaimTypeId UNIQUE (ClaimTypeId), + CONSTRAINT PKC_ClaimType PRIMARY KEY CLUSTERED (Name) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE dbo.CompartmentAssignment ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + CompartmentTypeId TINYINT NOT NULL, + ReferenceResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + IsHistory BIT NOT NULL, + CONSTRAINT PKC_CompartmentAssignment PRIMARY KEY CLUSTERED (ResourceTypeId, ResourceSurrogateId, CompartmentTypeId, ReferenceResourceId) WITH (DATA_COMPRESSION = PAGE) ON PartitionScheme_ResourceTypeId (ResourceTypeId) +); + + +GO +ALTER TABLE dbo.CompartmentAssignment + ADD CONSTRAINT DF_CompartmentAssignment_IsHistory DEFAULT 0 FOR IsHistory; + + +GO +ALTER TABLE dbo.CompartmentAssignment SET (LOCK_ESCALATION = AUTO); + + +GO +CREATE NONCLUSTERED INDEX IX_CompartmentAssignment_CompartmentTypeId_ReferenceResourceId + ON dbo.CompartmentAssignment(ResourceTypeId, CompartmentTypeId, ReferenceResourceId, ResourceSurrogateId) WHERE IsHistory = 0 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.CompartmentType ( + CompartmentTypeId TINYINT IDENTITY (1, 1) NOT NULL, + Name VARCHAR (128) COLLATE Latin1_General_100_CS_AS NOT NULL, + CONSTRAINT UQ_CompartmentType_CompartmentTypeId UNIQUE (CompartmentTypeId), + CONSTRAINT PKC_CompartmentType PRIMARY KEY CLUSTERED (Name) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE dbo.DateTimeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + StartDateTime DATETIME2 (7) NOT NULL, + EndDateTime DATETIME2 (7) NOT NULL, + IsLongerThanADay BIT NOT NULL, + 
IsMin BIT CONSTRAINT date_IsMin_Constraint DEFAULT 0 NOT NULL, + IsMax BIT CONSTRAINT date_IsMax_Constraint DEFAULT 0 NOT NULL +); + +ALTER TABLE dbo.DateTimeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_DateTimeSearchParam + ON dbo.DateTimeSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_StartDateTime_EndDateTime_INCLUDE_IsLongerThanADay_IsMin_IsMax + ON dbo.DateTimeSearchParam(SearchParamId, StartDateTime, EndDateTime) + INCLUDE(IsLongerThanADay, IsMin, IsMax) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_EndDateTime_StartDateTime_INCLUDE_IsLongerThanADay_IsMin_IsMax + ON dbo.DateTimeSearchParam(SearchParamId, EndDateTime, StartDateTime) + INCLUDE(IsLongerThanADay, IsMin, IsMax) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_StartDateTime_EndDateTime_INCLUDE_IsMin_IsMax_WHERE_IsLongerThanADay_1 + ON dbo.DateTimeSearchParam(SearchParamId, StartDateTime, EndDateTime) + INCLUDE(IsMin, IsMax) WHERE IsLongerThanADay = 1 + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_EndDateTime_StartDateTime_INCLUDE_IsMin_IsMax_WHERE_IsLongerThanADay_1 + ON dbo.DateTimeSearchParam(SearchParamId, EndDateTime, StartDateTime) + INCLUDE(IsMin, IsMax) WHERE IsLongerThanADay = 1 + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +IF NOT EXISTS (SELECT 1 + FROM sys.tables + WHERE name = 'EventAgentCheckpoint') + BEGIN + CREATE TABLE dbo.EventAgentCheckpoint ( + CheckpointId VARCHAR (64) NOT NULL, + LastProcessedDateTime DATETIMEOFFSET (7), + LastProcessedIdentifier VARCHAR (64) , + UpdatedOn DATETIME2 (7) DEFAULT sysutcdatetime() NOT NULL, + CONSTRAINT PK_EventAgentCheckpoint PRIMARY KEY CLUSTERED (CheckpointId) + ) ON [PRIMARY]; + END + +CREATE PARTITION FUNCTION EventLogPartitionFunction(TINYINT) + AS RANGE RIGHT + FOR VALUES (0, 1, 2, 3, 
4, 5, 6, 7); + + +GO +CREATE PARTITION SCHEME EventLogPartitionScheme + AS PARTITION EventLogPartitionFunction + ALL TO ([PRIMARY]); + + +GO +CREATE TABLE dbo.EventLog ( + PartitionId AS isnull(CONVERT (TINYINT, EventId % 8), 0) PERSISTED, + EventId BIGINT IDENTITY (1, 1) NOT NULL, + EventDate DATETIME NOT NULL, + Process VARCHAR (100) NOT NULL, + Status VARCHAR (10) NOT NULL, + Mode VARCHAR (200) NULL, + Action VARCHAR (20) NULL, + Target VARCHAR (100) NULL, + Rows BIGINT NULL, + Milliseconds INT NULL, + EventText NVARCHAR (3500) NULL, + SPID SMALLINT NOT NULL, + HostName VARCHAR (64) NOT NULL CONSTRAINT PKC_EventLog_EventDate_EventId_PartitionId PRIMARY KEY CLUSTERED (EventDate, EventId, PartitionId) ON EventLogPartitionScheme (PartitionId) +); + +CREATE TABLE dbo.ExportJob ( + Id VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Hash VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Status VARCHAR (10) NOT NULL, + HeartbeatDateTime DATETIME2 (7) NULL, + RawJobRecord VARCHAR (MAX) NOT NULL, + JobVersion ROWVERSION NOT NULL, + CONSTRAINT PKC_ExportJob PRIMARY KEY CLUSTERED (Id) +); + +CREATE UNIQUE NONCLUSTERED INDEX IX_ExportJob_Hash_Status_HeartbeatDateTime + ON dbo.ExportJob(Hash, Status, HeartbeatDateTime); + +CREATE TABLE dbo.IndexProperties ( + TableName VARCHAR (100) NOT NULL, + IndexName VARCHAR (200) NOT NULL, + PropertyName VARCHAR (100) NOT NULL, + PropertyValue VARCHAR (100) NOT NULL, + CreateDate DATETIME CONSTRAINT DF_IndexProperties_CreateDate DEFAULT getUTCdate() NOT NULL CONSTRAINT PKC_IndexProperties_TableName_IndexName_PropertyName PRIMARY KEY CLUSTERED (TableName, IndexName, PropertyName) +); + +CREATE PARTITION FUNCTION TinyintPartitionFunction(TINYINT) + AS RANGE RIGHT + FOR VALUES (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 
64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255); + + +GO +CREATE PARTITION SCHEME TinyintPartitionScheme + AS PARTITION TinyintPartitionFunction + ALL TO ([PRIMARY]); + + +GO +CREATE TABLE dbo.JobQueue ( + QueueType TINYINT NOT NULL, + GroupId BIGINT NOT NULL, + JobId BIGINT NOT NULL, + PartitionId AS CONVERT (TINYINT, JobId % 16) PERSISTED, + Definition VARCHAR (MAX) NOT NULL, + DefinitionHash VARBINARY (20) NOT NULL, + Version BIGINT CONSTRAINT DF_JobQueue_Version DEFAULT datediff_big(millisecond, '0001-01-01', getUTCdate()) NOT NULL, + Status TINYINT CONSTRAINT DF_JobQueue_Status DEFAULT 0 NOT NULL, + Priority TINYINT CONSTRAINT DF_JobQueue_Priority DEFAULT 100 NOT NULL, + Data BIGINT NULL, + Result VARCHAR (MAX) NULL, + CreateDate DATETIME CONSTRAINT DF_JobQueue_CreateDate DEFAULT getUTCdate() NOT NULL, + StartDate DATETIME NULL, + EndDate DATETIME NULL, + HeartbeatDate DATETIME CONSTRAINT DF_JobQueue_HeartbeatDate DEFAULT getUTCdate() NOT NULL, + Worker VARCHAR (100) NULL, + Info VARCHAR (1000) NULL, + CancelRequested BIT CONSTRAINT DF_JobQueue_CancelRequested DEFAULT 0 NOT NULL CONSTRAINT 
PKC_JobQueue_QueueType_PartitionId_JobId PRIMARY KEY CLUSTERED (QueueType, PartitionId, JobId) ON TinyintPartitionScheme (QueueType), + CONSTRAINT U_JobQueue_QueueType_JobId UNIQUE (QueueType, JobId) +); + + +GO +CREATE INDEX IX_QueueType_PartitionId_Status_Priority + ON dbo.JobQueue(PartitionId, Status, Priority) + ON TinyintPartitionScheme (QueueType); + + +GO +CREATE INDEX IX_QueueType_GroupId + ON dbo.JobQueue(QueueType, GroupId) + ON TinyintPartitionScheme (QueueType); + + +GO +CREATE INDEX IX_QueueType_DefinitionHash + ON dbo.JobQueue(QueueType, DefinitionHash) + ON TinyintPartitionScheme (QueueType); + +CREATE TABLE dbo.NumberSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SingleValue DECIMAL (36, 18) NULL, + LowValue DECIMAL (36, 18) NOT NULL, + HighValue DECIMAL (36, 18) NOT NULL +); + +ALTER TABLE dbo.NumberSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_NumberSearchParam + ON dbo.NumberSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_SingleValue_WHERE_SingleValue_NOT_NULL + ON dbo.NumberSearchParam(SearchParamId, SingleValue) WHERE SingleValue IS NOT NULL + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_LowValue_HighValue + ON dbo.NumberSearchParam(SearchParamId, LowValue, HighValue) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_HighValue_LowValue + ON dbo.NumberSearchParam(SearchParamId, HighValue, LowValue) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.Parameters ( + Id VARCHAR (100) NOT NULL, + Date DATETIME NULL, + Number FLOAT NULL, + Bigint BIGINT NULL, + Char VARCHAR (4000) NULL, + Binary VARBINARY (MAX) NULL, + UpdatedDate DATETIME NULL, + UpdatedBy NVARCHAR (255) NULL CONSTRAINT PKC_Parameters_Id PRIMARY KEY CLUSTERED (Id) WITH 
(IGNORE_DUP_KEY = ON) +); + + +GO +CREATE TABLE dbo.ParametersHistory ( + ChangeId INT IDENTITY (1, 1) NOT NULL, + Id VARCHAR (100) NOT NULL, + Date DATETIME NULL, + Number FLOAT NULL, + Bigint BIGINT NULL, + Char VARCHAR (4000) NULL, + Binary VARBINARY (MAX) NULL, + UpdatedDate DATETIME NULL, + UpdatedBy NVARCHAR (255) NULL +); + +CREATE TABLE dbo.QuantityCode ( + QuantityCodeId INT IDENTITY (1, 1) NOT NULL, + Value NVARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CONSTRAINT UQ_QuantityCode_QuantityCodeId UNIQUE (QuantityCodeId), + CONSTRAINT PKC_QuantityCode PRIMARY KEY CLUSTERED (Value) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE dbo.QuantitySearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId INT NULL, + QuantityCodeId INT NULL, + SingleValue DECIMAL (36, 18) NULL, + LowValue DECIMAL (36, 18) NOT NULL, + HighValue DECIMAL (36, 18) NOT NULL +); + +ALTER TABLE dbo.QuantitySearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_QuantitySearchParam + ON dbo.QuantitySearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_QuantityCodeId_SingleValue_INCLUDE_SystemId_WHERE_SingleValue_NOT_NULL + ON dbo.QuantitySearchParam(SearchParamId, QuantityCodeId, SingleValue) + INCLUDE(SystemId) WHERE SingleValue IS NOT NULL + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_QuantityCodeId_LowValue_HighValue_INCLUDE_SystemId + ON dbo.QuantitySearchParam(SearchParamId, QuantityCodeId, LowValue, HighValue) + INCLUDE(SystemId) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_QuantityCodeId_HighValue_LowValue_INCLUDE_SystemId + ON dbo.QuantitySearchParam(SearchParamId, QuantityCodeId, HighValue, LowValue) + INCLUDE(SystemId) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE 
dbo.ReferenceSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + BaseUri VARCHAR (128) COLLATE Latin1_General_100_CS_AS NULL, + ReferenceResourceTypeId SMALLINT NULL, + ReferenceResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ReferenceResourceVersion INT NULL +); + +ALTER TABLE dbo.ReferenceSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_ReferenceSearchParam + ON dbo.ReferenceSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE UNIQUE INDEX IXU_ReferenceResourceId_ReferenceResourceTypeId_SearchParamId_BaseUri_ResourceSurrogateId_ResourceTypeId + ON dbo.ReferenceSearchParam(ReferenceResourceId, ReferenceResourceTypeId, SearchParamId, BaseUri, ResourceSurrogateId, ResourceTypeId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.ReferenceTokenCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + BaseUri1 VARCHAR (128) COLLATE Latin1_General_100_CS_AS NULL, + ReferenceResourceTypeId1 SMALLINT NULL, + ReferenceResourceId1 VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ReferenceResourceVersion1 INT NULL, + SystemId2 INT NULL, + Code2 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow2 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.ReferenceTokenCompositeSearchParam + ADD CONSTRAINT CHK_ReferenceTokenCompositeSearchParam_CodeOverflow2 CHECK (LEN(Code2) = 256 + OR CodeOverflow2 IS NULL); + +ALTER TABLE dbo.ReferenceTokenCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_ReferenceTokenCompositeSearchParam + ON dbo.ReferenceTokenCompositeSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) 
+ ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_ReferenceResourceId1_Code2_INCLUDE_ReferenceResourceTypeId1_BaseUri1_SystemId2 + ON dbo.ReferenceTokenCompositeSearchParam(SearchParamId, ReferenceResourceId1, Code2) + INCLUDE(ReferenceResourceTypeId1, BaseUri1, SystemId2) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.ReindexJob ( + Id VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Status VARCHAR (10) NOT NULL, + HeartbeatDateTime DATETIME2 (7) NULL, + RawJobRecord VARCHAR (MAX) NOT NULL, + JobVersion ROWVERSION NOT NULL, + CONSTRAINT PKC_ReindexJob PRIMARY KEY CLUSTERED (Id) +); + +CREATE TABLE dbo.Resource ( + ResourceTypeId SMALLINT NOT NULL, + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Version INT NOT NULL, + IsHistory BIT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + IsDeleted BIT NOT NULL, + RequestMethod VARCHAR (10) NULL, + RawResource VARBINARY (MAX) NOT NULL, + IsRawResourceMetaSet BIT DEFAULT 0 NOT NULL, + SearchParamHash VARCHAR (64) NULL, + TransactionId BIGINT NULL, + HistoryTransactionId BIGINT NULL CONSTRAINT PKC_Resource PRIMARY KEY CLUSTERED (ResourceTypeId, ResourceSurrogateId) WITH (DATA_COMPRESSION = PAGE) ON PartitionScheme_ResourceTypeId (ResourceTypeId), + CONSTRAINT CH_Resource_RawResource_Length CHECK (RawResource > 0x0) +); + +ALTER TABLE dbo.Resource SET (LOCK_ESCALATION = AUTO); + +CREATE INDEX IX_ResourceTypeId_TransactionId + ON dbo.Resource(ResourceTypeId, TransactionId) WHERE TransactionId IS NOT NULL + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_ResourceTypeId_HistoryTransactionId + ON dbo.Resource(ResourceTypeId, HistoryTransactionId) WHERE HistoryTransactionId IS NOT NULL + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE UNIQUE NONCLUSTERED INDEX IX_Resource_ResourceTypeId_ResourceId_Version + ON dbo.Resource(ResourceTypeId, ResourceId, Version) + ON 
PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE UNIQUE NONCLUSTERED INDEX IX_Resource_ResourceTypeId_ResourceId + ON dbo.Resource(ResourceTypeId, ResourceId) + INCLUDE(Version, IsDeleted) WHERE IsHistory = 0 + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE UNIQUE NONCLUSTERED INDEX IX_Resource_ResourceTypeId_ResourceSurrgateId + ON dbo.Resource(ResourceTypeId, ResourceSurrogateId) WHERE IsHistory = 0 + AND IsDeleted = 0 + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.ResourceChangeData ( + Id BIGINT IDENTITY (1, 1) NOT NULL, + Timestamp DATETIME2 (7) CONSTRAINT DF_ResourceChangeData_Timestamp DEFAULT sysutcdatetime() NOT NULL, + ResourceId VARCHAR (64) NOT NULL, + ResourceTypeId SMALLINT NOT NULL, + ResourceVersion INT NOT NULL, + ResourceChangeTypeId TINYINT NOT NULL +) ON PartitionScheme_ResourceChangeData_Timestamp (Timestamp); + +CREATE CLUSTERED INDEX IXC_ResourceChangeData + ON dbo.ResourceChangeData(Id ASC) WITH (ONLINE = ON) + ON PartitionScheme_ResourceChangeData_Timestamp (Timestamp); + +CREATE TABLE dbo.ResourceChangeDataStaging ( + Id BIGINT IDENTITY (1, 1) NOT NULL, + Timestamp DATETIME2 (7) CONSTRAINT DF_ResourceChangeDataStaging_Timestamp DEFAULT sysutcdatetime() NOT NULL, + ResourceId VARCHAR (64) NOT NULL, + ResourceTypeId SMALLINT NOT NULL, + ResourceVersion INT NOT NULL, + ResourceChangeTypeId TINYINT NOT NULL +) ON [PRIMARY]; + +CREATE CLUSTERED INDEX IXC_ResourceChangeDataStaging + ON dbo.ResourceChangeDataStaging(Id ASC, Timestamp ASC) WITH (ONLINE = ON) + ON [PRIMARY]; + +ALTER TABLE dbo.ResourceChangeDataStaging WITH CHECK + ADD CONSTRAINT CHK_ResourceChangeDataStaging_partition CHECK (Timestamp < CONVERT (DATETIME2 (7), N'9999-12-31 23:59:59.9999999')); + +ALTER TABLE dbo.ResourceChangeDataStaging CHECK CONSTRAINT CHK_ResourceChangeDataStaging_partition; + +CREATE TABLE dbo.ResourceChangeType ( + ResourceChangeTypeId TINYINT NOT NULL, + Name NVARCHAR (50) NOT NULL, + CONSTRAINT 
PK_ResourceChangeType PRIMARY KEY CLUSTERED (ResourceChangeTypeId), + CONSTRAINT UQ_ResourceChangeType_Name UNIQUE NONCLUSTERED (Name) +) ON [PRIMARY]; + + +GO +INSERT dbo.ResourceChangeType (ResourceChangeTypeId, Name) +VALUES (0, N'Creation'); + +INSERT dbo.ResourceChangeType (ResourceChangeTypeId, Name) +VALUES (1, N'Update'); + +INSERT dbo.ResourceChangeType (ResourceChangeTypeId, Name) +VALUES (2, N'Deletion'); + +CREATE TABLE dbo.ResourceType ( + ResourceTypeId SMALLINT IDENTITY (1, 1) NOT NULL, + Name NVARCHAR (50) COLLATE Latin1_General_100_CS_AS NOT NULL, + CONSTRAINT UQ_ResourceType_ResourceTypeId UNIQUE (ResourceTypeId), + CONSTRAINT PKC_ResourceType PRIMARY KEY CLUSTERED (Name) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE dbo.ResourceWriteClaim ( + ResourceSurrogateId BIGINT NOT NULL, + ClaimTypeId TINYINT NOT NULL, + ClaimValue NVARCHAR (128) NOT NULL +) +WITH (DATA_COMPRESSION = PAGE); + +CREATE CLUSTERED INDEX IXC_ResourceWriteClaim + ON dbo.ResourceWriteClaim(ResourceSurrogateId, ClaimTypeId); + +CREATE TABLE dbo.SchemaMigrationProgress ( + Timestamp DATETIME2 (3) DEFAULT CURRENT_TIMESTAMP, + Message NVARCHAR (MAX) +); + +CREATE TABLE dbo.SearchParam ( + SearchParamId SMALLINT IDENTITY (1, 1) NOT NULL, + Uri VARCHAR (128) COLLATE Latin1_General_100_CS_AS NOT NULL, + Status VARCHAR (20) NULL, + LastUpdated DATETIMEOFFSET (7) NULL, + IsPartiallySupported BIT NULL, + CONSTRAINT UQ_SearchParam_SearchParamId UNIQUE (SearchParamId), + CONSTRAINT PKC_SearchParam PRIMARY KEY CLUSTERED (Uri) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE dbo.StringSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Text NVARCHAR (256) COLLATE Latin1_General_100_CI_AI_SC NOT NULL, + TextOverflow NVARCHAR (MAX) COLLATE Latin1_General_100_CI_AI_SC NULL, + IsMin BIT CONSTRAINT string_IsMin_Constraint DEFAULT 0 NOT NULL, + IsMax BIT CONSTRAINT string_IsMax_Constraint DEFAULT 0 NOT NULL +); + 
+ALTER TABLE dbo.StringSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_StringSearchParam + ON dbo.StringSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Text_INCLUDE_TextOverflow_IsMin_IsMax + ON dbo.StringSearchParam(SearchParamId, Text) + INCLUDE(TextOverflow, IsMin, IsMax) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Text_INCLUDE_IsMin_IsMax_WHERE_TextOverflow_NOT_NULL + ON dbo.StringSearchParam(SearchParamId, Text) + INCLUDE(IsMin, IsMax) WHERE TextOverflow IS NOT NULL WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.System ( + SystemId INT IDENTITY (1, 1) NOT NULL, + Value NVARCHAR (256) NOT NULL, + CONSTRAINT UQ_System_SystemId UNIQUE (SystemId), + CONSTRAINT PKC_System PRIMARY KEY CLUSTERED (Value) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE [dbo].[TaskInfo] ( + [TaskId] VARCHAR (64) NOT NULL, + [QueueId] VARCHAR (64) NOT NULL, + [Status] SMALLINT NOT NULL, + [TaskTypeId] SMALLINT NOT NULL, + [RunId] VARCHAR (50) NULL, + [IsCanceled] BIT NOT NULL, + [RetryCount] SMALLINT NOT NULL, + [MaxRetryCount] SMALLINT NOT NULL, + [HeartbeatDateTime] DATETIME2 (7) NULL, + [InputData] VARCHAR (MAX) NOT NULL, + [TaskContext] VARCHAR (MAX) NULL, + [Result] VARCHAR (MAX) NULL, + [CreateDateTime] DATETIME2 (7) CONSTRAINT DF_TaskInfo_CreateDate DEFAULT SYSUTCDATETIME() NOT NULL, + [StartDateTime] DATETIME2 (7) NULL, + [EndDateTime] DATETIME2 (7) NULL, + [Worker] VARCHAR (100) NULL, + [RestartInfo] VARCHAR (MAX) NULL, + [ParentTaskId] VARCHAR (64) NULL, + CONSTRAINT PKC_TaskInfo PRIMARY KEY CLUSTERED (TaskId) WITH (DATA_COMPRESSION = PAGE) +) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]; + + +GO +CREATE NONCLUSTERED INDEX IX_QueueId_Status + ON dbo.TaskInfo(QueueId, Status); + + +GO +CREATE 
NONCLUSTERED INDEX IX_QueueId_ParentTaskId + ON dbo.TaskInfo(QueueId, ParentTaskId); + +CREATE TABLE dbo.TokenDateTimeCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + StartDateTime2 DATETIME2 (7) NOT NULL, + EndDateTime2 DATETIME2 (7) NOT NULL, + IsLongerThanADay2 BIT NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenDateTimeCompositeSearchParam + ADD CONSTRAINT CHK_TokenDateTimeCompositeSearchParam_CodeOverflow1 CHECK (LEN(Code1) = 256 + OR CodeOverflow1 IS NULL); + +ALTER TABLE dbo.TokenDateTimeCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenDateTimeCompositeSearchParam + ON dbo.TokenDateTimeCompositeSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_StartDateTime2_EndDateTime2_INCLUDE_SystemId1_IsLongerThanADay2 + ON dbo.TokenDateTimeCompositeSearchParam(SearchParamId, Code1, StartDateTime2, EndDateTime2) + INCLUDE(SystemId1, IsLongerThanADay2) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_EndDateTime2_StartDateTime2_INCLUDE_SystemId1_IsLongerThanADay2 + ON dbo.TokenDateTimeCompositeSearchParam(SearchParamId, Code1, EndDateTime2, StartDateTime2) + INCLUDE(SystemId1, IsLongerThanADay2) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_StartDateTime2_EndDateTime2_INCLUDE_SystemId1_WHERE_IsLongerThanADay2_1 + ON dbo.TokenDateTimeCompositeSearchParam(SearchParamId, Code1, StartDateTime2, EndDateTime2) + INCLUDE(SystemId1) WHERE IsLongerThanADay2 = 1 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId 
(ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_EndDateTime2_StartDateTime2_INCLUDE_SystemId1_WHERE_IsLongerThanADay2_1 + ON dbo.TokenDateTimeCompositeSearchParam(SearchParamId, Code1, EndDateTime2, StartDateTime2) + INCLUDE(SystemId1) WHERE IsLongerThanADay2 = 1 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenNumberNumberCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + SingleValue2 DECIMAL (36, 18) NULL, + LowValue2 DECIMAL (36, 18) NULL, + HighValue2 DECIMAL (36, 18) NULL, + SingleValue3 DECIMAL (36, 18) NULL, + LowValue3 DECIMAL (36, 18) NULL, + HighValue3 DECIMAL (36, 18) NULL, + HasRange BIT NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenNumberNumberCompositeSearchParam + ADD CONSTRAINT CHK_TokenNumberNumberCompositeSearchParam_CodeOverflow1 CHECK (LEN(Code1) = 256 + OR CodeOverflow1 IS NULL); + +ALTER TABLE dbo.TokenNumberNumberCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenNumberNumberCompositeSearchParam + ON dbo.TokenNumberNumberCompositeSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_SingleValue2_SingleValue3_INCLUDE_SystemId1_WHERE_HasRange_0 + ON dbo.TokenNumberNumberCompositeSearchParam(SearchParamId, Code1, SingleValue2, SingleValue3) + INCLUDE(SystemId1) WHERE HasRange = 0 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_LowValue2_HighValue2_LowValue3_HighValue3_INCLUDE_SystemId1_WHERE_HasRange_1 + ON dbo.TokenNumberNumberCompositeSearchParam(SearchParamId, Code1, LowValue2, HighValue2, LowValue3, 
HighValue3) + INCLUDE(SystemId1) WHERE HasRange = 1 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenQuantityCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + SystemId2 INT NULL, + QuantityCodeId2 INT NULL, + SingleValue2 DECIMAL (36, 18) NULL, + LowValue2 DECIMAL (36, 18) NULL, + HighValue2 DECIMAL (36, 18) NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenQuantityCompositeSearchParam + ADD CONSTRAINT CHK_TokenQuantityCompositeSearchParam_CodeOverflow1 CHECK (LEN(Code1) = 256 + OR CodeOverflow1 IS NULL); + +ALTER TABLE dbo.TokenQuantityCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenQuantityCompositeSearchParam + ON dbo.TokenQuantityCompositeSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_SingleValue2_INCLUDE_QuantityCodeId2_SystemId1_SystemId2_WHERE_SingleValue2_NOT_NULL + ON dbo.TokenQuantityCompositeSearchParam(SearchParamId, Code1, SingleValue2) + INCLUDE(QuantityCodeId2, SystemId1, SystemId2) WHERE SingleValue2 IS NOT NULL WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_LowValue2_HighValue2_INCLUDE_QuantityCodeId2_SystemId1_SystemId2_WHERE_LowValue2_NOT_NULL + ON dbo.TokenQuantityCompositeSearchParam(SearchParamId, Code1, LowValue2, HighValue2) + INCLUDE(QuantityCodeId2, SystemId1, SystemId2) WHERE LowValue2 IS NOT NULL WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_HighValue2_LowValue2_INCLUDE_QuantityCodeId2_SystemId1_SystemId2_WHERE_LowValue2_NOT_NULL 
+ ON dbo.TokenQuantityCompositeSearchParam(SearchParamId, Code1, HighValue2, LowValue2) + INCLUDE(QuantityCodeId2, SystemId1, SystemId2) WHERE LowValue2 IS NOT NULL WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId INT NULL, + Code VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenSearchParam + ADD CONSTRAINT CHK_TokenSearchParam_CodeOverflow CHECK (LEN(Code) = 256 + OR CodeOverflow IS NULL); + +ALTER TABLE dbo.TokenSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenSearchParam + ON dbo.TokenSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code_INCLUDE_SystemId + ON dbo.TokenSearchParam(SearchParamId, Code) + INCLUDE(SystemId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenStringCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + Text2 NVARCHAR (256) COLLATE Latin1_General_CI_AI NOT NULL, + TextOverflow2 NVARCHAR (MAX) COLLATE Latin1_General_CI_AI NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenStringCompositeSearchParam + ADD CONSTRAINT CHK_TokenStringCompositeSearchParam_CodeOverflow1 CHECK (LEN(Code1) = 256 + OR CodeOverflow1 IS NULL); + +ALTER TABLE dbo.TokenStringCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenStringCompositeSearchParam + ON 
dbo.TokenStringCompositeSearchParam(ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_Text2_INCLUDE_SystemId1_TextOverflow2 + ON dbo.TokenStringCompositeSearchParam(SearchParamId, Code1, Text2) + INCLUDE(SystemId1, TextOverflow2) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_Text2_INCLUDE_SystemId1_WHERE_TextOverflow2_NOT_NULL + ON dbo.TokenStringCompositeSearchParam(SearchParamId, Code1, Text2) + INCLUDE(SystemId1) WHERE TextOverflow2 IS NOT NULL WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenText ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Text NVARCHAR (400) COLLATE Latin1_General_CI_AI NOT NULL, + IsHistory BIT NOT NULL +); + +ALTER TABLE dbo.TokenText + ADD CONSTRAINT DF_TokenText_IsHistory DEFAULT 0 FOR IsHistory; + +ALTER TABLE dbo.TokenText SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenText + ON dbo.TokenText(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE NONCLUSTERED INDEX IX_TokenText_SearchParamId_Text + ON dbo.TokenText(ResourceTypeId, SearchParamId, Text, ResourceSurrogateId) WHERE IsHistory = 0 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenTokenCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + SystemId2 INT NULL, + Code2 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + CodeOverflow2 VARCHAR (MAX) COLLATE 
Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenTokenCompositeSearchParam + ADD CONSTRAINT CHK_TokenTokenCompositeSearchParam_CodeOverflow1 CHECK (LEN(Code1) = 256 + OR CodeOverflow1 IS NULL); + +ALTER TABLE dbo.TokenTokenCompositeSearchParam + ADD CONSTRAINT CHK_TokenTokenCompositeSearchParam_CodeOverflow2 CHECK (LEN(Code2) = 256 + OR CodeOverflow2 IS NULL); + +ALTER TABLE dbo.TokenTokenCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenTokenCompositeSearchParam + ON dbo.TokenTokenCompositeSearchParam(ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_Code2_INCLUDE_SystemId1_SystemId2 + ON dbo.TokenTokenCompositeSearchParam(SearchParamId, Code1, Code2) + INCLUDE(SystemId1, SystemId2) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.Transactions ( + SurrogateIdRangeFirstValue BIGINT NOT NULL, + SurrogateIdRangeLastValue BIGINT NOT NULL, + Definition VARCHAR (2000) NULL, + IsCompleted BIT CONSTRAINT DF_Transactions_IsCompleted DEFAULT 0 NOT NULL, + IsSuccess BIT CONSTRAINT DF_Transactions_IsSuccess DEFAULT 0 NOT NULL, + IsVisible BIT CONSTRAINT DF_Transactions_IsVisible DEFAULT 0 NOT NULL, + IsHistoryMoved BIT CONSTRAINT DF_Transactions_IsHistoryMoved DEFAULT 0 NOT NULL, + CreateDate DATETIME CONSTRAINT DF_Transactions_CreateDate DEFAULT getUTCdate() NOT NULL, + EndDate DATETIME NULL, + VisibleDate DATETIME NULL, + HistoryMovedDate DATETIME NULL, + HeartbeatDate DATETIME CONSTRAINT DF_Transactions_HeartbeatDate DEFAULT getUTCdate() NOT NULL, + FailureReason VARCHAR (MAX) NULL, + IsControlledByClient BIT CONSTRAINT DF_Transactions_IsControlledByClient DEFAULT 1 NOT NULL, + InvisibleHistoryRemovedDate DATETIME NULL CONSTRAINT PKC_Transactions_SurrogateIdRangeFirstValue PRIMARY KEY CLUSTERED (SurrogateIdRangeFirstValue) +); + +CREATE INDEX IX_IsVisible + ON 
dbo.Transactions(IsVisible); + +CREATE TABLE dbo.UriSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Uri VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL +); + +ALTER TABLE dbo.UriSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_UriSearchParam + ON dbo.UriSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Uri + ON dbo.UriSearchParam(SearchParamId, Uri) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.WatchdogLeases ( + Watchdog VARCHAR (100) NOT NULL, + LeaseHolder VARCHAR (100) CONSTRAINT DF_WatchdogLeases_LeaseHolder DEFAULT '' NOT NULL, + LeaseEndTime DATETIME CONSTRAINT DF_WatchdogLeases_LeaseEndTime DEFAULT 0 NOT NULL, + RemainingLeaseTimeSec AS datediff(second, getUTCdate(), LeaseEndTime), + LeaseRequestor VARCHAR (100) CONSTRAINT DF_WatchdogLeases_LeaseRequestor DEFAULT '' NOT NULL, + LeaseRequestTime DATETIME CONSTRAINT DF_WatchdogLeases_LeaseRequestTime DEFAULT 0 NOT NULL CONSTRAINT PKC_WatchdogLeases_Watchdog PRIMARY KEY CLUSTERED (Watchdog) +); + +COMMIT +GO +CREATE PROCEDURE dbo.AcquireReindexJobs +@jobHeartbeatTimeoutThresholdInSeconds BIGINT, @maximumNumberOfConcurrentJobsAllowed INT +AS +SET NOCOUNT ON; +SET XACT_ABORT ON; +SET TRANSACTION ISOLATION LEVEL SERIALIZABLE; +BEGIN TRANSACTION; +DECLARE @expirationDateTime AS DATETIME2 (7); +SELECT @expirationDateTime = DATEADD(second, -@jobHeartbeatTimeoutThresholdInSeconds, SYSUTCDATETIME()); +DECLARE @numberOfRunningJobs AS INT; +SELECT @numberOfRunningJobs = COUNT(*) +FROM dbo.ReindexJob WITH (TABLOCKX) +WHERE Status = 'Running' + AND HeartbeatDateTime > @expirationDateTime; +DECLARE @limit AS INT = @maximumNumberOfConcurrentJobsAllowed - @numberOfRunningJobs; +IF (@limit > 0) + BEGIN + DECLARE @availableJobs 
TABLE ( + Id VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + JobVersion BINARY (8) NOT NULL); + INSERT INTO @availableJobs + SELECT TOP (@limit) Id, + JobVersion + FROM dbo.ReindexJob + WHERE (Status = 'Queued' + OR (Status = 'Running' + AND HeartbeatDateTime <= @expirationDateTime)) + ORDER BY HeartbeatDateTime; + DECLARE @heartbeatDateTime AS DATETIME2 (7) = SYSUTCDATETIME(); + UPDATE dbo.ReindexJob + SET Status = 'Running', + HeartbeatDateTime = @heartbeatDateTime, + RawJobRecord = JSON_MODIFY(RawJobRecord, '$.status', 'Running') + OUTPUT inserted.RawJobRecord, inserted.JobVersion + FROM dbo.ReindexJob AS job + INNER JOIN + @availableJobs AS availableJob + ON job.Id = availableJob.Id + AND job.JobVersion = availableJob.JobVersion; + END +COMMIT TRANSACTION; + +GO +CREATE PROCEDURE dbo.AcquireWatchdogLease +@Watchdog VARCHAR (100), @Worker VARCHAR (100), @AllowRebalance BIT=1, @ForceAcquire BIT=0, @LeasePeriodSec FLOAT, @WorkerIsRunning BIT=0, @LeaseEndTime DATETIME OUTPUT, @IsAcquired BIT OUTPUT, @CurrentLeaseHolder VARCHAR (100)=NULL OUTPUT +AS +SET NOCOUNT ON; +SET XACT_ABORT ON; +DECLARE @SP AS VARCHAR (100) = 'AcquireWatchdogLease', @Mode AS VARCHAR (100), @msg AS VARCHAR (1000), @MyLeasesNumber AS INT, @OtherValidRequestsOrLeasesNumber AS INT, @MyValidRequestsOrLeasesNumber AS INT, @DesiredLeasesNumber AS INT, @NotLeasedWatchdogNumber AS INT, @WatchdogNumber AS INT, @Now AS DATETIME, @MyLastChangeTime AS DATETIME, @PreviousLeaseHolder AS VARCHAR (100), @Rows AS INT = 0, @NumberOfWorkers AS INT, @st AS DATETIME = getUTCdate(), @RowsInt AS INT, @Pattern AS VARCHAR (100); +BEGIN TRY + SET @Mode = 'R=' + isnull(@Watchdog, 'NULL') + ' W=' + isnull(@Worker, 'NULL') + ' F=' + isnull(CONVERT (VARCHAR, @ForceAcquire), 'NULL') + ' LP=' + isnull(CONVERT (VARCHAR, @LeasePeriodSec), 'NULL'); + SET @CurrentLeaseHolder = ''; + SET @IsAcquired = 0; + SET @Now = getUTCdate(); + SET @LeaseEndTime = @Now; + SET @Pattern = NULLIF ((SELECT Char + FROM dbo.Parameters + 
WHERE Id = 'WatchdogLeaseHolderIncludePatternFor' + @Watchdog), ''); + IF @Pattern IS NULL + SET @Pattern = NULLIF ((SELECT Char + FROM dbo.Parameters + WHERE Id = 'WatchdogLeaseHolderIncludePattern'), ''); + IF @Pattern IS NOT NULL + AND @Worker NOT LIKE @Pattern + BEGIN + SET @msg = 'Worker does not match include pattern=' + @Pattern; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows, @Text = @msg; + SET @CurrentLeaseHolder = isnull((SELECT LeaseHolder + FROM dbo.WatchdogLeases + WHERE Watchdog = @Watchdog), ''); + RETURN; + END + SET @Pattern = NULLIF ((SELECT Char + FROM dbo.Parameters + WHERE Id = 'WatchdogLeaseHolderExcludePatternFor' + @Watchdog), ''); + IF @Pattern IS NULL + SET @Pattern = NULLIF ((SELECT Char + FROM dbo.Parameters + WHERE Id = 'WatchdogLeaseHolderExcludePattern'), ''); + IF @Pattern IS NOT NULL + AND @Worker LIKE @Pattern + BEGIN + SET @msg = 'Worker matches exclude pattern=' + @Pattern; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows, @Text = @msg; + SET @CurrentLeaseHolder = isnull((SELECT LeaseHolder + FROM dbo.WatchdogLeases + WHERE Watchdog = @Watchdog), ''); + RETURN; + END + DECLARE @Watchdogs TABLE ( + Watchdog VARCHAR (100) PRIMARY KEY); + INSERT INTO @Watchdogs + SELECT Watchdog + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE RemainingLeaseTimeSec * (-1) > 10 * @LeasePeriodSec + OR @ForceAcquire = 1 + AND Watchdog = @Watchdog + AND LeaseHolder <> @Worker; + IF @@rowcount > 0 + BEGIN + DELETE dbo.WatchdogLeases + WHERE Watchdog IN (SELECT Watchdog + FROM @Watchdogs); + SET @Rows += @@rowcount; + IF @Rows > 0 + BEGIN + SET @msg = ''; + SELECT @msg = CONVERT (VARCHAR (1000), @msg + CASE WHEN @msg = '' THEN '' ELSE ',' END + Watchdog) + FROM @Watchdogs; + SET @msg = CONVERT (VARCHAR (1000), 'Remove old/forced leases:' + @msg); + EXECUTE dbo.LogEvent @Process = 'AcquireWatchdogLease', @Status = 'Info', @Mode = @Mode, @Target = 
'WatchdogLeases', @Action = 'Delete', @Rows = @Rows, @Text = @msg; + END + END + SET @NumberOfWorkers = 1 + (SELECT count(*) + FROM (SELECT LeaseHolder + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseHolder <> @Worker + UNION + SELECT LeaseRequestor + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseRequestor <> @Worker + AND LeaseRequestor <> '') AS A); + SET @Mode = CONVERT (VARCHAR (100), @Mode + ' N=' + CONVERT (VARCHAR (10), @NumberOfWorkers)); + IF NOT EXISTS (SELECT * + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE Watchdog = @Watchdog) + INSERT INTO dbo.WatchdogLeases (Watchdog, LeaseEndTime, LeaseRequestTime) + SELECT @Watchdog, + dateadd(day, -10, @Now), + dateadd(day, -10, @Now) + WHERE NOT EXISTS (SELECT * + FROM dbo.WatchdogLeases WITH (TABLOCKX) + WHERE Watchdog = @Watchdog); + SET @LeaseEndTime = dateadd(second, @LeasePeriodSec, @Now); + SET @WatchdogNumber = (SELECT count(*) + FROM dbo.WatchdogLeases WITH (NOLOCK)); + SET @NotLeasedWatchdogNumber = (SELECT count(*) + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseHolder = '' + OR LeaseEndTime < @Now); + SET @MyLeasesNumber = (SELECT count(*) + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseHolder = @Worker + AND LeaseEndTime > @Now); + SET @OtherValidRequestsOrLeasesNumber = (SELECT count(*) + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseHolder <> @Worker + AND LeaseEndTime > @Now + OR LeaseRequestor <> @Worker + AND datediff(second, LeaseRequestTime, @Now) < @LeasePeriodSec); + SET @MyValidRequestsOrLeasesNumber = (SELECT count(*) + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseHolder = @Worker + AND LeaseEndTime > @Now + OR LeaseRequestor = @Worker + AND datediff(second, LeaseRequestTime, @Now) < @LeasePeriodSec); + SET @DesiredLeasesNumber = ceiling(1.0 * @WatchdogNumber / @NumberOfWorkers); + IF @DesiredLeasesNumber = 0 + SET @DesiredLeasesNumber = 1; + IF @DesiredLeasesNumber = 1 + AND @OtherValidRequestsOrLeasesNumber = 1 + AND @WatchdogNumber = 1 + SET 
@DesiredLeasesNumber = 0; + IF @MyValidRequestsOrLeasesNumber = floor(1.0 * @WatchdogNumber / @NumberOfWorkers) + AND @OtherValidRequestsOrLeasesNumber + @MyValidRequestsOrLeasesNumber = @WatchdogNumber + SET @DesiredLeasesNumber = @DesiredLeasesNumber - 1; + UPDATE dbo.WatchdogLeases + SET LeaseHolder = @Worker, + LeaseEndTime = @LeaseEndTime, + LeaseRequestor = '', + @PreviousLeaseHolder = LeaseHolder + WHERE Watchdog = @Watchdog + AND NOT (LeaseRequestor <> @Worker + AND datediff(second, LeaseRequestTime, @Now) < @LeasePeriodSec) + AND (LeaseHolder = @Worker + AND (LeaseEndTime > @Now + OR @WorkerIsRunning = 1) + OR LeaseEndTime < @Now + AND (@DesiredLeasesNumber > @MyLeasesNumber + OR @OtherValidRequestsOrLeasesNumber < @WatchdogNumber)); + IF @@rowcount > 0 + BEGIN + SET @IsAcquired = 1; + SET @msg = 'Lease holder changed from [' + isnull(@PreviousLeaseHolder, '') + '] to [' + @Worker + ']'; + IF @PreviousLeaseHolder <> @Worker + EXECUTE dbo.LogEvent @Process = 'AcquireWatchdogLease', @Status = 'Info', @Mode = @Mode, @Text = @msg; + END + ELSE + IF @AllowRebalance = 1 + BEGIN + SET @CurrentLeaseHolder = (SELECT LeaseHolder + FROM dbo.WatchdogLeases + WHERE Watchdog = @Watchdog); + UPDATE dbo.WatchdogLeases + SET LeaseRequestTime = @Now + WHERE Watchdog = @Watchdog + AND LeaseRequestor = @Worker + AND datediff(second, LeaseRequestTime, @Now) < @LeasePeriodSec; + IF @DesiredLeasesNumber > @MyValidRequestsOrLeasesNumber + BEGIN + UPDATE A + SET LeaseRequestor = @Worker, + LeaseRequestTime = @Now + FROM dbo.WatchdogLeases AS A + WHERE Watchdog = @Watchdog + AND NOT (LeaseRequestor <> @Worker + AND datediff(second, LeaseRequestTime, @Now) < @LeasePeriodSec) + AND @NotLeasedWatchdogNumber = 0 + AND (SELECT count(*) + FROM dbo.WatchdogLeases AS B + WHERE B.LeaseHolder = A.LeaseHolder + AND datediff(second, B.LeaseEndTime, @Now) < @LeasePeriodSec) > @DesiredLeasesNumber; + SET @RowsInt = @@rowcount; + SET @msg = '@DesiredLeasesNumber=[' + CONVERT (VARCHAR (10), 
@DesiredLeasesNumber) + '] > @MyValidRequestsOrLeasesNumber=[' + CONVERT (VARCHAR (10), @MyValidRequestsOrLeasesNumber) + ']'; + EXECUTE dbo.LogEvent @Process = 'AcquireWatchdogLease', @Status = 'Info', @Mode = @Mode, @Rows = @RowsInt, @Text = @msg; + END + END + SET @Mode = CONVERT (VARCHAR (100), @Mode + ' A=' + CONVERT (VARCHAR (1), @IsAcquired)); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = 'AcquireWatchdogLease', @Status = 'Error', @Mode = @Mode; + THROW; +END CATCH + +GO +CREATE OR ALTER PROCEDURE dbo.AddPartitionOnResourceChanges +@partitionBoundary DATETIME2 (7) OUTPUT +AS +BEGIN + SET XACT_ABORT ON; + BEGIN TRANSACTION; + DECLARE @rightPartitionBoundary AS DATETIME2 (7) = CAST ((SELECT TOP (1) value + FROM sys.partition_range_values AS prv + INNER JOIN + sys.partition_functions AS pf + ON pf.function_id = prv.function_id + WHERE pf.name = N'PartitionFunction_ResourceChangeData_Timestamp' + ORDER BY prv.boundary_id DESC) AS DATETIME2 (7)); + DECLARE @timestamp AS DATETIME2 (7) = DATEADD(hour, DATEDIFF(hour, 0, sysutcdatetime()), 0); + IF (@rightPartitionBoundary < @timestamp) + BEGIN + SET @rightPartitionBoundary = @timestamp; + END + SET @rightPartitionBoundary = DATEADD(hour, 1, @rightPartitionBoundary); + ALTER PARTITION SCHEME PartitionScheme_ResourceChangeData_Timestamp NEXT USED [Primary]; + ALTER PARTITION FUNCTION PartitionFunction_ResourceChangeData_Timestamp( ) + SPLIT RANGE (@rightPartitionBoundary); + SET @partitionBoundary = @rightPartitionBoundary; + COMMIT TRANSACTION; +END + +GO +CREATE PROCEDURE dbo.ArchiveJobs +@QueueType TINYINT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'ArchiveJobs', @Mode AS VARCHAR (100) = '', @st AS DATETIME = getUTCdate(), @Rows AS INT = 0, @PartitionId AS TINYINT, @MaxPartitions AS TINYINT = 16, @LookedAtPartitions AS TINYINT = 
0, @InflightRows AS INT = 0, @Lock AS VARCHAR (100) = 'DequeueJob_' + CONVERT (VARCHAR, @QueueType); +BEGIN TRY + SET @PartitionId = @MaxPartitions * rand(); + BEGIN TRANSACTION; + EXECUTE sp_getapplock @Lock, 'Exclusive'; + WHILE @LookedAtPartitions <= @MaxPartitions + BEGIN + SET @InflightRows += (SELECT count(*) + FROM dbo.JobQueue + WHERE PartitionId = @PartitionId + AND QueueType = @QueueType + AND Status IN (0, 1)); + SET @PartitionId = CASE WHEN @PartitionId = 15 THEN 0 ELSE @PartitionId + 1 END; + SET @LookedAtPartitions = @LookedAtPartitions + 1; + END + IF @InflightRows = 0 + BEGIN + SET @LookedAtPartitions = 0; + WHILE @LookedAtPartitions <= @MaxPartitions + BEGIN + UPDATE dbo.JobQueue + SET Status = 5 + WHERE PartitionId = @PartitionId + AND QueueType = @QueueType + AND Status IN (2, 3, 4); + SET @Rows += @@rowcount; + SET @PartitionId = CASE WHEN @PartitionId = 15 THEN 0 ELSE @PartitionId + 1 END; + SET @LookedAtPartitions = @LookedAtPartitions + 1; + END + END + COMMIT TRANSACTION; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.CaptureResourceChanges +@isDeleted BIT, @version INT, @resourceId VARCHAR (64), @resourceTypeId SMALLINT +AS +BEGIN + DECLARE @changeType AS SMALLINT; + IF (@isDeleted = 1) + BEGIN + SET @changeType = 2; + END + ELSE + BEGIN + IF (@version = 1) + BEGIN + SET @changeType = 0; + END + ELSE + BEGIN + SET @changeType = 1; + END + END + INSERT INTO dbo.ResourceChangeData (ResourceId, ResourceTypeId, ResourceVersion, ResourceChangeTypeId) + VALUES (@resourceId, @resourceTypeId, @version, @changeType); +END + +GO +CREATE PROCEDURE dbo.CaptureResourceIdsForChanges +@Resources dbo.ResourceList READONLY +AS +SET NOCOUNT ON; +INSERT INTO dbo.ResourceChangeData (ResourceId, 
ResourceTypeId, ResourceVersion, ResourceChangeTypeId) +SELECT ResourceId, + ResourceTypeId, + Version, + CASE WHEN IsDeleted = 1 THEN 2 WHEN Version > 1 THEN 1 ELSE 0 END +FROM @Resources +WHERE IsHistory = 0; + +GO +CREATE PROCEDURE dbo.CheckActiveReindexJobs +AS +SET NOCOUNT ON; +SELECT Id +FROM dbo.ReindexJob +WHERE Status = 'Running' + OR Status = 'Queued' + OR Status = 'Paused'; + +GO +CREATE PROCEDURE dbo.CleanupEventLog +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'CleanupEventLog', @Mode AS VARCHAR (100) = '', @MaxDeleteRows AS INT, @MaxAllowedRows AS BIGINT, @RetentionPeriodSecond AS INT, @DeletedRows AS INT, @TotalDeletedRows AS INT = 0, @TotalRows AS INT, @Now AS DATETIME = getUTCdate(); +EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; +BEGIN TRY + SET @MaxDeleteRows = (SELECT Number + FROM dbo.Parameters + WHERE Id = 'CleanupEventLog.DeleteBatchSize'); + IF @MaxDeleteRows IS NULL + RAISERROR ('Cannot get Parameter.CleanupEventLog.DeleteBatchSize', 18, 127); + SET @MaxAllowedRows = (SELECT Number + FROM dbo.Parameters + WHERE Id = 'CleanupEventLog.AllowedRows'); + IF @MaxAllowedRows IS NULL + RAISERROR ('Cannot get Parameter.CleanupEventLog.AllowedRows', 18, 127); + SET @RetentionPeriodSecond = (SELECT Number * 24 * 60 * 60 + FROM dbo.Parameters + WHERE Id = 'CleanupEventLog.RetentionPeriodDay'); + IF @RetentionPeriodSecond IS NULL + RAISERROR ('Cannot get Parameter.CleanupEventLog.RetentionPeriodDay', 18, 127); + SET @TotalRows = (SELECT sum(row_count) + FROM sys.dm_db_partition_stats + WHERE object_id = object_id('EventLog') + AND index_id IN (0, 1)); + SET @DeletedRows = 1; + WHILE @DeletedRows > 0 + AND EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = 'CleanupEventLog.IsEnabled' + AND Number = 1) + BEGIN + SET @DeletedRows = 0; + IF @TotalRows - @TotalDeletedRows > @MaxAllowedRows + BEGIN + DELETE TOP (@MaxDeleteRows) + dbo.EventLog WITH (PAGLOCK) + WHERE EventDate <= dateadd(second, 
-@RetentionPeriodSecond, @Now); + SET @DeletedRows = @@rowcount; + SET @TotalDeletedRows += @DeletedRows; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = 'EventLog', @Action = 'Delete', @Rows = @DeletedRows, @Text = @TotalDeletedRows; + END + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @Now; +END TRY +BEGIN CATCH + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE OR ALTER PROCEDURE dbo.ConfigurePartitionOnResourceChanges +@numberOfFuturePartitionsToAdd INT +AS +BEGIN + SET XACT_ABORT ON; + BEGIN TRANSACTION; + DECLARE @partitionBoundary AS DATETIME2 (7) = DATEADD(hour, DATEDIFF(hour, 0, sysutcdatetime()), 0); + DECLARE @startingRightPartitionBoundary AS DATETIME2 (7) = CAST ((SELECT TOP (1) value + FROM sys.partition_range_values AS prv + INNER JOIN + sys.partition_functions AS pf + ON pf.function_id = prv.function_id + WHERE pf.name = N'PartitionFunction_ResourceChangeData_Timestamp' + ORDER BY prv.boundary_id DESC) AS DATETIME2 (7)); + DECLARE @numberOfPartitionsToAdd AS INT = @numberOfFuturePartitionsToAdd + 1; + WHILE @numberOfPartitionsToAdd > 0 + BEGIN + IF (@startingRightPartitionBoundary < @partitionBoundary) + BEGIN + ALTER PARTITION SCHEME PartitionScheme_ResourceChangeData_Timestamp NEXT USED [PRIMARY]; + ALTER PARTITION FUNCTION PartitionFunction_ResourceChangeData_Timestamp( ) + SPLIT RANGE (@partitionBoundary); + END + SET @partitionBoundary = DATEADD(hour, 1, @partitionBoundary); + SET @numberOfPartitionsToAdd -= 1; + END + COMMIT TRANSACTION; +END + +GO +CREATE PROCEDURE dbo.CreateReindexJob +@id VARCHAR (64), @status VARCHAR (10), @rawJobRecord VARCHAR (MAX) +AS +SET NOCOUNT ON; +SET XACT_ABORT ON; +BEGIN TRANSACTION; +DECLARE @heartbeatDateTime AS DATETIME2 (7) = SYSUTCDATETIME(); +INSERT INTO dbo.ReindexJob (Id, Status, HeartbeatDateTime, RawJobRecord) +VALUES (@id, @status, @heartbeatDateTime, @rawJobRecord); +SELECT 
CAST (MIN_ACTIVE_ROWVERSION() AS INT); +COMMIT TRANSACTION; + +GO +CREATE PROCEDURE dbo.CreateResourceSearchParamStats +@Table VARCHAR (100), @Column VARCHAR (100), @ResourceTypeId SMALLINT, @SearchParamId SMALLINT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (200) = 'T=' + isnull(@Table, 'NULL') + ' C=' + isnull(@Column, 'NULL') + ' RT=' + isnull(CONVERT (VARCHAR, @ResourceTypeId), 'NULL') + ' SP=' + isnull(CONVERT (VARCHAR, @SearchParamId), 'NULL'), @st AS DATETIME = getUTCdate(); +BEGIN TRY + IF @Table IS NULL + OR @Column IS NULL + OR @ResourceTypeId IS NULL + OR @SearchParamId IS NULL + RAISERROR ('@TableName IS NULL OR @KeyColumn IS NULL OR @ResourceTypeId IS NULL OR @SearchParamId IS NULL', 18, 127); + EXECUTE ('CREATE STATISTICS ST_' + @Column + '_WHERE_ResourceTypeId_' + @ResourceTypeId + '_SearchParamId_' + @SearchParamId + ' ON dbo.' + @Table + ' (' + @Column + ') WHERE ResourceTypeId = ' + @ResourceTypeId + ' AND SearchParamId = ' + @SearchParamId); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Text = 'Stats created'; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + IF error_number() = 1927 + BEGIN + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; + RETURN; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.Defrag +@TableName VARCHAR (100), @IndexName VARCHAR (200), @PartitionNumber INT, @IsPartitioned BIT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'Defrag', @Mode AS VARCHAR (200) = @TableName + '.' + @IndexName + '.' + CONVERT (VARCHAR, @PartitionNumber) + '.' 
+ CONVERT (VARCHAR, @IsPartitioned), @st AS DATETIME = getUTCdate(), @SQL AS VARCHAR (3500), @msg AS VARCHAR (1000), @SizeBefore AS FLOAT, @SizeAfter AS FLOAT, @IndexId AS INT; +BEGIN TRY + SET @IndexId = (SELECT index_id + FROM sys.indexes + WHERE object_id = object_id(@TableName) + AND name = @IndexName); + SET @SizeBefore = (SELECT sum(reserved_page_count) + FROM sys.dm_db_partition_stats + WHERE object_id = object_id(@TableName) + AND index_id = @IndexId) * 8.0 / 1024 / 1024; + SET @msg = 'Size[GB] before=' + CONVERT (VARCHAR, @SizeBefore); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start', @Text = @msg; + SET @Sql = 'ALTER INDEX ' + quotename(@IndexName) + ' ON dbo.' + quotename(@TableName) + ' REORGANIZE' + CASE WHEN @IsPartitioned = 1 THEN ' PARTITION = ' + CONVERT (VARCHAR, @PartitionNumber) ELSE '' END; + BEGIN TRY + EXECUTE (@Sql); + SET @SizeAfter = (SELECT sum(reserved_page_count) + FROM sys.dm_db_partition_stats + WHERE object_id = object_id(@TableName) + AND index_id = @IndexId) * 8.0 / 1024 / 1024; + SET @msg = 'Size[GB] before=' + CONVERT (VARCHAR, @SizeBefore) + ', after=' + CONVERT (VARCHAR, @SizeAfter) + ', reduced by=' + CONVERT (VARCHAR, @SizeBefore - @SizeAfter); + EXECUTE dbo.LogEvent @Process = @SP, @Status = 'End', @Mode = @Mode, @Action = 'Reorganize', @Start = @st, @Text = @msg; + END TRY + BEGIN CATCH + EXECUTE dbo.LogEvent @Process = @SP, @Status = 'Error', @Mode = @Mode, @Action = 'Reorganize', @Start = @st, @ReRaisError = 0; + END CATCH +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.DefragChangeDatabaseSettings +@IsOn BIT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'DefragChangeDatabaseSettings', @Mode AS VARCHAR (200) = 'On=' + CONVERT (VARCHAR, @IsOn), @st AS DATETIME = getUTCdate(), @SQL AS VARCHAR (3500); +BEGIN TRY + EXECUTE dbo.LogEvent @Process 
= @SP, @Status = 'Start', @Mode = @Mode; + SET @SQL = 'ALTER DATABASE CURRENT SET AUTO_UPDATE_STATISTICS ' + CASE WHEN @IsOn = 1 THEN 'ON' ELSE 'OFF' END; + EXECUTE (@SQL); + EXECUTE dbo.LogEvent @Process = @SP, @Status = 'Run', @Mode = @Mode, @Text = @SQL; + SET @SQL = 'ALTER DATABASE CURRENT SET AUTO_CREATE_STATISTICS ' + CASE WHEN @IsOn = 1 THEN 'ON' ELSE 'OFF' END; + EXECUTE (@SQL); + EXECUTE dbo.LogEvent @Process = @SP, @Status = 'End', @Mode = @Mode, @Start = @st, @Text = @SQL; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.DeleteHistory +@DeleteResources BIT=0, @Reset BIT=0, @DisableLogEvent BIT=0 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'DeleteHistory', @Mode AS VARCHAR (100) = 'D=' + isnull(CONVERT (VARCHAR, @DeleteResources), 'NULL') + ' R=' + isnull(CONVERT (VARCHAR, @Reset), 'NULL'), @st AS DATETIME = getUTCdate(), @Id AS VARCHAR (100) = 'DeleteHistory.LastProcessed.TypeId.SurrogateId', @ResourceTypeId AS SMALLINT, @SurrogateId AS BIGINT, @RowsToProcess AS INT, @ProcessedResources AS INT = 0, @DeletedResources AS INT = 0, @DeletedSearchParams AS INT = 0, @ReportDate AS DATETIME = getUTCdate(); +BEGIN TRY + IF @DisableLogEvent = 0 + INSERT INTO dbo.Parameters (Id, Char) + SELECT @SP, + 'LogEvent'; + ELSE + DELETE dbo.Parameters + WHERE Id = @SP; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + INSERT INTO dbo.Parameters (Id, Char) + SELECT @Id, + '0.0' + WHERE NOT EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = @Id); + DECLARE @LastProcessed AS VARCHAR (100) = CASE WHEN @Reset = 0 THEN (SELECT Char + FROM dbo.Parameters + WHERE Id = @Id) ELSE '0.0' END; + DECLARE @Types TABLE ( + ResourceTypeId SMALLINT PRIMARY KEY, + Name VARCHAR (100)); + DECLARE @SurrogateIds TABLE ( + ResourceSurrogateId BIGINT PRIMARY KEY, + IsHistory BIT ); + INSERT INTO @Types + EXECUTE 
dbo.GetUsedResourceTypes ; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = '@Types', @Action = 'Insert', @Rows = @@rowcount; + SET @ResourceTypeId = substring(@LastProcessed, 1, charindex('.', @LastProcessed) - 1); + SET @SurrogateId = substring(@LastProcessed, charindex('.', @LastProcessed) + 1, 255); + DELETE @Types + WHERE ResourceTypeId < @ResourceTypeId; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = '@Types', @Action = 'Delete', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @Types) + BEGIN + SET @ResourceTypeId = (SELECT TOP 1 ResourceTypeId + FROM @Types + ORDER BY ResourceTypeId); + SET @ProcessedResources = 0; + SET @DeletedResources = 0; + SET @DeletedSearchParams = 0; + SET @RowsToProcess = 1; + WHILE @RowsToProcess > 0 + BEGIN + DELETE @SurrogateIds; + INSERT INTO @SurrogateIds + SELECT TOP 10000 ResourceSurrogateId, + IsHistory + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId > @SurrogateId + ORDER BY ResourceSurrogateId; + SET @RowsToProcess = @@rowcount; + SET @ProcessedResources += @RowsToProcess; + IF @RowsToProcess > 0 + SET @SurrogateId = (SELECT max(ResourceSurrogateId) + FROM @SurrogateIds); + SET @LastProcessed = CONVERT (VARCHAR, @ResourceTypeId) + '.' 
+ CONVERT (VARCHAR, @SurrogateId); + DELETE @SurrogateIds + WHERE IsHistory = 0; + IF EXISTS (SELECT * + FROM @SurrogateIds) + BEGIN + DELETE dbo.ResourceWriteClaim + WHERE ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.CompartmentAssignment + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.ReferenceSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenText + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.StringSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.UriSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.NumberSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.QuantitySearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.DateTimeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET 
@DeletedSearchParams += @@rowcount; + DELETE dbo.ReferenceTokenCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenTokenCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenDateTimeCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenQuantityCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenStringCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenNumberNumberCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + IF @DeleteResources = 1 + BEGIN + DELETE dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedResources += @@rowcount; + END + END + UPDATE dbo.Parameters + SET Char = @LastProcessed + WHERE Id = @Id; + IF datediff(second, @ReportDate, getUTCdate()) > 60 + BEGIN + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = 'Resource', @Action = 'Select', @Rows = @ProcessedResources, @Text = @LastProcessed; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = '*SearchParam', @Action = 'Delete', @Rows = @DeletedSearchParams, @Text = @LastProcessed; + IF 
@DeleteResources = 1 + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = 'Resource', @Action = 'Delete', @Rows = @DeletedResources, @Text = @LastProcessed; + SET @ReportDate = getUTCdate(); + SET @ProcessedResources = 0; + SET @DeletedSearchParams = 0; + SET @DeletedResources = 0; + END + END + DELETE @Types + WHERE ResourceTypeId = @ResourceTypeId; + SET @SurrogateId = 0; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = 'Resource', @Action = 'Select', @Rows = @ProcessedResources, @Text = @LastProcessed; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = '*SearchParam', @Action = 'Delete', @Rows = @DeletedSearchParams, @Text = @LastProcessed; + IF @DeleteResources = 1 + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = 'Resource', @Action = 'Delete', @Rows = @DeletedResources, @Text = @LastProcessed; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.DequeueJob +@QueueType TINYINT, @Worker VARCHAR (100), @HeartbeatTimeoutSec INT, @InputJobId BIGINT=NULL, @CheckTimeoutJobs BIT=0 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'DequeueJob', @Mode AS VARCHAR (100) = 'Q=' + isnull(CONVERT (VARCHAR, @QueueType), 'NULL') + ' H=' + isnull(CONVERT (VARCHAR, @HeartbeatTimeoutSec), 'NULL') + ' W=' + isnull(@Worker, 'NULL') + ' IJ=' + isnull(CONVERT (VARCHAR, @InputJobId), 'NULL') + ' T=' + isnull(CONVERT (VARCHAR, @CheckTimeoutJobs), 'NULL'), @Rows AS INT = 0, @st AS DATETIME = getUTCdate(), @JobId AS BIGINT, @msg AS VARCHAR (100), @Lock AS VARCHAR (100), @PartitionId AS TINYINT, @MaxPartitions AS TINYINT = 16, @LookedAtPartitions AS TINYINT = 0; +BEGIN TRY + IF EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = 'DequeueJobStop' + AND 
Number = 1) + BEGIN + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = 0, @Text = 'Skipped'; + RETURN; + END + IF @InputJobId IS NULL + SET @PartitionId = @MaxPartitions * rand(); + ELSE + SET @PartitionId = @InputJobId % 16; + SET TRANSACTION ISOLATION LEVEL READ COMMITTED; + WHILE @InputJobId IS NULL + AND @JobId IS NULL + AND @LookedAtPartitions < @MaxPartitions + AND @CheckTimeoutJobs = 0 + BEGIN + SET @Lock = 'DequeueJob_' + CONVERT (VARCHAR, @QueueType) + '_' + CONVERT (VARCHAR, @PartitionId); + BEGIN TRANSACTION; + EXECUTE sp_getapplock @Lock, 'Exclusive'; + UPDATE T + SET StartDate = getUTCdate(), + HeartbeatDate = getUTCdate(), + Worker = @Worker, + Status = 1, + Version = datediff_big(millisecond, '0001-01-01', getUTCdate()), + @JobId = T.JobId + FROM dbo.JobQueue AS T WITH (PAGLOCK) + INNER JOIN + (SELECT TOP 1 JobId + FROM dbo.JobQueue WITH (INDEX (IX_QueueType_PartitionId_Status_Priority)) + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND Status = 0 + ORDER BY Priority, JobId) AS S + ON QueueType = @QueueType + AND PartitionId = @PartitionId + AND T.JobId = S.JobId; + SET @Rows += @@rowcount; + COMMIT TRANSACTION; + IF @JobId IS NULL + BEGIN + SET @PartitionId = CASE WHEN @PartitionId = 15 THEN 0 ELSE @PartitionId + 1 END; + SET @LookedAtPartitions = @LookedAtPartitions + 1; + END + END + SET @LookedAtPartitions = 0; + WHILE @InputJobId IS NULL + AND @JobId IS NULL + AND @LookedAtPartitions < @MaxPartitions + BEGIN + SET @Lock = 'DequeueStoreCopyWorkUnit_' + CONVERT (VARCHAR, @PartitionId); + BEGIN TRANSACTION; + EXECUTE sp_getapplock @Lock, 'Exclusive'; + UPDATE T + SET StartDate = getUTCdate(), + HeartbeatDate = getUTCdate(), + Worker = @Worker, + Status = CASE WHEN CancelRequested = 0 THEN 1 ELSE 4 END, + Version = datediff_big(millisecond, '0001-01-01', getUTCdate()), + @JobId = CASE WHEN CancelRequested = 0 THEN T.JobId END, + Info = CONVERT (VARCHAR (1000), isnull(Info, '') + ' Prev: 
Worker=' + Worker + ' Start=' + CONVERT (VARCHAR, StartDate, 121)) + FROM dbo.JobQueue AS T WITH (PAGLOCK) + INNER JOIN + (SELECT TOP 1 JobId + FROM dbo.JobQueue WITH (INDEX (IX_QueueType_PartitionId_Status_Priority)) + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND Status = 1 + AND datediff(second, HeartbeatDate, getUTCdate()) > @HeartbeatTimeoutSec + ORDER BY Priority, JobId) AS S + ON QueueType = @QueueType + AND PartitionId = @PartitionId + AND T.JobId = S.JobId; + SET @Rows += @@rowcount; + COMMIT TRANSACTION; + IF @JobId IS NULL + BEGIN + SET @PartitionId = CASE WHEN @PartitionId = 15 THEN 0 ELSE @PartitionId + 1 END; + SET @LookedAtPartitions = @LookedAtPartitions + 1; + END + END + IF @InputJobId IS NOT NULL + BEGIN + UPDATE dbo.JobQueue WITH (PAGLOCK) + SET StartDate = getUTCdate(), + HeartbeatDate = getUTCdate(), + Worker = @Worker, + Status = 1, + Version = datediff_big(millisecond, '0001-01-01', getUTCdate()), + @JobId = JobId + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND Status = 0 + AND JobId = @InputJobId; + SET @Rows += @@rowcount; + IF @JobId IS NULL + BEGIN + UPDATE dbo.JobQueue WITH (PAGLOCK) + SET StartDate = getUTCdate(), + HeartbeatDate = getUTCdate(), + Worker = @Worker, + Status = 1, + Version = datediff_big(millisecond, '0001-01-01', getUTCdate()), + @JobId = JobId + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND Status = 1 + AND JobId = @InputJobId + AND datediff(second, HeartbeatDate, getUTCdate()) > @HeartbeatTimeoutSec; + SET @Rows += @@rowcount; + END + END + IF @JobId IS NOT NULL + EXECUTE dbo.GetJobs @QueueType = @QueueType, @JobId = @JobId; + SET @msg = 'J=' + isnull(CONVERT (VARCHAR, @JobId), 'NULL') + ' P=' + CONVERT (VARCHAR, @PartitionId); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows, @Text = @msg; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent 
@Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.DisableIndex +@tableName NVARCHAR (128), @indexName NVARCHAR (128) +WITH EXECUTE AS 'dbo' +AS +DECLARE @errorTxt AS VARCHAR (1000), @sql AS NVARCHAR (1000), @isDisabled AS BIT; +IF object_id(@tableName) IS NULL + BEGIN + SET @errorTxt = @tableName + ' does not exist or you don''t have permissions.'; + RAISERROR (@errorTxt, 18, 127); + END +SET @isDisabled = (SELECT is_disabled + FROM sys.indexes + WHERE object_id = object_id(@tableName) + AND name = @indexName); +IF @isDisabled IS NULL + BEGIN + SET @errorTxt = @indexName + ' does not exist or you don''t have permissions.'; + RAISERROR (@errorTxt, 18, 127); + END +IF @isDisabled = 0 + BEGIN + SET @sql = N'ALTER INDEX ' + QUOTENAME(@indexName) + N' on ' + @tableName + ' Disable'; + EXECUTE sp_executesql @sql; + END + +GO +CREATE PROCEDURE dbo.DisableIndexes +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'DisableIndexes', @Mode AS VARCHAR (200) = '', @st AS DATETIME = getUTCdate(), @Tbl AS VARCHAR (100), @Ind AS VARCHAR (200), @Txt AS VARCHAR (4000); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + DECLARE @Tables TABLE ( + Tbl VARCHAR (100) PRIMARY KEY, + Supported BIT ); + INSERT INTO @Tables + EXECUTE dbo.GetPartitionedTables @IncludeNotDisabled = 1, @IncludeNotSupported = 0; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Tables', @Action = 'Insert', @Rows = @@rowcount; + DECLARE @Indexes TABLE ( + Tbl VARCHAR (100), + Ind VARCHAR (200), + TblId INT , + IndId INT PRIMARY KEY (Tbl, Ind)); + INSERT INTO @Indexes + SELECT Tbl, + I.Name, + TblId, + I.index_id + FROM (SELECT object_id(Tbl) AS TblId, + Tbl + FROM @Tables) AS O + INNER JOIN + sys.indexes AS I + ON I.object_id = TblId; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Indexes', @Action = 'Insert', @Rows = @@rowcount; + 
INSERT INTO dbo.IndexProperties (TableName, IndexName, PropertyName, PropertyValue) + SELECT Tbl, + Ind, + 'DATA_COMPRESSION', + data_comp + FROM (SELECT Tbl, + Ind, + isnull((SELECT TOP 1 CASE WHEN data_compression_desc = 'PAGE' THEN 'PAGE' END + FROM sys.partitions + WHERE object_id = TblId + AND index_id = IndId), 'NONE') AS data_comp + FROM @Indexes) AS A + WHERE NOT EXISTS (SELECT * + FROM dbo.IndexProperties + WHERE TableName = Tbl + AND IndexName = Ind); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = 'IndexProperties', @Action = 'Insert', @Rows = @@rowcount; + DELETE @Indexes + WHERE Tbl = 'Resource' + OR IndId = 1; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Indexes', @Action = 'Delete', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @Indexes) + BEGIN + SELECT TOP 1 @Tbl = Tbl, + @Ind = Ind + FROM @Indexes; + SET @Txt = 'ALTER INDEX ' + @Ind + ' ON dbo.' + @Tbl + ' DISABLE'; + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Ind, @Action = 'Disable', @Text = @Txt; + DELETE @Indexes + WHERE Tbl = @Tbl + AND Ind = @Ind; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.EnqueueJobs +@QueueType TINYINT, @Definitions StringList READONLY, @GroupId BIGINT=NULL, @ForceOneActiveJobGroup BIT=1, @IsCompleted BIT=NULL, @ReturnJobs BIT=1 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'EnqueueJobs', @Mode AS VARCHAR (100) = 'Q=' + isnull(CONVERT (VARCHAR, @QueueType), 'NULL') + ' D=' + CONVERT (VARCHAR, (SELECT count(*) + FROM @Definitions)) + ' G=' + isnull(CONVERT (VARCHAR, @GroupId), 'NULL') + ' F=' + isnull(CONVERT (VARCHAR, @ForceOneActiveJobGroup), 'NULL') + ' C=' + isnull(CONVERT (VARCHAR, 
@IsCompleted), 'NULL'), @st AS DATETIME = getUTCdate(), @Lock AS VARCHAR (100) = 'EnqueueJobs_' + CONVERT (VARCHAR, @QueueType), @MaxJobId AS BIGINT, @Rows AS INT, @msg AS VARCHAR (1000), @JobIds AS BigintList, @InputRows AS INT; +BEGIN TRY + DECLARE @Input TABLE ( + DefinitionHash VARBINARY (20) PRIMARY KEY, + Definition VARCHAR (MAX) ); + INSERT INTO @Input + SELECT hashbytes('SHA1', String) AS DefinitionHash, + String AS Definition + FROM @Definitions; + SET @InputRows = @@rowcount; + INSERT INTO @JobIds + SELECT JobId + FROM @Input AS A + INNER JOIN + dbo.JobQueue AS B + ON B.QueueType = @QueueType + AND B.DefinitionHash = A.DefinitionHash + AND B.Status <> 5; + IF @@rowcount < @InputRows + BEGIN + BEGIN TRANSACTION; + EXECUTE sp_getapplock @Lock, 'Exclusive'; + IF @ForceOneActiveJobGroup = 1 + AND EXISTS (SELECT * + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND Status IN (0, 1) + AND (@GroupId IS NULL + OR GroupId <> @GroupId)) + RAISERROR ('There are other active job groups', 18, 127); + SET @MaxJobId = isnull((SELECT TOP 1 JobId + FROM dbo.JobQueue + WHERE QueueType = @QueueType + ORDER BY JobId DESC), 0); + INSERT INTO dbo.JobQueue (QueueType, GroupId, JobId, Definition, DefinitionHash, Status) + OUTPUT inserted.JobId INTO @JobIds + SELECT @QueueType, + isnull(@GroupId, @MaxJobId + 1) AS GroupId, + JobId, + Definition, + DefinitionHash, + CASE WHEN @IsCompleted = 1 THEN 2 ELSE 0 END AS Status + FROM (SELECT @MaxJobId + row_number() OVER (ORDER BY Dummy) AS JobId, + * + FROM (SELECT *, + 0 AS Dummy + FROM @Input) AS A) AS A + WHERE NOT EXISTS (SELECT * + FROM dbo.JobQueue AS B WITH (INDEX (IX_QueueType_DefinitionHash)) + WHERE B.QueueType = @QueueType + AND B.DefinitionHash = A.DefinitionHash + AND B.Status <> 5); + SET @Rows = @@rowcount; + COMMIT TRANSACTION; + END + IF @ReturnJobs = 1 + EXECUTE dbo.GetJobs @QueueType = @QueueType, @JobIds = @JobIds; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = 
@Rows; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.ExecuteCommandForRebuildIndexes +@Tbl VARCHAR (100), @Ind VARCHAR (1000), @Cmd VARCHAR (MAX) +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'ExecuteCommandForRebuildIndexes', @Mode AS VARCHAR (200) = 'Tbl=' + isnull(@Tbl, 'NULL'), @st AS DATETIME, @Retries AS INT = 0, @Action AS VARCHAR (100), @msg AS VARCHAR (1000); +RetryOnTempdbError: +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start', @Text = @Cmd; + SET @st = getUTCdate(); + IF @Tbl IS NULL + RAISERROR ('@Tbl IS NULL', 18, 127); + IF @Cmd IS NULL + RAISERROR ('@Cmd IS NULL', 18, 127); + SET @Action = CASE WHEN @Cmd LIKE 'UPDATE STAT%' THEN 'Update statistics' WHEN @Cmd LIKE 'CREATE%INDEX%' THEN 'Create Index' WHEN @Cmd LIKE 'ALTER%INDEX%REBUILD%' THEN 'Rebuild Index' WHEN @Cmd LIKE 'ALTER%TABLE%ADD%' THEN 'Add Constraint' END; + IF @Action IS NULL + BEGIN + SET @msg = 'Not supported command = ' + CONVERT (VARCHAR (900), @Cmd); + RAISERROR (@msg, 18, 127); + END + IF @Action = 'Create Index' + WAITFOR DELAY '00:00:05'; + EXECUTE (@Cmd); + SELECT @Ind; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Action = @Action, @Status = 'End', @Start = @st, @Text = @Cmd; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + IF error_number() = 40544 + BEGIN + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st, @Retry = @Retries; + SET @Retries = @Retries + 1; + IF @Tbl = 'TokenText_96' + WAITFOR DELAY '01:00:00'; + ELSE + WAITFOR DELAY '00:10:00'; + GOTO RetryOnTempdbError; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE OR ALTER PROCEDURE dbo.FetchEventAgentCheckpoint +@CheckpointId VARCHAR (64) +AS +BEGIN + SELECT TOP (1) 
CheckpointId, + LastProcessedDateTime, + LastProcessedIdentifier + FROM dbo.EventAgentCheckpoint + WHERE CheckpointId = @CheckpointId; +END + +GO +CREATE PROCEDURE dbo.FetchResourceChanges_3 +@startId BIGINT, @lastProcessedUtcDateTime DATETIME2 (7), @pageSize SMALLINT +AS +BEGIN + SET NOCOUNT ON; + DECLARE @precedingPartitionBoundary AS DATETIME2 (7) = (SELECT TOP (1) CAST (prv.value AS DATETIME2 (7)) AS value + FROM sys.partition_range_values AS prv WITH (NOLOCK) + INNER JOIN + sys.partition_functions AS pf WITH (NOLOCK) + ON pf.function_id = prv.function_id + WHERE pf.name = N'PartitionFunction_ResourceChangeData_Timestamp' + AND SQL_VARIANT_PROPERTY(prv.Value, 'BaseType') = 'datetime2' + AND CAST (prv.value AS DATETIME2 (7)) < DATEADD(HOUR, DATEDIFF(HOUR, 0, @lastProcessedUtcDateTime), 0) + ORDER BY prv.boundary_id DESC); + IF (@precedingPartitionBoundary IS NULL) + BEGIN + SET @precedingPartitionBoundary = CONVERT (DATETIME2 (7), N'1970-01-01T00:00:00.0000000'); + END + DECLARE @endDateTimeToFilter AS DATETIME2 (7) = DATEADD(HOUR, 1, SYSUTCDATETIME()); + WITH PartitionBoundaries + AS (SELECT CAST (prv.value AS DATETIME2 (7)) AS PartitionBoundary + FROM sys.partition_range_values AS prv WITH (NOLOCK) + INNER JOIN + sys.partition_functions AS pf WITH (NOLOCK) + ON pf.function_id = prv.function_id + WHERE pf.name = N'PartitionFunction_ResourceChangeData_Timestamp' + AND SQL_VARIANT_PROPERTY(prv.Value, 'BaseType') = 'datetime2' + AND CAST (prv.value AS DATETIME2 (7)) BETWEEN @precedingPartitionBoundary AND @endDateTimeToFilter) + SELECT TOP (@pageSize) Id, + Timestamp, + ResourceId, + ResourceTypeId, + ResourceVersion, + ResourceChangeTypeId + FROM PartitionBoundaries AS p CROSS APPLY (SELECT TOP (@pageSize) Id, + Timestamp, + ResourceId, + ResourceTypeId, + ResourceVersion, + ResourceChangeTypeId + FROM dbo.ResourceChangeData WITH (TABLOCK, HOLDLOCK) + WHERE Id >= @startId + AND $PARTITION.PartitionFunction_ResourceChangeData_Timestamp (Timestamp) = 
$PARTITION.PartitionFunction_ResourceChangeData_Timestamp (p.PartitionBoundary) + ORDER BY Id ASC) AS rcd + ORDER BY rcd.Id ASC; +END + +GO +CREATE PROCEDURE dbo.GetActiveJobs +@QueueType TINYINT, @GroupId BIGINT=NULL +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetActiveJobs', @Mode AS VARCHAR (100) = 'Q=' + isnull(CONVERT (VARCHAR, @QueueType), 'NULL') + ' G=' + isnull(CONVERT (VARCHAR, @GroupId), 'NULL'), @st AS DATETIME = getUTCdate(), @JobIds AS BigintList, @PartitionId AS TINYINT, @MaxPartitions AS TINYINT = 16, @LookedAtPartitions AS TINYINT = 0, @Rows AS INT = 0; +BEGIN TRY + SET @PartitionId = @MaxPartitions * rand(); + WHILE @LookedAtPartitions < @MaxPartitions + BEGIN + IF @GroupId IS NULL + INSERT INTO @JobIds + SELECT JobId + FROM dbo.JobQueue + WHERE PartitionId = @PartitionId + AND QueueType = @QueueType + AND Status IN (0, 1); + ELSE + INSERT INTO @JobIds + SELECT JobId + FROM dbo.JobQueue + WHERE PartitionId = @PartitionId + AND QueueType = @QueueType + AND GroupId = @GroupId + AND Status IN (0, 1); + SET @Rows += @@rowcount; + SET @PartitionId = CASE WHEN @PartitionId = 15 THEN 0 ELSE @PartitionId + 1 END; + SET @LookedAtPartitions += 1; + END + IF @Rows > 0 + EXECUTE dbo.GetJobs @QueueType = @QueueType, @JobIds = @JobIds; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetCommandsForRebuildIndexes +@RebuildClustered BIT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetCommandsForRebuildIndexes', @Mode AS VARCHAR (200) = 'PS=PartitionScheme_ResourceTypeId RC=' + isnull(CONVERT (VARCHAR, @RebuildClustered), 'NULL'), @st AS DATETIME = getUTCdate(), @Tbl AS VARCHAR (100), @TblInt AS VARCHAR (100), @Ind AS VARCHAR (200), @IndId AS INT, @Supported AS BIT, @Txt AS VARCHAR (MAX), @Rows AS 
BIGINT, @Pages AS BIGINT, @ResourceTypeId AS SMALLINT, @IndexesCnt AS INT, @DataComp AS VARCHAR (100); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + DECLARE @Commands TABLE ( + Tbl VARCHAR (100), + Ind VARCHAR (200), + Txt VARCHAR (MAX), + Pages BIGINT ); + DECLARE @ResourceTypes TABLE ( + ResourceTypeId SMALLINT PRIMARY KEY); + DECLARE @Indexes TABLE ( + Ind VARCHAR (200) PRIMARY KEY, + IndId INT ); + DECLARE @Tables TABLE ( + name VARCHAR (100) PRIMARY KEY, + Supported BIT ); + INSERT INTO @Tables + EXECUTE dbo.GetPartitionedTables @IncludeNotDisabled = 1, @IncludeNotSupported = 1; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Tables', @Action = 'Insert', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @Tables) + BEGIN + SELECT TOP 1 @Tbl = name, + @Supported = Supported + FROM @Tables + ORDER BY name; + IF @Supported = 0 + BEGIN + INSERT INTO @Commands + SELECT @Tbl, + name, + 'ALTER INDEX ' + name + ' ON dbo.' 
+ @Tbl + ' REBUILD' + CASE WHEN (SELECT PropertyValue + FROM dbo.IndexProperties + WHERE TableName = @Tbl + AND IndexName = name) = 'PAGE' THEN ' PARTITION = ALL WITH (DATA_COMPRESSION = PAGE)' ELSE '' END, + CONVERT (BIGINT, 9e18) + FROM sys.indexes + WHERE object_id = object_id(@Tbl) + AND (is_disabled = 1 + AND index_id > 1 + AND @RebuildClustered = 0 + OR index_id = 1 + AND @RebuildClustered = 1); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Commands', @Action = 'Insert', @Rows = @@rowcount, @Text = 'Not supported tables with disabled indexes'; + END + ELSE + BEGIN + DELETE @ResourceTypes; + INSERT INTO @ResourceTypes + SELECT CONVERT (SMALLINT, substring(name, charindex('_', name) + 1, 6)) AS ResourceTypeId + FROM sys.sysobjects + WHERE name LIKE @Tbl + '[_]%'; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@ResourceTypes', @Action = 'Insert', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @ResourceTypes) + BEGIN + SET @ResourceTypeId = (SELECT TOP 1 ResourceTypeId + FROM @ResourceTypes + ORDER BY ResourceTypeId); + SET @TblInt = @Tbl + '_' + CONVERT (VARCHAR, @ResourceTypeId); + SET @Pages = (SELECT dpages + FROM sysindexes + WHERE id = object_id(@TblInt) + AND indid IN (0, 1)); + DELETE @Indexes; + INSERT INTO @Indexes + SELECT name, + index_id + FROM sys.indexes + WHERE object_id = object_id(@Tbl) + AND (index_id > 1 + AND @RebuildClustered = 0 + OR index_id = 1 + AND @RebuildClustered = 1); + SET @IndexesCnt = 0; + WHILE EXISTS (SELECT * + FROM @Indexes) + BEGIN + SELECT TOP 1 @Ind = Ind, + @IndId = IndId + FROM @Indexes + ORDER BY Ind; + IF @IndId = 1 + BEGIN + SET @Txt = 'ALTER INDEX ' + @Ind + ' ON dbo.' 
+ @TblInt + ' REBUILD' + CASE WHEN (SELECT PropertyValue + FROM dbo.IndexProperties + WHERE TableName = @Tbl + AND IndexName = @Ind) = 'PAGE' THEN ' PARTITION = ALL WITH (DATA_COMPRESSION = PAGE)' ELSE '' END; + INSERT INTO @Commands + SELECT @TblInt, + @Ind, + @Txt, + @Pages; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Add command', @Rows = @@rowcount, @Text = @Txt; + END + ELSE + IF NOT EXISTS (SELECT * + FROM sys.indexes + WHERE object_id = object_id(@TblInt) + AND name = @Ind) + BEGIN + EXECUTE dbo.GetIndexCommands @Tbl = @Tbl, @Ind = @Ind, @AddPartClause = 0, @IncludeClustered = 0, @Txt = @Txt OUTPUT; + SET @Txt = replace(@Txt, '[' + @Tbl + ']', @TblInt); + IF @Txt IS NOT NULL + BEGIN + SET @IndexesCnt = @IndexesCnt + 1; + INSERT INTO @Commands + SELECT @TblInt, + @Ind, + @Txt, + @Pages; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Add command', @Rows = @@rowcount, @Text = @Txt; + END + END + DELETE @Indexes + WHERE Ind = @Ind; + END + IF @IndexesCnt > 1 + BEGIN + INSERT INTO @Commands + SELECT @TblInt, + 'UPDATE STAT', + 'UPDATE STATISTICS dbo.' 
+ @TblInt, + @Pages; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Add command', @Rows = @@rowcount, @Text = 'Add stats update'; + END + DELETE @ResourceTypes + WHERE ResourceTypeId = @ResourceTypeId; + END + END + DELETE @Tables + WHERE name = @Tbl; + END + SELECT Tbl, + Ind, + Txt + FROM @Commands + ORDER BY Pages DESC, Tbl, CASE WHEN Txt LIKE 'UPDATE STAT%' THEN 0 ELSE 1 END; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Commands', @Action = 'Select', @Rows = @@rowcount; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetIndexCommands +@Tbl VARCHAR (100), @Ind VARCHAR (200), @AddPartClause BIT, @IncludeClustered BIT, @Txt VARCHAR (MAX)=NULL OUTPUT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetIndexCommands', @Mode AS VARCHAR (200) = 'Tbl=' + isnull(@Tbl, 'NULL') + ' Ind=' + isnull(@Ind, 'NULL'), @st AS DATETIME = getUTCdate(); +DECLARE @Indexes TABLE ( + Ind VARCHAR (200) PRIMARY KEY, + Txt VARCHAR (MAX)); +BEGIN TRY + IF @Tbl IS NULL + RAISERROR ('@Tbl IS NULL', 18, 127); + INSERT INTO @Indexes + SELECT Ind, + CASE WHEN is_primary_key = 1 THEN 'ALTER TABLE dbo.[' + Tbl + '] ADD PRIMARY KEY ' + CASE WHEN type = 1 THEN ' CLUSTERED' ELSE '' END ELSE 'CREATE' + CASE WHEN is_unique = 1 THEN ' UNIQUE' ELSE '' END + CASE WHEN type = 1 THEN ' CLUSTERED' ELSE '' END + ' INDEX ' + Ind + ' ON dbo.[' + Tbl + ']' END + ' (' + KeyCols + ')' + IncClause + CASE WHEN filter_def IS NOT NULL THEN ' WHERE ' + filter_def ELSE '' END + CASE WHEN data_comp IS NOT NULL THEN ' WITH (DATA_COMPRESSION = ' + data_comp + ')' ELSE '' END + CASE WHEN @AddPartClause = 1 THEN PartClause ELSE '' END + FROM (SELECT O.Name AS Tbl, + I.Name AS 
Ind, + isnull((SELECT TOP 1 CASE WHEN data_compression_desc = 'PAGE' THEN 'PAGE' END + FROM sys.partitions AS P + WHERE P.object_id = I.object_id + AND I.index_id = P.index_id), (SELECT NULLIF (PropertyValue, 'NONE') + FROM dbo.IndexProperties + WHERE TableName = O.Name + AND IndexName = I.Name + AND PropertyName = 'DATA_COMPRESSION')) AS data_comp, + replace(replace(replace(replace(I.filter_definition, '[', ''), ']', ''), '(', ''), ')', '') AS filter_def, + I.is_unique, + I.is_primary_key, + I.type, + KeyCols, + CASE WHEN IncCols IS NOT NULL THEN ' INCLUDE (' + IncCols + ')' ELSE '' END AS IncClause, + CASE WHEN EXISTS (SELECT * + FROM sys.partition_schemes AS S + WHERE S.data_space_id = I.data_space_id + AND name = 'PartitionScheme_ResourceTypeId') THEN ' ON PartitionScheme_ResourceTypeId (ResourceTypeId)' ELSE '' END AS PartClause + FROM sys.indexes AS I + INNER JOIN + sys.objects AS O + ON O.object_id = I.object_id CROSS APPLY (SELECT string_agg(CASE WHEN IC.key_ordinal > 0 + AND IC.is_included_column = 0 THEN C.name END, ',') WITHIN GROUP (ORDER BY key_ordinal) AS KeyCols, + string_agg(CASE WHEN IC.is_included_column = 1 THEN C.name END, ',') WITHIN GROUP (ORDER BY key_ordinal) AS IncCols + FROM sys.index_columns AS IC + INNER JOIN + sys.columns AS C + ON C.object_id = IC.object_id + AND C.column_id = IC.column_id + WHERE IC.object_id = I.object_id + AND IC.index_id = I.index_id + GROUP BY IC.object_id, IC.index_id) AS IC + WHERE O.name = @Tbl + AND (@Ind IS NULL + OR I.name = @Ind) + AND (@IncludeClustered = 1 + OR index_id > 1)) AS A; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Indexes', @Action = 'Insert', @Rows = @@rowcount; + IF @Ind IS NULL + SELECT Ind, + Txt + FROM @Indexes; + ELSE + SET @Txt = (SELECT Txt + FROM @Indexes); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Text = @Txt; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = 
@SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetJobs +@QueueType TINYINT, @JobId BIGINT=NULL, @JobIds BigintList READONLY, @GroupId BIGINT=NULL, @ReturnDefinition BIT=1 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetJobs', @Mode AS VARCHAR (100) = 'Q=' + isnull(CONVERT (VARCHAR, @QueueType), 'NULL') + ' J=' + isnull(CONVERT (VARCHAR, @JobId), 'NULL') + ' G=' + isnull(CONVERT (VARCHAR, @GroupId), 'NULL'), @st AS DATETIME = getUTCdate(), @PartitionId AS TINYINT = @JobId % 16; +BEGIN TRY + IF @JobId IS NULL + AND @GroupId IS NULL + AND NOT EXISTS (SELECT * + FROM @JobIds) + RAISERROR ('@JobId = NULL and @GroupId = NULL and @JobIds is empty', 18, 127); + IF @JobId IS NOT NULL + SELECT GroupId, + JobId, + CASE WHEN @ReturnDefinition = 1 THEN Definition ELSE NULL END AS Definition, + Version, + Status, + Priority, + Data, + Result, + CreateDate, + StartDate, + EndDate, + HeartbeatDate, + CancelRequested + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = isnull(@JobId, -1) + AND Status <> 5; + ELSE + IF @GroupId IS NOT NULL + SELECT GroupId, + JobId, + CASE WHEN @ReturnDefinition = 1 THEN Definition ELSE NULL END AS Definition, + Version, + Status, + Priority, + Data, + Result, + CreateDate, + StartDate, + EndDate, + HeartbeatDate, + CancelRequested + FROM dbo.JobQueue WITH (INDEX (IX_QueueType_GroupId)) + WHERE QueueType = @QueueType + AND GroupId = isnull(@GroupId, -1) + AND Status <> 5; + ELSE + SELECT GroupId, + JobId, + CASE WHEN @ReturnDefinition = 1 THEN Definition ELSE NULL END AS Definition, + Version, + Status, + Priority, + Data, + Result, + CreateDate, + StartDate, + EndDate, + HeartbeatDate, + CancelRequested + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND JobId IN (SELECT Id + FROM @JobIds) + AND PartitionId = JobId % 16 + AND Status <> 5; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = 
@@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE OR ALTER PROCEDURE dbo.GetNonCompletedJobCountOfSpecificQueueType +@queueType TINYINT +AS +BEGIN + SET NOCOUNT ON; + SELECT COUNT(*) + FROM dbo.JobQueue + WHERE QueueType = @queueType + AND (Status = 0 + OR Status = 1); +END + +GO +CREATE PROCEDURE dbo.GetPartitionedTables +@IncludeNotDisabled BIT, @IncludeNotSupported BIT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetPartitionedTables', @Mode AS VARCHAR (200) = 'PS=PartitionScheme_ResourceTypeId D=' + isnull(CONVERT (VARCHAR, @IncludeNotDisabled), 'NULL') + ' S=' + isnull(CONVERT (VARCHAR, @IncludeNotSupported), 'NULL'), @st AS DATETIME = getUTCdate(); +DECLARE @NotSupportedTables TABLE ( + id INT PRIMARY KEY); +BEGIN TRY + INSERT INTO @NotSupportedTables + SELECT DISTINCT O.object_id + FROM sys.indexes AS I + INNER JOIN + sys.objects AS O + ON O.object_id = I.object_id + WHERE O.type = 'u' + AND EXISTS (SELECT * + FROM sys.partition_schemes AS PS + WHERE PS.data_space_id = I.data_space_id + AND name = 'PartitionScheme_ResourceTypeId') + AND (NOT EXISTS (SELECT * + FROM sys.index_columns AS IC + INNER JOIN + sys.columns AS C + ON C.object_id = IC.object_id + AND C.column_id = IC.column_id + WHERE IC.object_id = I.object_id + AND IC.index_id = I.index_id + AND IC.key_ordinal > 0 + AND IC.is_included_column = 0 + AND C.name = 'ResourceTypeId') + OR EXISTS (SELECT * + FROM sys.indexes AS NSI + WHERE NSI.object_id = O.object_id + AND NOT EXISTS (SELECT * + FROM sys.partition_schemes AS PS + WHERE PS.data_space_id = NSI.data_space_id + AND name = 'PartitionScheme_ResourceTypeId'))); + SELECT CONVERT (VARCHAR (100), O.name), + CONVERT (BIT, CASE WHEN EXISTS (SELECT * + FROM @NotSupportedTables AS NSI + WHERE NSI.id = O.object_id) THEN 0 ELSE 1 END) + FROM sys.indexes AS I + INNER JOIN + sys.objects AS O + ON 
O.object_id = I.object_id + WHERE O.type = 'u' + AND I.index_id IN (0, 1) + AND EXISTS (SELECT * + FROM sys.partition_schemes AS PS + WHERE PS.data_space_id = I.data_space_id + AND name = 'PartitionScheme_ResourceTypeId') + AND EXISTS (SELECT * + FROM sys.index_columns AS IC + INNER JOIN + sys.columns AS C + ON C.object_id = I.object_id + AND C.column_id = IC.column_id + AND IC.is_included_column = 0 + AND C.name = 'ResourceTypeId') + AND (@IncludeNotSupported = 1 + OR NOT EXISTS (SELECT * + FROM @NotSupportedTables AS NSI + WHERE NSI.id = O.object_id)) + AND (@IncludeNotDisabled = 1 + OR EXISTS (SELECT * + FROM sys.indexes AS D + WHERE D.object_id = O.object_id + AND D.is_disabled = 1)) + ORDER BY 1; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetReindexJobById +@id VARCHAR (64) +AS +SET NOCOUNT ON; +SELECT RawJobRecord, + JobVersion +FROM dbo.ReindexJob +WHERE Id = @id; + +GO +CREATE PROCEDURE dbo.GetResources +@ResourceKeys dbo.ResourceKeyList READONLY +AS +SET NOCOUNT ON; +DECLARE @st AS DATETIME = getUTCdate(), @SP AS VARCHAR (100) = 'GetResources', @InputRows AS INT, @DummyTop AS BIGINT = 9223372036854775807, @NotNullVersionExists AS BIT, @NullVersionExists AS BIT, @MinRT AS SMALLINT, @MaxRT AS SMALLINT; +SELECT @MinRT = min(ResourceTypeId), + @MaxRT = max(ResourceTypeId), + @InputRows = count(*), + @NotNullVersionExists = max(CASE WHEN Version IS NOT NULL THEN 1 ELSE 0 END), + @NullVersionExists = max(CASE WHEN Version IS NULL THEN 1 ELSE 0 END) +FROM @ResourceKeys; +DECLARE @Mode AS VARCHAR (100) = 'RT=[' + CONVERT (VARCHAR, @MinRT) + ',' + CONVERT (VARCHAR, @MaxRT) + '] Cnt=' + CONVERT (VARCHAR, @InputRows) + ' NNVE=' + CONVERT (VARCHAR, @NotNullVersionExists) + ' NVE=' + CONVERT (VARCHAR, 
@NullVersionExists); +BEGIN TRY + IF @NotNullVersionExists = 1 + IF @NullVersionExists = 0 + SELECT B.ResourceTypeId, + B.ResourceId, + ResourceSurrogateId, + B.Version, + IsDeleted, + IsHistory, + RawResource, + IsRawResourceMetaSet, + SearchParamHash + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceKeys) AS A + INNER JOIN + dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId_Version)) + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + AND B.Version = A.Version + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + ELSE + SELECT * + FROM (SELECT B.ResourceTypeId, + B.ResourceId, + ResourceSurrogateId, + B.Version, + IsDeleted, + IsHistory, + RawResource, + IsRawResourceMetaSet, + SearchParamHash + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceKeys + WHERE Version IS NOT NULL) AS A + INNER JOIN + dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId_Version)) + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + AND B.Version = A.Version + UNION ALL + SELECT B.ResourceTypeId, + B.ResourceId, + ResourceSurrogateId, + B.Version, + IsDeleted, + IsHistory, + RawResource, + IsRawResourceMetaSet, + SearchParamHash + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceKeys + WHERE Version IS NULL) AS A + INNER JOIN + dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId)) + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + WHERE IsHistory = 0) AS A + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + ELSE + SELECT B.ResourceTypeId, + B.ResourceId, + ResourceSurrogateId, + B.Version, + IsDeleted, + IsHistory, + RawResource, + IsRawResourceMetaSet, + SearchParamHash + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceKeys) AS A + INNER JOIN + dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId)) + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + WHERE IsHistory = 0 + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + 
EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetResourcesByTransactionId +@TransactionId BIGINT, @IncludeHistory BIT=0, @ReturnResourceKeysOnly BIT=0 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = 'T=' + CONVERT (VARCHAR, @TransactionId) + ' H=' + CONVERT (VARCHAR, @IncludeHistory), @st AS DATETIME = getUTCdate(), @DummyTop AS BIGINT = 9223372036854775807, @TypeId AS SMALLINT; +BEGIN TRY + DECLARE @Types TABLE ( + TypeId SMALLINT PRIMARY KEY, + Name VARCHAR (100)); + INSERT INTO @Types + EXECUTE dbo.GetUsedResourceTypes ; + DECLARE @Keys TABLE ( + TypeId SMALLINT, + SurrogateId BIGINT PRIMARY KEY (TypeId, SurrogateId)); + WHILE EXISTS (SELECT * + FROM @Types) + BEGIN + SET @TypeId = (SELECT TOP 1 TypeId + FROM @Types + ORDER BY TypeId); + INSERT INTO @Keys + SELECT @TypeId, + ResourceSurrogateId + FROM dbo.Resource + WHERE ResourceTypeId = @TypeId + AND TransactionId = @TransactionId; + DELETE @Types + WHERE TypeId = @TypeId; + END + IF @ReturnResourceKeysOnly = 0 + SELECT ResourceTypeId, + ResourceId, + ResourceSurrogateId, + Version, + IsDeleted, + IsHistory, + RawResource, + IsRawResourceMetaSet, + SearchParamHash, + RequestMethod + FROM (SELECT TOP (@DummyTop) * + FROM @Keys) AS A + INNER JOIN + dbo.Resource AS B + ON ResourceTypeId = TypeId + AND ResourceSurrogateId = SurrogateId + WHERE IsHistory = 0 + OR @IncludeHistory = 1 + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + ELSE + SELECT ResourceTypeId, + ResourceId, + ResourceSurrogateId, + Version, + IsDeleted + FROM (SELECT TOP (@DummyTop) * + FROM @Keys) AS A + INNER JOIN + dbo.Resource AS B + ON ResourceTypeId = TypeId + AND ResourceSurrogateId = SurrogateId + WHERE IsHistory = 0 + OR @IncludeHistory = 
1 + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetResourcesByTypeAndSurrogateIdRange +@ResourceTypeId SMALLINT, @StartId BIGINT, @EndId BIGINT, @GlobalEndId BIGINT=NULL, @IncludeHistory BIT=0, @IncludeDeleted BIT=0 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetResourcesByTypeAndSurrogateIdRange', @Mode AS VARCHAR (100) = 'RT=' + isnull(CONVERT (VARCHAR, @ResourceTypeId), 'NULL') + ' S=' + isnull(CONVERT (VARCHAR, @StartId), 'NULL') + ' E=' + isnull(CONVERT (VARCHAR, @EndId), 'NULL') + ' GE=' + isnull(CONVERT (VARCHAR, @GlobalEndId), 'NULL') + ' HI=' + isnull(CONVERT (VARCHAR, @IncludeHistory), 'NULL') + ' DE' + isnull(CONVERT (VARCHAR, @IncludeDeleted), 'NULL'), @st AS DATETIME = getUTCdate(), @DummyTop AS BIGINT = 9223372036854775807; +BEGIN TRY + DECLARE @ResourceIds TABLE ( + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS PRIMARY KEY); + DECLARE @SurrogateIds TABLE ( + MaxSurrogateId BIGINT PRIMARY KEY); + IF @GlobalEndId IS NOT NULL + AND @IncludeHistory = 0 + BEGIN + INSERT INTO @ResourceIds + SELECT DISTINCT ResourceId + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId BETWEEN @StartId AND @EndId + AND IsHistory = 1 + AND (IsDeleted = 0 + OR @IncludeDeleted = 1) + OPTION (MAXDOP 1); + IF @@rowcount > 0 + INSERT INTO @SurrogateIds + SELECT ResourceSurrogateId + FROM (SELECT ResourceId, + ResourceSurrogateId, + row_number() OVER (PARTITION BY ResourceId ORDER BY ResourceSurrogateId DESC) AS RowId + FROM dbo.Resource WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId_Version)) + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceId IN (SELECT TOP (@DummyTop) ResourceId + FROM @ResourceIds) + AND ResourceSurrogateId 
BETWEEN @StartId AND @GlobalEndId) AS A + WHERE RowId = 1 + AND ResourceSurrogateId BETWEEN @StartId AND @EndId + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + END + SELECT ResourceTypeId, + ResourceId, + Version, + IsDeleted, + ResourceSurrogateId, + RequestMethod, + CONVERT (BIT, 1) AS IsMatch, + CONVERT (BIT, 0) AS IsPartial, + IsRawResourceMetaSet, + SearchParamHash, + RawResource + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId BETWEEN @StartId AND @EndId + AND (IsHistory = 0 + OR @IncludeHistory = 1) + AND (IsDeleted = 0 + OR @IncludeDeleted = 1) + UNION ALL + SELECT ResourceTypeId, + ResourceId, + Version, + IsDeleted, + ResourceSurrogateId, + RequestMethod, + CONVERT (BIT, 1) AS IsMatch, + CONVERT (BIT, 0) AS IsPartial, + IsRawResourceMetaSet, + SearchParamHash, + RawResource + FROM @SurrogateIds + INNER JOIN + dbo.Resource + ON ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId = MaxSurrogateId + WHERE IsHistory = 1 + AND (IsDeleted = 0 + OR @IncludeDeleted = 1) + OPTION (MAXDOP 1); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetResourceSearchParamStats +@Table VARCHAR (100)=NULL, @ResourceTypeId SMALLINT=NULL, @SearchParamId SMALLINT=NULL +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (200) = 'T=' + isnull(@Table, 'NULL') + ' RT=' + isnull(CONVERT (VARCHAR, @ResourceTypeId), 'NULL') + ' SP=' + isnull(CONVERT (VARCHAR, @SearchParamId), 'NULL'), @st AS DATETIME = getUTCdate(); +BEGIN TRY + SELECT T.name AS TableName, + S.name AS StatsName, + db_name() AS DatabaseName + FROM sys.stats AS S + INNER JOIN + sys.tables AS T + ON T.object_id = S.object_id + WHERE T.name LIKE '%SearchParam' + AND T.name <> 
'SearchParam' + AND S.name LIKE 'ST[_]%' + AND (T.name LIKE @Table + OR @Table IS NULL) + AND (S.name LIKE '%ResourceTypeId[_]' + CONVERT (VARCHAR, @ResourceTypeId) + '[_]%' + OR @ResourceTypeId IS NULL) + AND (S.name LIKE '%SearchParamId[_]' + CONVERT (VARCHAR, @SearchParamId) + OR @SearchParamId IS NULL); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Rows = @@rowcount, @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetResourceSurrogateIdRanges +@ResourceTypeId SMALLINT, @StartId BIGINT, @EndId BIGINT, @RangeSize INT, @NumberOfRanges INT=100, @Up BIT=1 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetResourceSurrogateIdRanges', @Mode AS VARCHAR (100) = 'RT=' + isnull(CONVERT (VARCHAR, @ResourceTypeId), 'NULL') + ' S=' + isnull(CONVERT (VARCHAR, @StartId), 'NULL') + ' E=' + isnull(CONVERT (VARCHAR, @EndId), 'NULL') + ' R=' + isnull(CONVERT (VARCHAR, @RangeSize), 'NULL') + ' UP=' + isnull(CONVERT (VARCHAR, @Up), 'NULL'), @st AS DATETIME = getUTCdate(); +BEGIN TRY + IF @Up = 1 + SELECT RangeId, + min(ResourceSurrogateId), + max(ResourceSurrogateId), + count(*) + FROM (SELECT isnull(CONVERT (INT, (row_number() OVER (ORDER BY ResourceSurrogateId) - 1) / @RangeSize), 0) AS RangeId, + ResourceSurrogateId + FROM (SELECT TOP (@RangeSize * @NumberOfRanges) ResourceSurrogateId + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId >= @StartId + AND ResourceSurrogateId <= @EndId + ORDER BY ResourceSurrogateId) AS A) AS A + GROUP BY RangeId + OPTION (MAXDOP 1); + ELSE + SELECT RangeId, + min(ResourceSurrogateId), + max(ResourceSurrogateId), + count(*) + FROM (SELECT isnull(CONVERT (INT, (row_number() OVER (ORDER BY ResourceSurrogateId) - 1) / @RangeSize), 0) AS RangeId, + ResourceSurrogateId + FROM (SELECT TOP (@RangeSize * @NumberOfRanges) 
ResourceSurrogateId + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId >= @StartId + AND ResourceSurrogateId <= @EndId + ORDER BY ResourceSurrogateId DESC) AS A) AS A + GROUP BY RangeId + OPTION (MAXDOP 1); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetResourceVersions +@ResourceDateKeys dbo.ResourceDateKeyList READONLY +AS +SET NOCOUNT ON; +DECLARE @st AS DATETIME = getUTCdate(), @SP AS VARCHAR (100) = 'GetResourceVersions', @Mode AS VARCHAR (100) = 'Rows=' + CONVERT (VARCHAR, (SELECT count(*) + FROM @ResourceDateKeys)), @DummyTop AS BIGINT = 9223372036854775807; +BEGIN TRY + SELECT A.ResourceTypeId, + A.ResourceId, + A.ResourceSurrogateId, + CASE WHEN EXISTS (SELECT * + FROM dbo.Resource AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId) THEN 0 WHEN isnull(U.Version, 1) - isnull(L.Version, 0) > 1 THEN isnull(U.Version, 1) - 1 ELSE 0 END AS Version + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceDateKeys) AS A OUTER APPLY (SELECT TOP 1 * + FROM dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId_Version)) + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + AND B.ResourceSurrogateId < A.ResourceSurrogateId + ORDER BY B.ResourceSurrogateId DESC) AS L OUTER APPLY (SELECT TOP 1 * + FROM dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId_Version)) + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + AND B.ResourceSurrogateId > A.ResourceSurrogateId + ORDER BY B.ResourceSurrogateId) AS U + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN 
CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetSearchParamStatuses +AS +SET NOCOUNT ON; +SELECT SearchParamId, + Uri, + Status, + LastUpdated, + IsPartiallySupported +FROM dbo.SearchParam; + +GO +CREATE PROCEDURE dbo.GetTransactions +@StartNotInclusiveTranId BIGINT, @EndInclusiveTranId BIGINT, @EndDate DATETIME=NULL +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = 'ST=' + CONVERT (VARCHAR, @StartNotInclusiveTranId) + ' ET=' + CONVERT (VARCHAR, @EndInclusiveTranId) + ' ED=' + isnull(CONVERT (VARCHAR, @EndDate, 121), 'NULL'), @st AS DATETIME = getUTCdate(); +IF @EndDate IS NULL + SET @EndDate = getUTCdate(); +SELECT SurrogateIdRangeFirstValue, + VisibleDate, + InvisibleHistoryRemovedDate +FROM dbo.Transactions +WHERE SurrogateIdRangeFirstValue > @StartNotInclusiveTranId + AND SurrogateIdRangeFirstValue <= @EndInclusiveTranId + AND EndDate <= @EndDate +ORDER BY SurrogateIdRangeFirstValue; +EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; + +GO +CREATE PROCEDURE dbo.GetUsedResourceTypes +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetUsedResourceTypes', @Mode AS VARCHAR (100) = '', @st AS DATETIME = getUTCdate(); +BEGIN TRY + SELECT ResourceTypeId, + Name + FROM dbo.ResourceType AS A + WHERE EXISTS (SELECT * + FROM dbo.Resource AS B + WHERE B.ResourceTypeId = A.ResourceTypeId); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.HardDeleteResource +@ResourceTypeId SMALLINT, @ResourceId VARCHAR (64), @KeepCurrentVersion BIT, @IsResourceChangeCaptureEnabled BIT +AS +SET NOCOUNT ON; +DECLARE @SP AS 
VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (200) = 'RT=' + CONVERT (VARCHAR, @ResourceTypeId) + ' R=' + @ResourceId + ' V=' + CONVERT (VARCHAR, @KeepCurrentVersion) + ' CC=' + CONVERT (VARCHAR, @IsResourceChangeCaptureEnabled), @st AS DATETIME = getUTCdate(), @TransactionId AS BIGINT; +BEGIN TRY + IF @IsResourceChangeCaptureEnabled = 1 + EXECUTE dbo.MergeResourcesBeginTransaction @Count = 1, @TransactionId = @TransactionId OUTPUT; + IF @KeepCurrentVersion = 0 + BEGIN TRANSACTION; + DECLARE @SurrogateIds TABLE ( + ResourceSurrogateId BIGINT NOT NULL); + IF @IsResourceChangeCaptureEnabled = 1 + AND NOT EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = 'InvisibleHistory.IsEnabled' + AND Number = 0) + UPDATE dbo.Resource + SET IsDeleted = 1, + RawResource = 0xF, + SearchParamHash = NULL, + HistoryTransactionId = @TransactionId + OUTPUT deleted.ResourceSurrogateId INTO @SurrogateIds + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceId = @ResourceId + AND (@KeepCurrentVersion = 0 + OR IsHistory = 1) + AND RawResource <> 0xF; + ELSE + DELETE dbo.Resource + OUTPUT deleted.ResourceSurrogateId INTO @SurrogateIds + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceId = @ResourceId + AND (@KeepCurrentVersion = 0 + OR IsHistory = 1) + AND RawResource <> 0xF; + IF @KeepCurrentVersion = 0 + BEGIN + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.ResourceWriteClaim AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.ReferenceSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE 
B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenText AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.StringSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.UriSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.NumberSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.QuantitySearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.DateTimeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.ReferenceTokenCompositeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenTokenCompositeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenDateTimeCompositeSearchParam AS B WITH (INDEX 
(1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenQuantityCompositeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenStringCompositeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenNumberNumberCompositeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + END + IF @@trancount > 0 + COMMIT TRANSACTION; + IF @IsResourceChangeCaptureEnabled = 1 + EXECUTE dbo.MergeResourcesCommitTransaction @TransactionId; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.InitDefrag +@QueueType TINYINT, @GroupId BIGINT, @DefragItems INT=NULL OUTPUT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'InitDefrag', @st AS DATETIME = getUTCdate(), @ObjectId AS INT, @msg AS VARCHAR (1000), @Rows AS INT, @MinFragPct AS INT = isnull((SELECT Number + FROM dbo.Parameters + WHERE Id = 'Defrag.MinFragPct'), 10), @MinSizeGB AS FLOAT = isnull((SELECT Number + FROM dbo.Parameters + WHERE Id = 'Defrag.MinSizeGB'), 0.1), @DefinitionsSorted AS StringList; +DECLARE @Mode AS VARCHAR (200) = 'G=' + CONVERT (VARCHAR, @GroupId) + ' MF=' + CONVERT (VARCHAR, @MinFragPct) + ' MS=' + CONVERT (VARCHAR, @MinSizeGB); +DECLARE @Definitions AS 
TABLE ( + Def VARCHAR (900) PRIMARY KEY, + FragGB FLOAT ); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + SELECT * + INTO #filter + FROM (SELECT object_id, + sum(reserved_page_count * 8.0 / 1024 / 1024) AS ReservedGB + FROM sys.dm_db_partition_stats AS A + WHERE object_id IN (SELECT object_id + FROM sys.objects + WHERE type = 'U' + AND name NOT IN ('EventLog')) + GROUP BY object_id) AS A + WHERE ReservedGB > @MinSizeGB; + WHILE EXISTS (SELECT * + FROM #filter) + BEGIN + SET @ObjectId = (SELECT TOP 1 object_id + FROM #filter + ORDER BY ReservedGB DESC); + INSERT INTO @Definitions + SELECT object_name(@ObjectId) + ';' + I.name + ';' + CONVERT (VARCHAR, partition_number) + ';' + CONVERT (VARCHAR, CASE WHEN EXISTS (SELECT * + FROM sys.partition_schemes AS PS + WHERE PS.data_space_id = I.data_space_id) THEN 1 ELSE 0 END) + ';' + CONVERT (VARCHAR, (SELECT sum(reserved_page_count) + FROM sys.dm_db_partition_stats AS S + WHERE S.object_id = A.object_id + AND S.index_id = A.index_id + AND S.partition_number = A.partition_number) * 8.0 / 1024 / 1024), + FragGB + FROM (SELECT object_id, + index_id, + partition_number, + A.avg_fragmentation_in_percent * A.page_count * 8.0 / 1024 / 1024 / 100 AS FragGB + FROM sys.dm_db_index_physical_stats(db_id(), @ObjectId, NULL, NULL, 'LIMITED') AS A + WHERE index_id > 0 + AND avg_fragmentation_in_percent >= @MinFragPct + AND A.page_count > 500) AS A + INNER JOIN + sys.indexes AS I + ON I.object_id = A.object_id + AND I.index_id = A.index_id; + SET @Rows = @@rowcount; + SET @msg = object_name(@ObjectId); + EXECUTE dbo.LogEvent @Process = @SP, @Status = 'Run', @Mode = @Mode, @Target = '@Definitions', @Action = 'Insert', @Rows = @Rows, @Text = @msg; + DELETE #filter + WHERE object_id = @ObjectId; + END + INSERT INTO @DefinitionsSorted + SELECT Def + ';' + CONVERT (VARCHAR, FragGB) + FROM @Definitions + ORDER BY FragGB DESC; + SET @DefragItems = @@rowcount; + IF @DefragItems > 0 + EXECUTE dbo.EnqueueJobs 
@QueueType = @QueueType, @Definitions = @DefinitionsSorted, @GroupId = @GroupId, @ForceOneActiveJobGroup = 1; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.InitializeIndexProperties +AS +SET NOCOUNT ON; +INSERT INTO dbo.IndexProperties (TableName, IndexName, PropertyName, PropertyValue) +SELECT Tbl, + Ind, + 'DATA_COMPRESSION', + isnull(data_comp, 'NONE') +FROM (SELECT O.Name AS Tbl, + I.Name AS Ind, + (SELECT TOP 1 CASE WHEN data_compression_desc = 'PAGE' THEN 'PAGE' END + FROM sys.partitions AS P + WHERE P.object_id = I.object_id + AND I.index_id = P.index_id) AS data_comp + FROM sys.indexes AS I + INNER JOIN + sys.objects AS O + ON O.object_id = I.object_id + WHERE O.type = 'u' + AND EXISTS (SELECT * + FROM sys.partition_schemes AS PS + WHERE PS.data_space_id = I.data_space_id + AND name = 'PartitionScheme_ResourceTypeId')) AS A +WHERE NOT EXISTS (SELECT * + FROM dbo.IndexProperties + WHERE TableName = Tbl + AND IndexName = Ind); + +GO +CREATE PROCEDURE dbo.LogEvent +@Process VARCHAR (100), @Status VARCHAR (10), @Mode VARCHAR (200)=NULL, @Action VARCHAR (20)=NULL, @Target VARCHAR (100)=NULL, @Rows BIGINT=NULL, @Start DATETIME=NULL, @Text NVARCHAR (3500)=NULL, @EventId BIGINT=NULL OUTPUT, @Retry INT=NULL +AS +SET NOCOUNT ON; +DECLARE @ErrorNumber AS INT = error_number(), @ErrorMessage AS VARCHAR (1000) = '', @TranCount AS INT = @@trancount, @DoWork AS BIT = 0, @NumberAdded AS BIT; +IF @ErrorNumber IS NOT NULL + OR @Status IN ('Warn', 'Error') + SET @DoWork = 1; +IF @DoWork = 0 + SET @DoWork = CASE WHEN EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = isnull(@Process, '') + AND Char = 'LogEvent') THEN 1 ELSE 0 END; +IF @DoWork = 0 + RETURN; +IF @ErrorNumber IS NOT NULL + SET @ErrorMessage = CASE WHEN @Retry IS NOT NULL THEN 'Retry ' + CONVERT 
(VARCHAR, @Retry) + ', ' ELSE '' END + 'Error ' + CONVERT (VARCHAR, error_number()) + ': ' + CONVERT (VARCHAR (1000), error_message()) + ', Level ' + CONVERT (VARCHAR, error_severity()) + ', State ' + CONVERT (VARCHAR, error_state()) + CASE WHEN error_procedure() IS NOT NULL THEN ', Procedure ' + error_procedure() ELSE '' END + ', Line ' + CONVERT (VARCHAR, error_line()); +IF @TranCount > 0 + AND @ErrorNumber IS NOT NULL + ROLLBACK; +IF databasepropertyex(db_name(), 'UpdateAbility') = 'READ_WRITE' + BEGIN + INSERT INTO dbo.EventLog (Process, Status, Mode, Action, Target, Rows, Milliseconds, EventDate, EventText, SPID, HostName) + SELECT @Process, + @Status, + @Mode, + @Action, + @Target, + @Rows, + datediff(millisecond, @Start, getUTCdate()), + getUTCdate() AS EventDate, + CASE WHEN @ErrorNumber IS NULL THEN @Text ELSE @ErrorMessage + CASE WHEN isnull(@Text, '') <> '' THEN '. ' + @Text ELSE '' END END AS Text, + @@SPID, + host_name() AS HostName; + SET @EventId = scope_identity(); + END +IF @TranCount > 0 + AND @ErrorNumber IS NOT NULL + BEGIN TRANSACTION; + +GO +CREATE PROCEDURE dbo.LogSchemaMigrationProgress +@message VARCHAR (MAX) +AS +INSERT INTO dbo.SchemaMigrationProgress (Message) +VALUES (@message); + +GO +CREATE PROCEDURE dbo.MergeResources +@AffectedRows INT=0 OUTPUT, @RaiseExceptionOnConflict BIT=1, @IsResourceChangeCaptureEnabled BIT=0, @TransactionId BIGINT=NULL, @SingleTransaction BIT=1, @Resources dbo.ResourceList READONLY, @ResourceWriteClaims dbo.ResourceWriteClaimList READONLY, @ReferenceSearchParams dbo.ReferenceSearchParamList READONLY, @TokenSearchParams dbo.TokenSearchParamList READONLY, @TokenTexts dbo.TokenTextList READONLY, @StringSearchParams dbo.StringSearchParamList READONLY, @UriSearchParams dbo.UriSearchParamList READONLY, @NumberSearchParams dbo.NumberSearchParamList READONLY, @QuantitySearchParams dbo.QuantitySearchParamList READONLY, @DateTimeSearchParms dbo.DateTimeSearchParamList READONLY, @ReferenceTokenCompositeSearchParams 
dbo.ReferenceTokenCompositeSearchParamList READONLY, @TokenTokenCompositeSearchParams dbo.TokenTokenCompositeSearchParamList READONLY, @TokenDateTimeCompositeSearchParams dbo.TokenDateTimeCompositeSearchParamList READONLY, @TokenQuantityCompositeSearchParams dbo.TokenQuantityCompositeSearchParamList READONLY, @TokenStringCompositeSearchParams dbo.TokenStringCompositeSearchParamList READONLY, @TokenNumberNumberCompositeSearchParams dbo.TokenNumberNumberCompositeSearchParamList READONLY +AS +SET NOCOUNT ON; +DECLARE @st AS DATETIME = getUTCdate(), @SP AS VARCHAR (100) = object_name(@@procid), @DummyTop AS BIGINT = 9223372036854775807, @InitialTranCount AS INT = @@trancount, @IsRetry AS BIT = 0; +DECLARE @Mode AS VARCHAR (200) = isnull((SELECT 'RT=[' + CONVERT (VARCHAR, min(ResourceTypeId)) + ',' + CONVERT (VARCHAR, max(ResourceTypeId)) + '] Sur=[' + CONVERT (VARCHAR, min(ResourceSurrogateId)) + ',' + CONVERT (VARCHAR, max(ResourceSurrogateId)) + '] V=' + CONVERT (VARCHAR, max(Version)) + ' Rows=' + CONVERT (VARCHAR, count(*)) + FROM @Resources), 'Input=Empty'); +SET @Mode += ' E=' + CONVERT (VARCHAR, @RaiseExceptionOnConflict) + ' CC=' + CONVERT (VARCHAR, @IsResourceChangeCaptureEnabled) + ' IT=' + CONVERT (VARCHAR, @InitialTranCount) + ' T=' + isnull(CONVERT (VARCHAR, @TransactionId), 'NULL'); +SET @AffectedRows = 0; +BEGIN TRY + DECLARE @Existing AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + SurrogateId BIGINT NOT NULL PRIMARY KEY (ResourceTypeId, SurrogateId)); + DECLARE @ResourceInfos AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + SurrogateId BIGINT NOT NULL, + Version INT NOT NULL, + KeepHistory BIT NOT NULL, + PreviousVersion INT NULL, + PreviousSurrogateId BIGINT NULL PRIMARY KEY (ResourceTypeId, SurrogateId)); + DECLARE @PreviousSurrogateIds AS TABLE ( + TypeId SMALLINT NOT NULL, + SurrogateId BIGINT NOT NULL PRIMARY KEY (TypeId, SurrogateId), + KeepHistory BIT ); + IF @SingleTransaction = 0 + AND isnull((SELECT Number + FROM dbo.Parameters + WHERE Id = 
'MergeResources.NoTransaction.IsEnabled'), 0) = 0 + SET @SingleTransaction = 1; + SET @Mode += ' ST=' + CONVERT (VARCHAR, @SingleTransaction); + IF @InitialTranCount = 0 + BEGIN + IF EXISTS (SELECT * + FROM @Resources AS A + INNER JOIN + dbo.Resource AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId) + BEGIN + BEGIN TRANSACTION; + INSERT INTO @Existing (ResourceTypeId, SurrogateId) + SELECT B.ResourceTypeId, + B.ResourceSurrogateId + FROM (SELECT TOP (@DummyTop) * + FROM @Resources) AS A + INNER JOIN + dbo.Resource AS B WITH (ROWLOCK, HOLDLOCK) + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + WHERE B.IsHistory = 0 + AND B.ResourceId = A.ResourceId + AND B.Version = A.Version + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + IF @@rowcount = (SELECT count(*) + FROM @Resources) + SET @IsRetry = 1; + IF @IsRetry = 0 + COMMIT TRANSACTION; + END + END + SET @Mode += ' R=' + CONVERT (VARCHAR, @IsRetry); + IF @SingleTransaction = 1 + AND @@trancount = 0 + BEGIN TRANSACTION; + IF @IsRetry = 0 + BEGIN + INSERT INTO @ResourceInfos (ResourceTypeId, SurrogateId, Version, KeepHistory, PreviousVersion, PreviousSurrogateId) + SELECT A.ResourceTypeId, + A.ResourceSurrogateId, + A.Version, + A.KeepHistory, + B.Version, + B.ResourceSurrogateId + FROM (SELECT TOP (@DummyTop) * + FROM @Resources + WHERE HasVersionToCompare = 1) AS A + LEFT OUTER JOIN + dbo.Resource AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + AND B.IsHistory = 0 + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + IF @RaiseExceptionOnConflict = 1 + AND EXISTS (SELECT * + FROM @ResourceInfos + WHERE PreviousVersion IS NOT NULL + AND Version <= PreviousVersion) + THROW 50409, 'Resource has been recently updated or added, please compare the resource content in code for any duplicate updates', 1; + INSERT INTO @PreviousSurrogateIds + SELECT ResourceTypeId, + PreviousSurrogateId, + 
KeepHistory + FROM @ResourceInfos + WHERE PreviousSurrogateId IS NOT NULL; + IF @@rowcount > 0 + BEGIN + UPDATE dbo.Resource + SET IsHistory = 1 + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId + AND KeepHistory = 1); + SET @AffectedRows += @@rowcount; + IF @IsResourceChangeCaptureEnabled = 1 + AND NOT EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = 'InvisibleHistory.IsEnabled' + AND Number = 0) + UPDATE dbo.Resource + SET IsHistory = 1, + RawResource = 0xF, + SearchParamHash = NULL, + HistoryTransactionId = @TransactionId + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId + AND KeepHistory = 0); + ELSE + DELETE dbo.Resource + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId + AND KeepHistory = 0); + SET @AffectedRows += @@rowcount; + DELETE dbo.ResourceWriteClaim + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.ReferenceSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenText + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.StringSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.UriSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE 
TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.NumberSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.QuantitySearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.DateTimeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.ReferenceTokenCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenTokenCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenDateTimeCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenQuantityCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenStringCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenNumberNumberCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + END + INSERT INTO dbo.Resource (ResourceTypeId, 
ResourceId, Version, IsHistory, ResourceSurrogateId, IsDeleted, RequestMethod, RawResource, IsRawResourceMetaSet, SearchParamHash, TransactionId) + SELECT ResourceTypeId, + ResourceId, + Version, + IsHistory, + ResourceSurrogateId, + IsDeleted, + RequestMethod, + RawResource, + IsRawResourceMetaSet, + SearchParamHash, + @TransactionId + FROM @Resources; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.ResourceWriteClaim (ResourceSurrogateId, ClaimTypeId, ClaimValue) + SELECT ResourceSurrogateId, + ClaimTypeId, + ClaimValue + FROM @ResourceWriteClaims; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.ReferenceSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId, ReferenceResourceVersion) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + BaseUri, + ReferenceResourceTypeId, + ReferenceResourceId, + ReferenceResourceVersion + FROM @ReferenceSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, Code, CodeOverflow) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId, + Code, + CodeOverflow + FROM @TokenSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenText (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Text + FROM @TokenTexts; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.StringSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text, TextOverflow, IsMin, IsMax) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Text, + TextOverflow, + IsMin, + IsMax + FROM @StringSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.UriSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Uri) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Uri + FROM @UriSearchParams; + SET @AffectedRows += @@rowcount; + 
INSERT INTO dbo.NumberSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SingleValue, + LowValue, + HighValue + FROM @NumberSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.QuantitySearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId, + QuantityCodeId, + SingleValue, + LowValue, + HighValue + FROM @QuantitySearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.DateTimeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, IsMin, IsMax) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + StartDateTime, + EndDateTime, + IsLongerThanADay, + IsMin, + IsMax + FROM @DateTimeSearchParms; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.ReferenceTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri1, ReferenceResourceTypeId1, ReferenceResourceId1, ReferenceResourceVersion1, SystemId2, Code2, CodeOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + BaseUri1, + ReferenceResourceTypeId1, + ReferenceResourceId1, + ReferenceResourceVersion1, + SystemId2, + Code2, + CodeOverflow2 + FROM @ReferenceTokenCompositeSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SystemId2, Code2, CodeOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SystemId2, + Code2, + CodeOverflow2 + FROM @TokenTokenCompositeSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenDateTimeCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, 
CodeOverflow1, StartDateTime2, EndDateTime2, IsLongerThanADay2)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, StartDateTime2, EndDateTime2, IsLongerThanADay2
        FROM @TokenDateTimeCompositeSearchParams;
      SET @AffectedRows += @@rowcount;
      INSERT INTO dbo.TokenQuantityCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, SystemId2, QuantityCodeId2, LowValue2, HighValue2)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, SystemId2, QuantityCodeId2, LowValue2, HighValue2
        FROM @TokenQuantityCompositeSearchParams;
      SET @AffectedRows += @@rowcount;
      INSERT INTO dbo.TokenStringCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, Text2, TextOverflow2)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, Text2, TextOverflow2
        FROM @TokenStringCompositeSearchParams;
      SET @AffectedRows += @@rowcount;
      INSERT INTO dbo.TokenNumberNumberCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, LowValue2, HighValue2, SingleValue3, LowValue3, HighValue3, HasRange)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, LowValue2, HighValue2, SingleValue3, LowValue3, HighValue3, HasRange
        FROM @TokenNumberNumberCompositeSearchParams;
      SET @AffectedRows += @@rowcount;
    END
  ELSE
    BEGIN
      -- Retry path: insert only rows whose resource is already in @Existing and that are
      -- not yet present in the target table, so a retried merge is idempotent.
      -- Each NOT EXISTS dedup check MUST query the same table the INSERT targets.
      INSERT INTO dbo.ResourceWriteClaim (ResourceSurrogateId, ClaimTypeId, ClaimValue)
      SELECT ResourceSurrogateId, ClaimTypeId, ClaimValue
        FROM (SELECT TOP (@DummyTop) * FROM @ResourceWriteClaims) AS A
        WHERE EXISTS (SELECT * FROM @Existing AS B WHERE B.SurrogateId = A.ResourceSurrogateId)
          AND NOT EXISTS (SELECT * FROM dbo.ResourceWriteClaim AS C WHERE C.ResourceSurrogateId = A.ResourceSurrogateId)
        OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1));
      SET @AffectedRows += @@rowcount;
      INSERT INTO dbo.ReferenceSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId, ReferenceResourceVersion)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId, ReferenceResourceVersion
        FROM (SELECT TOP (@DummyTop) * FROM @ReferenceSearchParams) AS A
        WHERE EXISTS (SELECT * FROM @Existing AS B WHERE B.ResourceTypeId = A.ResourceTypeId AND B.SurrogateId = A.ResourceSurrogateId)
          AND NOT EXISTS (SELECT * FROM dbo.ReferenceSearchParam AS C WHERE C.ResourceTypeId = A.ResourceTypeId AND C.ResourceSurrogateId = A.ResourceSurrogateId)
        OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1));
      SET @AffectedRows += @@rowcount;
      INSERT INTO dbo.TokenSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, Code, CodeOverflow)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, Code, CodeOverflow
        FROM (SELECT TOP (@DummyTop) * FROM @TokenSearchParams) AS A
        WHERE EXISTS (SELECT * FROM @Existing AS B WHERE B.ResourceTypeId = A.ResourceTypeId AND B.SurrogateId = A.ResourceSurrogateId)
          AND NOT EXISTS (SELECT * FROM dbo.TokenSearchParam AS C WHERE C.ResourceTypeId = A.ResourceTypeId AND C.ResourceSurrogateId = A.ResourceSurrogateId)
        OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1));
      SET @AffectedRows += @@rowcount;
      INSERT INTO dbo.TokenText (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, Text
        FROM (SELECT TOP (@DummyTop) * FROM @TokenTexts) AS A
        WHERE EXISTS (SELECT * FROM @Existing AS B WHERE B.ResourceTypeId = A.ResourceTypeId AND B.SurrogateId = A.ResourceSurrogateId)
          -- BUG FIX: dedup check previously queried dbo.TokenSearchParam; it must query the
          -- insert target dbo.TokenText, otherwise retries re-insert and hit PK violations.
          AND NOT EXISTS (SELECT * FROM dbo.TokenText AS C WHERE C.ResourceTypeId = A.ResourceTypeId AND C.ResourceSurrogateId = A.ResourceSurrogateId)
        OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1));
      SET @AffectedRows += @@rowcount;
      INSERT INTO dbo.StringSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text, TextOverflow, IsMin, IsMax)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, Text, TextOverflow, IsMin, IsMax
        FROM (SELECT TOP (@DummyTop) * FROM @StringSearchParams) AS A
        WHERE EXISTS (SELECT * FROM @Existing AS B WHERE B.ResourceTypeId = A.ResourceTypeId AND B.SurrogateId = A.ResourceSurrogateId)
          -- BUG FIX: dedup check previously queried dbo.TokenText; it must query the
          -- insert target dbo.StringSearchParam.
          AND NOT EXISTS (SELECT * FROM dbo.StringSearchParam AS C WHERE C.ResourceTypeId = A.ResourceTypeId AND C.ResourceSurrogateId = A.ResourceSurrogateId)
        OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1));
      SET @AffectedRows += @@rowcount;
      INSERT INTO dbo.UriSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Uri)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, Uri
        FROM (SELECT TOP (@DummyTop) * FROM @UriSearchParams) AS A
        WHERE EXISTS (SELECT * FROM @Existing AS B WHERE B.ResourceTypeId = A.ResourceTypeId AND B.SurrogateId = A.ResourceSurrogateId)
          AND NOT EXISTS (SELECT * FROM dbo.UriSearchParam AS C WHERE C.ResourceTypeId = A.ResourceTypeId AND C.ResourceSurrogateId = A.ResourceSurrogateId)
        OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1));
      SET @AffectedRows += @@rowcount;
      INSERT INTO dbo.NumberSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue
        FROM (SELECT TOP (@DummyTop) * FROM @NumberSearchParams) AS A
        WHERE EXISTS (SELECT * FROM @Existing AS B WHERE B.ResourceTypeId = A.ResourceTypeId AND B.SurrogateId = A.ResourceSurrogateId)
          AND NOT EXISTS (SELECT * FROM dbo.NumberSearchParam AS C WHERE C.ResourceTypeId = A.ResourceTypeId AND C.ResourceSurrogateId = A.ResourceSurrogateId)
        OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1));
      SET @AffectedRows += @@rowcount;
      INSERT INTO dbo.QuantitySearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue
        FROM (SELECT TOP (@DummyTop) * FROM @QuantitySearchParams) AS A
        WHERE EXISTS (SELECT * FROM @Existing AS B WHERE B.ResourceTypeId = A.ResourceTypeId AND B.SurrogateId = A.ResourceSurrogateId)
          AND NOT EXISTS (SELECT * FROM dbo.QuantitySearchParam AS C WHERE C.ResourceTypeId = A.ResourceTypeId AND C.ResourceSurrogateId = A.ResourceSurrogateId)
        OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1));
      SET @AffectedRows += @@rowcount;
      -- NOTE(review): @DateTimeSearchParms (sic) matches the declared TVP name — do not "fix"
      -- the spelling here without also changing the declaration (out of view).
      INSERT INTO dbo.DateTimeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, IsMin, IsMax)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, IsMin, IsMax
        FROM (SELECT TOP (@DummyTop) * FROM @DateTimeSearchParms) AS A
        WHERE EXISTS (SELECT * FROM @Existing AS B WHERE B.ResourceTypeId = A.ResourceTypeId AND B.SurrogateId = A.ResourceSurrogateId)
          -- BUG FIX: dedup check previously queried dbo.TokenSearchParam; it must query the
          -- insert target dbo.DateTimeSearchParam.
          AND NOT EXISTS (SELECT * FROM dbo.DateTimeSearchParam AS C WHERE C.ResourceTypeId = A.ResourceTypeId AND C.ResourceSurrogateId = A.ResourceSurrogateId)
        OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1));
      SET @AffectedRows += @@rowcount;
      INSERT INTO dbo.ReferenceTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri1, ReferenceResourceTypeId1, ReferenceResourceId1, ReferenceResourceVersion1, SystemId2, Code2, CodeOverflow2)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri1, ReferenceResourceTypeId1, ReferenceResourceId1, ReferenceResourceVersion1, SystemId2, Code2, CodeOverflow2
        FROM (SELECT TOP (@DummyTop) * FROM @ReferenceTokenCompositeSearchParams) AS A
        WHERE EXISTS (SELECT * FROM @Existing AS B WHERE B.ResourceTypeId = A.ResourceTypeId AND B.SurrogateId = A.ResourceSurrogateId)
          -- BUG FIX: dedup check previously queried dbo.DateTimeSearchParam; it must query the
          -- insert target dbo.ReferenceTokenCompositeSearchParam.
          AND NOT EXISTS (SELECT * FROM dbo.ReferenceTokenCompositeSearchParam AS C WHERE C.ResourceTypeId = A.ResourceTypeId AND C.ResourceSurrogateId = A.ResourceSurrogateId)
        OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1));
      SET @AffectedRows += @@rowcount;
      INSERT INTO dbo.TokenTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SystemId2, Code2, CodeOverflow2)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SystemId2, Code2, CodeOverflow2
        FROM (SELECT TOP (@DummyTop) * FROM @TokenTokenCompositeSearchParams) AS A
        WHERE EXISTS (SELECT * FROM @Existing AS B WHERE B.ResourceTypeId = A.ResourceTypeId AND B.SurrogateId = A.ResourceSurrogateId)
          AND NOT EXISTS (SELECT * FROM dbo.TokenTokenCompositeSearchParam AS C WHERE C.ResourceTypeId = A.ResourceTypeId AND C.ResourceSurrogateId = A.ResourceSurrogateId)
        OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1));
      SET @AffectedRows += @@rowcount;
      INSERT INTO dbo.TokenDateTimeCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, StartDateTime2, EndDateTime2, IsLongerThanADay2)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, StartDateTime2, EndDateTime2, IsLongerThanADay2
        FROM (SELECT TOP (@DummyTop) * FROM @TokenDateTimeCompositeSearchParams) AS A
        WHERE EXISTS (SELECT * FROM @Existing AS B WHERE B.ResourceTypeId = A.ResourceTypeId AND B.SurrogateId = A.ResourceSurrogateId)
          AND NOT EXISTS (SELECT * FROM dbo.TokenDateTimeCompositeSearchParam AS C WHERE C.ResourceTypeId = A.ResourceTypeId AND C.ResourceSurrogateId = A.ResourceSurrogateId)
        OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1));
      SET @AffectedRows += @@rowcount;
      INSERT INTO dbo.TokenQuantityCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, SystemId2, QuantityCodeId2, LowValue2, HighValue2)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, SystemId2, QuantityCodeId2, LowValue2, HighValue2
        FROM (SELECT TOP (@DummyTop) * FROM @TokenQuantityCompositeSearchParams) AS A
        WHERE EXISTS (SELECT * FROM @Existing AS B WHERE B.ResourceTypeId = A.ResourceTypeId AND B.SurrogateId = A.ResourceSurrogateId)
          AND NOT EXISTS (SELECT * FROM dbo.TokenQuantityCompositeSearchParam AS C WHERE C.ResourceTypeId = A.ResourceTypeId AND C.ResourceSurrogateId = A.ResourceSurrogateId)
        OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1));
      SET @AffectedRows += @@rowcount;
      INSERT INTO dbo.TokenStringCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, Text2, TextOverflow2)
      SELECT ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, Text2, TextOverflow2
        FROM (SELECT TOP (@DummyTop) * FROM @TokenStringCompositeSearchParams) AS A
        WHERE EXISTS (SELECT * FROM @Existing AS B WHERE B.ResourceTypeId = A.ResourceTypeId AND B.SurrogateId = A.ResourceSurrogateId)
          AND NOT EXISTS (SELECT * FROM dbo.TokenStringCompositeSearchParam AS C WHERE C.ResourceTypeId = A.ResourceTypeId AND C.ResourceSurrogateId = A.ResourceSurrogateId)
        OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1));
      SET @AffectedRows += @@rowcount;
      INSERT INTO dbo.TokenNumberNumberCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, LowValue2, HighValue2, SingleValue3, LowValue3, HighValue3, HasRange)
      SELECT
ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, LowValue2, HighValue2, SingleValue3, LowValue3, HighValue3, HasRange
        FROM (SELECT TOP (@DummyTop) * FROM @TokenNumberNumberCompositeSearchParams) AS A
        WHERE EXISTS (SELECT * FROM @Existing AS B WHERE B.ResourceTypeId = A.ResourceTypeId AND B.SurrogateId = A.ResourceSurrogateId)
          AND NOT EXISTS (SELECT * FROM dbo.TokenNumberNumberCompositeSearchParam AS C WHERE C.ResourceTypeId = A.ResourceTypeId AND C.ResourceSurrogateId = A.ResourceSurrogateId)
        OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1));
      SET @AffectedRows += @@rowcount;
    END
  -- Post-insert bookkeeping: change capture, transaction commit, outer-transaction commit.
  IF @IsResourceChangeCaptureEnabled = 1
    EXECUTE dbo.CaptureResourceIdsForChanges @Resources;
  IF @TransactionId IS NOT NULL
    EXECUTE dbo.MergeResourcesCommitTransaction @TransactionId;
  IF @InitialTranCount = 0 AND @@trancount > 0
    COMMIT TRANSACTION;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @AffectedRows;
END TRY
BEGIN CATCH
  -- Roll back only the transaction this proc opened itself.
  IF @InitialTranCount = 0 AND @@trancount > 0
    ROLLBACK;
  -- Error 1750 (constraint creation failure) is rethrown without logging.
  IF error_number() = 1750
    THROW;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st;
  -- Map duplicate-key errors (2601/2627) on dbo.Resource to a 50409 conflict when requested.
  IF @RaiseExceptionOnConflict = 1 AND error_number() IN (2601, 2627) AND error_message() LIKE '%''dbo.Resource''%'
    THROW 50409, 'Resource has been recently updated or added, please compare the resource content in code for any duplicate updates', 1;
  ELSE
    THROW;
END CATCH

GO
-- Marks completed transactions as visible, in order, stopping at the first
-- not-yet-completed transaction so visibility never skips a gap.
CREATE PROCEDURE dbo.MergeResourcesAdvanceTransactionVisibility
@AffectedRows INT=0 OUTPUT
AS
SET NOCOUNT ON;
DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = '', @st AS DATETIME = getUTCdate(), @msg AS VARCHAR (1000), @MaxTransactionId AS BIGINT, @MinTransactionId AS BIGINT, @MinNotCompletedTransactionId AS BIGINT, @CurrentTransactionId AS BIGINT;
SET @AffectedRows = 0;
BEGIN TRY
  -- Start just above the current visibility watermark.
  EXECUTE dbo.MergeResourcesGetTransactionVisibility @MinTransactionId OUTPUT;
  SET @MinTransactionId += 1;
  -- Newest transaction id (ids are SurrogateIdRangeFirstValue values).
  SET @CurrentTransactionId = (SELECT TOP 1 SurrogateIdRangeFirstValue FROM dbo.Transactions ORDER BY SurrogateIdRangeFirstValue DESC);
  -- First incomplete transaction in range; sentinel (current+1) when none.
  SET @MinNotCompletedTransactionId = isnull((SELECT TOP 1 SurrogateIdRangeFirstValue FROM dbo.Transactions WHERE IsCompleted = 0 AND SurrogateIdRangeFirstValue BETWEEN @MinTransactionId AND @CurrentTransactionId ORDER BY SurrogateIdRangeFirstValue), @CurrentTransactionId + 1);
  -- Highest completed transaction strictly below the first incomplete one.
  SET @MaxTransactionId = (SELECT TOP 1 SurrogateIdRangeFirstValue FROM dbo.Transactions WHERE IsCompleted = 1 AND SurrogateIdRangeFirstValue BETWEEN @MinTransactionId AND @CurrentTransactionId AND SurrogateIdRangeFirstValue < @MinNotCompletedTransactionId ORDER BY SurrogateIdRangeFirstValue DESC);
  IF @MaxTransactionId >= @MinTransactionId
    BEGIN
      UPDATE A SET IsVisible = 1, VisibleDate = getUTCdate() FROM dbo.Transactions AS A WITH (INDEX (1)) WHERE SurrogateIdRangeFirstValue BETWEEN @MinTransactionId AND @CurrentTransactionId AND SurrogateIdRangeFirstValue <= @MaxTransactionId;
      SET @AffectedRows += @@rowcount;
    END
  SET @msg = 'Min=' + CONVERT (VARCHAR, @MinTransactionId) + ' C=' + CONVERT (VARCHAR, @CurrentTransactionId) + ' MinNC=' + CONVERT (VARCHAR, @MinNotCompletedTransactionId) + ' Max=' + CONVERT (VARCHAR, @MaxTransactionId);
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @AffectedRows, @Text = @msg;
END TRY
BEGIN CATCH
  IF @@trancount > 0
    ROLLBACK;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error';
  THROW;
END CATCH

GO
-- Allocates a new merge transaction: reserves @Count surrogate-id uniquifiers from a
-- sequence and derives the transaction id from the current UTC time (ms since 0001-01-01
-- scaled by 80000) plus the sequence range start. Must not run inside an outer transaction.
CREATE PROCEDURE dbo.MergeResourcesBeginTransaction
@Count INT, @TransactionId BIGINT OUTPUT, @SequenceRangeFirstValue INT=NULL OUTPUT, @HeartbeatDate DATETIME=NULL
AS
SET NOCOUNT ON;
DECLARE @SP AS VARCHAR (100) = 'MergeResourcesBeginTransaction', @Mode AS VARCHAR (200) = 'Cnt=' + CONVERT (VARCHAR, @Count), @st AS DATETIME = getUTCdate(), @FirstValueVar AS SQL_VARIANT, @LastValueVar AS SQL_VARIANT;
BEGIN TRY
  SET @TransactionId = NULL;
  IF @@trancount > 0
    RAISERROR ('MergeResourcesBeginTransaction cannot be called inside outer transaction.', 18, 127);
  SET @FirstValueVar = NULL;
  -- Retry when the sequence range wraps (first value > last value).
  WHILE @FirstValueVar IS NULL
    BEGIN
      EXECUTE sys.sp_sequence_get_range @sequence_name = 'dbo.ResourceSurrogateIdUniquifierSequence', @range_size = @Count, @range_first_value = @FirstValueVar OUTPUT, @range_last_value = @LastValueVar OUTPUT;
      SET @SequenceRangeFirstValue = CONVERT (INT, @FirstValueVar);
      IF @SequenceRangeFirstValue > CONVERT (INT, @LastValueVar)
        SET @FirstValueVar = NULL;
    END
  SET @TransactionId = datediff_big(millisecond, '0001-01-01', sysUTCdatetime()) * 80000 + @SequenceRangeFirstValue;
  INSERT INTO dbo.Transactions (SurrogateIdRangeFirstValue, SurrogateIdRangeLastValue, HeartbeatDate)
  SELECT @TransactionId, @TransactionId + @Count - 1, isnull(@HeartbeatDate, getUTCdate());
END TRY
BEGIN CATCH
  IF error_number() = 1750
    THROW;
  IF @@trancount > 0
    ROLLBACK;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error';
  THROW;
END CATCH

GO
-- Marks a transaction completed (success unless @FailureReason given). No-op (with log)
-- if it was already completed. Raises if the row is missing or not client-controlled
-- (unless @OverrideIsControlledByClientCheck = 1).
CREATE PROCEDURE dbo.MergeResourcesCommitTransaction
@TransactionId BIGINT, @FailureReason VARCHAR (MAX)=NULL, @OverrideIsControlledByClientCheck BIT=0
AS
SET NOCOUNT ON;
DECLARE @SP AS VARCHAR (100) = 'MergeResourcesCommitTransaction', @st AS DATETIME = getUTCdate(), @InitialTranCount AS INT = @@trancount, @IsCompletedBefore AS BIT, @Rows AS INT, @msg AS VARCHAR (1000);
DECLARE @Mode AS VARCHAR (200) = 'TR=' + CONVERT (VARCHAR, @TransactionId) + ' OC=' + isnull(CONVERT (VARCHAR, @OverrideIsControlledByClientCheck), 'NULL');
BEGIN TRY
  IF @InitialTranCount = 0
    BEGIN TRANSACTION;
  -- Captures prior IsCompleted into @IsCompletedBefore in the same statement.
  UPDATE dbo.Transactions SET IsCompleted = 1, @IsCompletedBefore = IsCompleted, EndDate = getUTCdate(), IsSuccess = CASE WHEN @FailureReason IS NULL THEN 1 ELSE 0 END, FailureReason = @FailureReason WHERE SurrogateIdRangeFirstValue = @TransactionId AND (IsControlledByClient = 1 OR @OverrideIsControlledByClientCheck = 1);
  SET @Rows = @@rowcount;
  IF @Rows = 0
    BEGIN
      SET @msg = 'Transaction [' + CONVERT (VARCHAR (20), @TransactionId) + '] is not controlled by client or does not exist.';
      RAISERROR (@msg, 18, 127);
    END
  IF @IsCompletedBefore = 1
    BEGIN
      -- Already completed: undo this update and exit quietly.
      IF @InitialTranCount = 0
        ROLLBACK;
      EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows, @Target = '@IsCompletedBefore', @Text = '=1';
      RETURN;
    END
  IF @InitialTranCount = 0
    COMMIT TRANSACTION;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows;
END TRY
BEGIN CATCH
  IF @InitialTranCount = 0 AND @@trancount > 0
    ROLLBACK;
  IF error_number() = 1750
    THROW;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error';
  THROW;
END CATCH

GO
-- Deletes history rows for a transaction, one resource type at a time, counting rows removed.
CREATE PROCEDURE dbo.MergeResourcesDeleteInvisibleHistory
@TransactionId BIGINT, @AffectedRows INT=NULL OUTPUT
AS
SET NOCOUNT ON;
DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = 'T=' + CONVERT (VARCHAR, @TransactionId), @st AS DATETIME = getUTCdate(), @TypeId AS SMALLINT;
SET @AffectedRows = 0;
BEGIN TRY
  DECLARE @Types TABLE (TypeId SMALLINT PRIMARY KEY, Name VARCHAR (100));
  INSERT INTO @Types
  EXECUTE dbo.GetUsedResourceTypes ;
  WHILE EXISTS (SELECT * FROM @Types)
    BEGIN
      SET @TypeId = (SELECT TOP 1 TypeId FROM @Types ORDER BY TypeId);
      -- NOTE(review): RawResource = 0xF looks like the invisible-history placeholder value — confirm.
      DELETE dbo.Resource WHERE ResourceTypeId = @TypeId AND HistoryTransactionId = @TransactionId AND RawResource = 0xF;
      SET @AffectedRows += @@rowcount;
      DELETE @Types WHERE TypeId = @TypeId;
    END
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @AffectedRows;
END TRY
BEGIN CATCH
  IF error_number() = 1750
    THROW;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error';
  THROW;
END CATCH

GO
-- Returns ids of incomplete transactions whose heartbeat is older than @TimeoutSec,
-- considering only transactions above the visibility watermark.
CREATE PROCEDURE dbo.MergeResourcesGetTimeoutTransactions
@TimeoutSec INT
AS
SET NOCOUNT ON;
DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = 'T=' + CONVERT (VARCHAR, @TimeoutSec), @st AS DATETIME = getUTCdate(), @MinTransactionId AS BIGINT;
BEGIN TRY
  EXECUTE dbo.MergeResourcesGetTransactionVisibility @MinTransactionId OUTPUT;
  SELECT SurrogateIdRangeFirstValue FROM dbo.Transactions WHERE SurrogateIdRangeFirstValue > @MinTransactionId AND IsCompleted = 0 AND datediff(second, HeartbeatDate, getUTCdate()) > @TimeoutSec ORDER BY SurrogateIdRangeFirstValue;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount;
END TRY
BEGIN CATCH
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error';
  THROW;
END CATCH

GO
-- Returns the visibility watermark: the highest visible transaction id, or -1 when none.
CREATE PROCEDURE dbo.MergeResourcesGetTransactionVisibility
@TransactionId BIGINT OUTPUT
AS
SET NOCOUNT ON;
DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = '', @st AS DATETIME = getUTCdate();
SET @TransactionId = isnull((SELECT TOP 1 SurrogateIdRangeFirstValue FROM dbo.Transactions WHERE IsVisible = 1 ORDER BY SurrogateIdRangeFirstValue DESC), -1);
EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount, @Text = @TransactionId;

GO
-- Refreshes the heartbeat timestamp of a client-controlled transaction.
CREATE PROCEDURE dbo.MergeResourcesPutTransactionHeartbeat
@TransactionId BIGINT
AS
SET NOCOUNT ON;
DECLARE @SP AS VARCHAR (100) = 'MergeResourcesPutTransactionHeartbeat', @Mode AS VARCHAR (100) = 'TR=' + CONVERT (VARCHAR, @TransactionId);
BEGIN TRY
  UPDATE dbo.Transactions SET HeartbeatDate = getUTCdate() WHERE SurrogateIdRangeFirstValue = @TransactionId AND IsControlledByClient = 1;
END TRY
BEGIN CATCH
  IF error_number() = 1750
    THROW;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error';
  THROW;
END CATCH

GO
-- Records (once) when invisible history was removed for a transaction.
CREATE PROCEDURE dbo.MergeResourcesPutTransactionInvisibleHistory
@TransactionId BIGINT
AS
SET NOCOUNT ON;
DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = 'TR=' + CONVERT (VARCHAR, @TransactionId), @st AS DATETIME = getUTCdate();
BEGIN TRY
  UPDATE dbo.Transactions SET InvisibleHistoryRemovedDate = getUTCdate() WHERE SurrogateIdRangeFirstValue = @TransactionId AND InvisibleHistoryRemovedDate IS NULL;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount;
END TRY
BEGIN CATCH
  IF error_number() = 1750
    THROW;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error';
  THROW;
END CATCH

GO
-- Cancels a single job (by @JobId) or all jobs in a group (by @GroupId):
-- created jobs (Status = 0) go straight to cancelled (Status = 4); running jobs
-- (Status = 1) only get CancelRequested = 1 so the worker can stop cooperatively.
CREATE PROCEDURE dbo.PutJobCancelation
@QueueType TINYINT, @GroupId BIGINT=NULL, @JobId BIGINT=NULL
AS
SET NOCOUNT ON;
DECLARE @SP AS VARCHAR (100) = 'PutJobCancelation', @Mode AS VARCHAR (100) = 'Q=' + isnull(CONVERT (VARCHAR, @QueueType), 'NULL') + ' G=' + isnull(CONVERT (VARCHAR, @GroupId), 'NULL') + ' J=' + isnull(CONVERT (VARCHAR, @JobId), 'NULL'), @st AS DATETIME = getUTCdate(), @Rows AS INT, @PartitionId AS TINYINT = @JobId % 16;
BEGIN TRY
  IF @JobId IS NULL AND @GroupId IS NULL
    RAISERROR ('@JobId = NULL and @GroupId = NULL', 18, 127);
  IF @JobId IS NOT NULL
    BEGIN
      UPDATE dbo.JobQueue SET Status = 4, EndDate = getUTCdate(), Version = datediff_big(millisecond, '0001-01-01', getUTCdate()) WHERE QueueType = @QueueType AND PartitionId = @PartitionId AND JobId = @JobId AND Status = 0;
      SET @Rows = @@rowcount;
      IF @Rows = 0
        BEGIN
          UPDATE dbo.JobQueue SET CancelRequested = 1 WHERE QueueType = @QueueType AND PartitionId = @PartitionId AND JobId = @JobId AND Status = 1;
          SET @Rows = @@rowcount;
        END
    END
  ELSE
    BEGIN
      UPDATE dbo.JobQueue SET Status = 4, EndDate = getUTCdate(), Version = datediff_big(millisecond, '0001-01-01', getUTCdate()) WHERE QueueType = @QueueType AND GroupId = @GroupId AND Status = 0;
      SET @Rows = @@rowcount;
      UPDATE dbo.JobQueue SET CancelRequested = 1 WHERE QueueType = @QueueType AND GroupId = @GroupId AND Status = 1;
      SET @Rows += @@rowcount;
    END
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows;
END TRY
BEGIN CATCH
  IF error_number() = 1750
    THROW;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error';
  THROW;
END CATCH

GO
-- Heartbeats a running job (optimistic check on @Version) and reports back whether
-- cancellation was requested. Throws 50412 on version/status mismatch for an existing
-- job, 50404 when the job row does not exist.
CREATE PROCEDURE dbo.PutJobHeartbeat
@QueueType TINYINT, @JobId BIGINT, @Version BIGINT, @Data BIGINT=NULL, @CancelRequested BIT=0 OUTPUT
AS
SET NOCOUNT ON;
DECLARE @SP AS VARCHAR (100) = 'PutJobHeartbeat', @Mode AS VARCHAR (100), @st AS DATETIME = getUTCdate(), @Rows AS INT = 0, @PartitionId AS TINYINT = @JobId % 16;
SET @Mode = 'Q=' + CONVERT (VARCHAR, @QueueType) + ' J=' + CONVERT (VARCHAR, @JobId) + ' P=' + CONVERT (VARCHAR, @PartitionId) + ' V=' + CONVERT (VARCHAR, @Version) + ' D=' + isnull(CONVERT (VARCHAR, @Data), 'NULL');
BEGIN TRY
  UPDATE dbo.JobQueue SET @CancelRequested = CancelRequested, HeartbeatDate = getUTCdate() WHERE QueueType = @QueueType AND PartitionId = @PartitionId AND JobId = @JobId AND Status = 1 AND Version = @Version;
  SET @Rows = @@rowcount;
  -- A terminal row (Status 2/3/4) at the same version is tolerated silently.
  IF @Rows = 0 AND NOT EXISTS (SELECT * FROM dbo.JobQueue WHERE QueueType = @QueueType AND PartitionId = @PartitionId AND JobId = @JobId AND Version = @Version AND Status IN (2, 3, 4))
    BEGIN
      IF EXISTS (SELECT * FROM dbo.JobQueue WHERE QueueType = @QueueType AND PartitionId = @PartitionId AND JobId = @JobId)
        THROW 50412, 'Precondition failed', 1;
      ELSE
        THROW 50404, 'Job record not found', 1;
    END
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows;
END TRY
BEGIN CATCH
  IF error_number() = 1750
    THROW;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error';
  THROW;
END CATCH

GO
-- Finalizes a running job: Status 3 on failure, 4 if cancel was requested, else 2.
-- Optionally cancels the whole group on failure. Same 50412/50404 semantics as heartbeat.
CREATE PROCEDURE dbo.PutJobStatus
@QueueType TINYINT, @JobId BIGINT, @Version BIGINT, @Failed BIT, @Data BIGINT, @FinalResult VARCHAR (MAX), @RequestCancellationOnFailure BIT
AS
SET NOCOUNT ON;
DECLARE @SP AS VARCHAR (100) = 'PutJobStatus', @Mode AS VARCHAR (100), @st AS DATETIME = getUTCdate(), @Rows AS INT = 0, @PartitionId AS TINYINT = @JobId % 16, @GroupId AS BIGINT;
SET @Mode = 'Q=' + CONVERT (VARCHAR, @QueueType) + ' J=' + CONVERT (VARCHAR, @JobId) + ' P=' + CONVERT (VARCHAR, @PartitionId) + ' V=' + CONVERT (VARCHAR, @Version) + ' F=' + CONVERT (VARCHAR, @Failed) + ' R=' + isnull(@FinalResult, 'NULL');
BEGIN TRY
  UPDATE dbo.JobQueue SET EndDate = getUTCdate(), Status = CASE WHEN @Failed = 1 THEN 3 WHEN CancelRequested = 1 THEN 4 ELSE 2 END, Data = @Data, Result = @FinalResult, @GroupId = GroupId WHERE QueueType = @QueueType AND PartitionId = @PartitionId AND JobId = @JobId AND Status = 1 AND Version = @Version;
  SET @Rows = @@rowcount;
  IF @Rows = 0
    BEGIN
      SET @GroupId = (SELECT GroupId FROM dbo.JobQueue WHERE QueueType = @QueueType AND PartitionId = @PartitionId AND JobId = @JobId AND Version = @Version AND Status IN (2, 3, 4));
      IF @GroupId IS NULL
        IF EXISTS (SELECT * FROM dbo.JobQueue WHERE QueueType = @QueueType AND PartitionId = @PartitionId AND JobId = @JobId)
          THROW 50412, 'Precondition failed', 1;
        ELSE
          THROW 50404, 'Job record not found', 1;
    END
  IF @Failed = 1 AND @RequestCancellationOnFailure = 1
    EXECUTE dbo.PutJobCancelation @QueueType = @QueueType, @GroupId = @GroupId;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows;
END TRY
BEGIN CATCH
  IF error_number() = 1750
    THROW;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error';
  THROW;
END CATCH

GO
-- Switches out an old resource-change partition into the staging table and merges the
-- partition boundary, truncating staging before and after.
CREATE OR ALTER PROCEDURE dbo.RemovePartitionFromResourceChanges_2
@partitionNumberToSwitchOut INT, @partitionBoundaryToMerge DATETIME2 (7)
AS
BEGIN
  TRUNCATE TABLE dbo.ResourceChangeDataStaging;
  ALTER TABLE dbo.ResourceChangeData SWITCH PARTITION @partitionNumberToSwitchOut TO
dbo.ResourceChangeDataStaging;
  ALTER PARTITION FUNCTION PartitionFunction_ResourceChangeData_Timestamp( ) MERGE RANGE (@partitionBoundaryToMerge);
  TRUNCATE TABLE dbo.ResourceChangeDataStaging;
END

GO
-- Switches per-resource-type staging tables (<Tbl>_<TypeId>) back into the partitioned
-- table @Tbl: rebuilds any disabled indexes (with page compression when configured in
-- dbo.IndexProperties), switches each non-empty staging table into its partition, then drops it.
CREATE PROCEDURE dbo.SwitchPartitionsIn
@Tbl VARCHAR (100)
WITH EXECUTE AS 'dbo'
AS
SET NOCOUNT ON;
DECLARE @SP AS VARCHAR (100) = 'SwitchPartitionsIn', @Mode AS VARCHAR (200) = 'Tbl=' + isnull(@Tbl, 'NULL'), @st AS DATETIME = getUTCdate(), @ResourceTypeId AS SMALLINT, @Rows AS BIGINT, @Txt AS VARCHAR (1000), @TblInt AS VARCHAR (100), @Ind AS VARCHAR (200), @IndId AS INT, @DataComp AS VARCHAR (100);
DECLARE @Indexes TABLE (IndId INT PRIMARY KEY, name VARCHAR (200));
DECLARE @ResourceTypes TABLE (ResourceTypeId SMALLINT PRIMARY KEY);
BEGIN TRY
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start';
  IF @Tbl IS NULL
    RAISERROR ('@Tbl IS NULL', 18, 127);
  -- Rebuild (re-enable) all currently disabled indexes on the target table.
  INSERT INTO @Indexes
  SELECT index_id, name FROM sys.indexes WHERE object_id = object_id(@Tbl) AND is_disabled = 1;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Indexes', @Action = 'Insert', @Rows = @@rowcount;
  WHILE EXISTS (SELECT * FROM @Indexes)
    BEGIN
      SELECT TOP 1 @IndId = IndId, @Ind = name FROM @Indexes ORDER BY IndId;
      SET @DataComp = CASE WHEN (SELECT PropertyValue FROM dbo.IndexProperties WHERE TableName = @Tbl AND IndexName = @Ind) = 'PAGE' THEN ' PARTITION = ALL WITH (DATA_COMPRESSION = PAGE)' ELSE '' END;
      SET @Txt = 'IF EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id(''' + @Tbl + ''') AND name = ''' + @Ind + ''' AND is_disabled = 1) ALTER INDEX ' + @Ind + ' ON dbo.' + @Tbl + ' REBUILD' + @DataComp;
      EXECUTE (@Txt);
      EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Ind, @Action = 'Rebuild', @Text = @Txt;
      DELETE @Indexes WHERE IndId = @IndId;
    END
  -- Find non-empty staging tables named <Tbl>_<ResourceTypeId>; the type id is parsed
  -- from the suffix after the underscore.
  INSERT INTO @ResourceTypes
  SELECT CONVERT (SMALLINT, substring(name, charindex('_', name) + 1, 6)) AS ResourceTypeId FROM sys.objects AS O WHERE name LIKE @Tbl + '[_]%' AND EXISTS (SELECT * FROM sysindexes WHERE id = O.object_id AND indid IN (0, 1) AND rows > 0);
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '#ResourceTypes', @Action = 'Select Into', @Rows = @@rowcount;
  WHILE EXISTS (SELECT * FROM @ResourceTypes)
    BEGIN
      SET @ResourceTypeId = (SELECT TOP 1 ResourceTypeId FROM @ResourceTypes);
      SET @TblInt = @Tbl + '_' + CONVERT (VARCHAR, @ResourceTypeId);
      EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt;
      SET @Txt = 'ALTER TABLE dbo.' + @TblInt + ' SWITCH TO dbo.' + @Tbl + ' PARTITION $partition.PartitionFunction_ResourceTypeId(' + CONVERT (VARCHAR, @ResourceTypeId) + ')';
      EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Tbl, @Action = 'Switch in start', @Text = @Txt;
      EXECUTE (@Txt);
      EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Tbl, @Action = 'Switch in', @Text = @Txt;
      -- Safety: the staging table must be empty after the switch.
      IF EXISTS (SELECT * FROM sysindexes WHERE id = object_id(@TblInt) AND rows > 0)
        BEGIN
          SET @Txt = @TblInt + ' is not empty after switch';
          RAISERROR (@Txt, 18, 127);
        END
      EXECUTE ('DROP TABLE dbo.' + @TblInt);
      EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Drop';
      DELETE @ResourceTypes WHERE ResourceTypeId = @ResourceTypeId;
    END
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st;
END TRY
BEGIN CATCH
  IF error_number() = 1750
    THROW;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st;
  THROW;
END CATCH

GO
-- Runs SwitchPartitionsIn for every supported partitioned table.
CREATE PROCEDURE dbo.SwitchPartitionsInAllTables
AS
SET NOCOUNT ON;
DECLARE @SP AS VARCHAR (100) = 'SwitchPartitionsInAllTables', @Mode AS VARCHAR (200) = 'PS=PartitionScheme_ResourceTypeId', @st AS DATETIME = getUTCdate(), @Tbl AS VARCHAR (100);
BEGIN TRY
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start';
  DECLARE @Tables TABLE (name VARCHAR (100) PRIMARY KEY, supported BIT );
  INSERT INTO @Tables
  EXECUTE dbo.GetPartitionedTables @IncludeNotDisabled = 1, @IncludeNotSupported = 0;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Tables', @Action = 'Insert', @Rows = @@rowcount;
  WHILE EXISTS (SELECT * FROM @Tables)
    BEGIN
      SET @Tbl = (SELECT TOP 1 name FROM @Tables ORDER BY name);
      EXECUTE dbo.SwitchPartitionsIn @Tbl = @Tbl;
      EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = 'SwitchPartitionsIn', @Action = 'Execute', @Text = @Tbl;
      DELETE @Tables WHERE name = @Tbl;
    END
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st;
END TRY
BEGIN CATCH
  IF error_number() = 1750
    THROW;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st;
  THROW;
END CATCH

GO
-- Switches non-empty partitions of @Tbl out into per-resource-type staging tables
-- (<Tbl>_<TypeId>), recreating check constraints, sparse columns, and enabled indexes
-- on each staging table first. (Definition continues beyond this chunk.)
CREATE PROCEDURE dbo.SwitchPartitionsOut
@Tbl VARCHAR (100), @RebuildClustered BIT
WITH EXECUTE AS 'dbo'
AS
SET NOCOUNT ON;
DECLARE @SP AS VARCHAR (100) = 'SwitchPartitionsOut', @Mode AS VARCHAR (200) = 'Tbl=' + isnull(@Tbl, 'NULL') + ' ND=' + isnull(CONVERT (VARCHAR, @RebuildClustered), 'NULL'), @st AS DATETIME = getUTCdate(), @ResourceTypeId AS SMALLINT, @Rows AS BIGINT, @Txt AS VARCHAR (MAX), @TblInt AS VARCHAR (100), @IndId AS INT, @Ind AS VARCHAR (200), @Name AS VARCHAR (100), @checkName AS VARCHAR (200), @definition AS VARCHAR (200);
DECLARE @Indexes TABLE (IndId INT PRIMARY KEY, name VARCHAR (200), IsDisabled BIT );
DECLARE @IndexesRT TABLE (IndId INT PRIMARY KEY, name VARCHAR (200), IsDisabled BIT );
DECLARE @ResourceTypes TABLE (ResourceTypeId SMALLINT PRIMARY KEY, partition_number_roundtrip INT , partition_number INT , row_count BIGINT );
DECLARE @Names TABLE (name VARCHAR (100) PRIMARY KEY);
DECLARE @CheckConstraints TABLE (CheckName VARCHAR (200), CheckDefinition VARCHAR (200));
BEGIN TRY
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start';
  IF @Tbl IS NULL
    RAISERROR ('@Tbl IS NULL', 18, 127);
  IF @RebuildClustered IS NULL
    RAISERROR ('@RebuildClustered IS NULL', 18, 127);
  INSERT INTO @Indexes
  SELECT index_id, name, is_disabled FROM sys.indexes WHERE object_id = object_id(@Tbl) AND (is_disabled = 0 OR @RebuildClustered = 1);
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Indexes', @Action = 'Insert', @Rows = @@rowcount;
  -- Partition N holds ResourceTypeId N-1; the $PARTITION roundtrip verifies that mapping.
  INSERT INTO @ResourceTypes
  SELECT partition_number - 1 AS ResourceTypeId, $PARTITION.PartitionFunction_ResourceTypeId (partition_number - 1) AS partition_number_roundtrip, partition_number, row_count FROM sys.dm_db_partition_stats WHERE object_id = object_id(@Tbl) AND index_id = 1 AND row_count > 0;
  EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@ResourceTypes', @Action = 'Insert', @Rows = @@rowcount, @Text = 'For partition switch';
  IF EXISTS (SELECT * FROM @ResourceTypes WHERE partition_number_roundtrip <> partition_number)
    RAISERROR ('Partition sanity check failed', 18, 127);
  WHILE EXISTS (SELECT * FROM @ResourceTypes)
    BEGIN
      SELECT TOP 1 @ResourceTypeId = ResourceTypeId, @Rows = row_count FROM @ResourceTypes ORDER BY ResourceTypeId;
      SET @TblInt = @Tbl + '_' + CONVERT (VARCHAR, @ResourceTypeId);
      SET @Txt = 'Starting @ResourceTypeId=' + CONVERT (VARCHAR, @ResourceTypeId) + ' row_count=' + CONVERT (VARCHAR, @Rows);
      EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Text = @Txt;
      -- Build the staging table only if it does not already hold rows.
      IF NOT EXISTS (SELECT * FROM sysindexes WHERE id = object_id(@TblInt) AND rows > 0)
        BEGIN
          IF object_id(@TblInt) IS NOT NULL
            BEGIN
              EXECUTE ('DROP TABLE dbo.' + @TblInt);
              EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Drop';
            END
          EXECUTE ('SELECT * INTO dbo.' + @TblInt + ' FROM dbo.' + @Tbl + ' WHERE 1 = 2');
          EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Select Into', @Rows = @@rowcount;
          -- Copy check constraints from the source table.
          DELETE @CheckConstraints;
          INSERT INTO @CheckConstraints
          SELECT name, definition FROM sys.check_constraints WHERE parent_object_id = object_id(@Tbl);
          WHILE EXISTS (SELECT * FROM @CheckConstraints)
            BEGIN
              SELECT TOP 1 @checkName = CheckName, @definition = CheckDefinition FROM @CheckConstraints;
              SET @Txt = 'ALTER TABLE ' + @TblInt + ' ADD CHECK ' + @definition;
              EXECUTE (@Txt);
              EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'ALTER', @Text = @Txt;
              DELETE @CheckConstraints WHERE CheckName = @checkName;
            END
          -- Re-apply SPARSE attribute on columns (lost by SELECT INTO).
          DELETE @Names;
          INSERT INTO @Names
          SELECT name FROM sys.columns WHERE object_id = object_id(@Tbl) AND is_sparse = 1;
          WHILE EXISTS (SELECT * FROM @Names)
            BEGIN
              SET @Name = (SELECT TOP 1 name FROM @Names ORDER BY name);
              SET @Txt = (SELECT 'ALTER TABLE dbo.' + @TblInt + ' ALTER COLUMN ' + @Name + ' ' + T.name + '(' + CONVERT (VARCHAR, C.precision) + ',' + CONVERT (VARCHAR, C.scale) + ') SPARSE NULL' FROM sys.types AS T INNER JOIN sys.columns AS C ON C.system_type_id = T.system_type_id WHERE C.object_id = object_id(@Tbl) AND C.name = @Name);
              EXECUTE (@Txt);
              EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'ALTER', @Text = @Txt;
              DELETE @Names WHERE name = @Name;
            END
        END
      -- Recreate all enabled indexes on the staging table via scripted commands.
      INSERT INTO @IndexesRT
      SELECT * FROM @Indexes WHERE IsDisabled = 0;
      WHILE EXISTS (SELECT * FROM @IndexesRT)
        BEGIN
          SELECT TOP 1 @IndId = IndId, @Ind = name FROM @IndexesRT ORDER BY IndId;
          IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id(@TblInt) AND name = @Ind)
            BEGIN
              EXECUTE dbo.GetIndexCommands @Tbl = @Tbl, @Ind = @Ind, @AddPartClause = 0, @IncludeClustered = 1, @Txt = @Txt OUTPUT;
              SET @Txt = replace(@Txt, '[' + @Tbl + ']', @TblInt);
              EXECUTE (@Txt);
              EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Create Index', @Text = @Txt;
            END
          DELETE @IndexesRT WHERE IndId = @IndId;
        END
      -- Constrain the staging table to exactly one ResourceTypeId (required for SWITCH).
      SET @Txt = 'ALTER TABLE dbo.' + @TblInt + ' ADD CHECK (ResourceTypeId >= ' + CONVERT (VARCHAR, @ResourceTypeId) + ' AND ResourceTypeId < ' + CONVERT (VARCHAR, @ResourceTypeId) + ' + 1)';
      EXECUTE (@Txt);
      EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Tbl, @Action = 'Add check', @Text = @Txt;
      SET @Txt = 'ALTER TABLE dbo.' + @Tbl + ' SWITCH PARTITION $partition.PartitionFunction_ResourceTypeId(' + CONVERT (VARCHAR, @ResourceTypeId) + ') TO dbo.'
+ @TblInt; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Tbl, @Action = 'Switch out start', @Text = @Txt; + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Tbl, @Action = 'Switch out end', @Text = @Txt; + DELETE @ResourceTypes + WHERE ResourceTypeId = @ResourceTypeId; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.SwitchPartitionsOutAllTables +@RebuildClustered BIT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'SwitchPartitionsOutAllTables', @Mode AS VARCHAR (200) = 'PS=PartitionScheme_ResourceTypeId ND=' + isnull(CONVERT (VARCHAR, @RebuildClustered), 'NULL'), @st AS DATETIME = getUTCdate(), @Tbl AS VARCHAR (100); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + DECLARE @Tables TABLE ( + name VARCHAR (100) PRIMARY KEY, + supported BIT ); + INSERT INTO @Tables + EXECUTE dbo.GetPartitionedTables @IncludeNotDisabled = @RebuildClustered, @IncludeNotSupported = 0; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Tables', @Action = 'Insert', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @Tables) + BEGIN + SET @Tbl = (SELECT TOP 1 name + FROM @Tables + ORDER BY name); + EXECUTE dbo.SwitchPartitionsOut @Tbl = @Tbl, @RebuildClustered = @RebuildClustered; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = 'SwitchPartitionsOut', @Action = 'Execute', @Text = @Tbl; + DELETE @Tables + WHERE name = @Tbl; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; 
+END CATCH + +GO +GO +CREATE OR ALTER PROCEDURE dbo.UpdateEventAgentCheckpoint +@CheckpointId VARCHAR (64), @LastProcessedDateTime DATETIMEOFFSET (7)=NULL, @LastProcessedIdentifier VARCHAR (64)=NULL +AS +BEGIN + IF EXISTS (SELECT * + FROM dbo.EventAgentCheckpoint + WHERE CheckpointId = @CheckpointId) + UPDATE dbo.EventAgentCheckpoint + SET CheckpointId = @CheckpointId, + LastProcessedDateTime = @LastProcessedDateTime, + LastProcessedIdentifier = @LastProcessedIdentifier, + UpdatedOn = sysutcdatetime() + WHERE CheckpointId = @CheckpointId; + ELSE + INSERT INTO dbo.EventAgentCheckpoint (CheckpointId, LastProcessedDateTime, LastProcessedIdentifier, UpdatedOn) + VALUES (@CheckpointId, @LastProcessedDateTime, @LastProcessedIdentifier, sysutcdatetime()); +END + +GO +CREATE PROCEDURE dbo.UpdateReindexJob +@id VARCHAR (64), @status VARCHAR (10), @rawJobRecord VARCHAR (MAX), @jobVersion BINARY (8) +AS +SET NOCOUNT ON; +SET XACT_ABORT ON; +BEGIN TRANSACTION; +DECLARE @currentJobVersion AS BINARY (8); +SELECT @currentJobVersion = JobVersion +FROM dbo.ReindexJob WITH (UPDLOCK, HOLDLOCK) +WHERE Id = @id; +IF (@currentJobVersion IS NULL) + BEGIN + THROW 50404, 'Reindex job record not found', 1; + END +IF (@jobVersion <> @currentJobVersion) + BEGIN + THROW 50412, 'Precondition failed', 1; + END +DECLARE @heartbeatDateTime AS DATETIME2 (7) = SYSUTCDATETIME(); +UPDATE dbo.ReindexJob +SET Status = @status, + HeartbeatDateTime = @heartbeatDateTime, + RawJobRecord = @rawJobRecord +WHERE Id = @id; +SELECT @@DBTS; +COMMIT TRANSACTION; + +GO +CREATE PROCEDURE dbo.UpdateResourceSearchParams +@FailedResources INT=0 OUTPUT, @Resources dbo.ResourceList READONLY, @ResourceWriteClaims dbo.ResourceWriteClaimList READONLY, @ReferenceSearchParams dbo.ReferenceSearchParamList READONLY, @TokenSearchParams dbo.TokenSearchParamList READONLY, @TokenTexts dbo.TokenTextList READONLY, @StringSearchParams dbo.StringSearchParamList READONLY, @UriSearchParams dbo.UriSearchParamList READONLY, 
@NumberSearchParams dbo.NumberSearchParamList READONLY, @QuantitySearchParams dbo.QuantitySearchParamList READONLY, @DateTimeSearchParams dbo.DateTimeSearchParamList READONLY, @ReferenceTokenCompositeSearchParams dbo.ReferenceTokenCompositeSearchParamList READONLY, @TokenTokenCompositeSearchParams dbo.TokenTokenCompositeSearchParamList READONLY, @TokenDateTimeCompositeSearchParams dbo.TokenDateTimeCompositeSearchParamList READONLY, @TokenQuantityCompositeSearchParams dbo.TokenQuantityCompositeSearchParamList READONLY, @TokenStringCompositeSearchParams dbo.TokenStringCompositeSearchParamList READONLY, @TokenNumberNumberCompositeSearchParams dbo.TokenNumberNumberCompositeSearchParamList READONLY +AS +SET NOCOUNT ON; +DECLARE @st AS DATETIME = getUTCdate(), @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (200) = isnull((SELECT 'RT=[' + CONVERT (VARCHAR, min(ResourceTypeId)) + ',' + CONVERT (VARCHAR, max(ResourceTypeId)) + '] Sur=[' + CONVERT (VARCHAR, min(ResourceSurrogateId)) + ',' + CONVERT (VARCHAR, max(ResourceSurrogateId)) + '] V=' + CONVERT (VARCHAR, max(Version)) + ' Rows=' + CONVERT (VARCHAR, count(*)) + FROM @Resources), 'Input=Empty'), @Rows AS INT; +BEGIN TRY + DECLARE @Ids TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL); + BEGIN TRANSACTION; + UPDATE B + SET SearchParamHash = A.SearchParamHash + OUTPUT deleted.ResourceTypeId, deleted.ResourceSurrogateId INTO @Ids + FROM @Resources AS A + INNER JOIN + dbo.Resource AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + WHERE B.IsHistory = 0; + SET @Rows = @@rowcount; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.ResourceWriteClaim AS B + ON B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.ReferenceSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenSearchParam 
AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenText AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.StringSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.UriSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.NumberSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.QuantitySearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.DateTimeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.ReferenceTokenCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenTokenCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenDateTimeCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenQuantityCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenStringCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A 
+ INNER JOIN + dbo.TokenNumberNumberCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + INSERT INTO dbo.ResourceWriteClaim (ResourceSurrogateId, ClaimTypeId, ClaimValue) + SELECT ResourceSurrogateId, + ClaimTypeId, + ClaimValue + FROM @ResourceWriteClaims; + INSERT INTO dbo.ReferenceSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId, ReferenceResourceVersion) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + BaseUri, + ReferenceResourceTypeId, + ReferenceResourceId, + ReferenceResourceVersion + FROM @ReferenceSearchParams; + INSERT INTO dbo.TokenSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, Code, CodeOverflow) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId, + Code, + CodeOverflow + FROM @TokenSearchParams; + INSERT INTO dbo.TokenText (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Text + FROM @TokenTexts; + INSERT INTO dbo.StringSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text, TextOverflow, IsMin, IsMax) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Text, + TextOverflow, + IsMin, + IsMax + FROM @StringSearchParams; + INSERT INTO dbo.UriSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Uri) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Uri + FROM @UriSearchParams; + INSERT INTO dbo.NumberSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SingleValue, + LowValue, + HighValue + FROM @NumberSearchParams; + INSERT INTO dbo.QuantitySearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue) + SELECT ResourceTypeId, + 
ResourceSurrogateId, + SearchParamId, + SystemId, + QuantityCodeId, + SingleValue, + LowValue, + HighValue + FROM @QuantitySearchParams; + INSERT INTO dbo.DateTimeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, IsMin, IsMax) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + StartDateTime, + EndDateTime, + IsLongerThanADay, + IsMin, + IsMax + FROM @DateTimeSearchParams; + INSERT INTO dbo.ReferenceTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri1, ReferenceResourceTypeId1, ReferenceResourceId1, ReferenceResourceVersion1, SystemId2, Code2, CodeOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + BaseUri1, + ReferenceResourceTypeId1, + ReferenceResourceId1, + ReferenceResourceVersion1, + SystemId2, + Code2, + CodeOverflow2 + FROM @ReferenceTokenCompositeSearchParams; + INSERT INTO dbo.TokenTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SystemId2, Code2, CodeOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SystemId2, + Code2, + CodeOverflow2 + FROM @TokenTokenCompositeSearchParams; + INSERT INTO dbo.TokenDateTimeCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, StartDateTime2, EndDateTime2, IsLongerThanADay2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + StartDateTime2, + EndDateTime2, + IsLongerThanADay2 + FROM @TokenDateTimeCompositeSearchParams; + INSERT INTO dbo.TokenQuantityCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, SystemId2, QuantityCodeId2, LowValue2, HighValue2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SingleValue2, + SystemId2, + 
QuantityCodeId2, + LowValue2, + HighValue2 + FROM @TokenQuantityCompositeSearchParams; + INSERT INTO dbo.TokenStringCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, Text2, TextOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + Text2, + TextOverflow2 + FROM @TokenStringCompositeSearchParams; + INSERT INTO dbo.TokenNumberNumberCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, LowValue2, HighValue2, SingleValue3, LowValue3, HighValue3, HasRange) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SingleValue2, + LowValue2, + HighValue2, + SingleValue3, + LowValue3, + HighValue3, + HasRange + FROM @TokenNumberNumberCompositeSearchParams; + COMMIT TRANSACTION; + SET @FailedResources = (SELECT count(*) + FROM @Resources) - @Rows; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.UpsertSearchParams +@searchParams dbo.SearchParamTableType_2 READONLY +AS +SET NOCOUNT ON; +SET XACT_ABORT ON; +SET TRANSACTION ISOLATION LEVEL SERIALIZABLE; +BEGIN TRANSACTION; +DECLARE @lastUpdated AS DATETIMEOFFSET (7) = SYSDATETIMEOFFSET(); +DECLARE @summaryOfChanges TABLE ( + Uri VARCHAR (128) COLLATE Latin1_General_100_CS_AS NOT NULL, + Action VARCHAR (20) NOT NULL); +MERGE INTO dbo.SearchParam WITH (TABLOCKX) + AS target +USING @searchParams AS source ON target.Uri = source.Uri +WHEN MATCHED THEN UPDATE +SET Status = source.Status, + LastUpdated = @lastUpdated, + IsPartiallySupported = source.IsPartiallySupported +WHEN NOT MATCHED BY TARGET THEN INSERT (Uri, Status, LastUpdated, IsPartiallySupported) VALUES 
(source.Uri, source.Status, @lastUpdated, source.IsPartiallySupported) +OUTPUT source.Uri, $ACTION INTO @summaryOfChanges; +SELECT SearchParamId, + SearchParam.Uri +FROM dbo.SearchParam AS searchParam + INNER JOIN + @summaryOfChanges AS upsertedSearchParam + ON searchParam.Uri = upsertedSearchParam.Uri +WHERE upsertedSearchParam.Action = 'INSERT'; +COMMIT TRANSACTION; + +GO diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersion.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersion.cs index 47b347521a..84330af27a 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersion.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersion.cs @@ -87,5 +87,6 @@ public enum SchemaVersion V75 = 75, V76 = 76, V77 = 77, + V78 = 78, } } diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersionConstants.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersionConstants.cs index 59ac1b73b1..f69471481b 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersionConstants.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersionConstants.cs @@ -8,7 +8,7 @@ namespace Microsoft.Health.Fhir.SqlServer.Features.Schema public static class SchemaVersionConstants { public const int Min = (int)SchemaVersion.V73; - public const int Max = (int)SchemaVersion.V77; + public const int Max = (int)SchemaVersion.V78; public const int MinForUpgrade = (int)SchemaVersion.V73; // this is used for upgrade tests only public const int SearchParameterStatusSchemaVersion = (int)SchemaVersion.V6; public const int SupportForReferencesWithMissingTypeVersion = (int)SchemaVersion.V7; diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Scripts/TransactionCheckWithInitialiScript.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Scripts/TransactionCheckWithInitialiScript.sql index 989e1eb5db..cc9eb2d8fa 100644 --- 
a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Scripts/TransactionCheckWithInitialiScript.sql +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Scripts/TransactionCheckWithInitialiScript.sql @@ -19,6 +19,6 @@ Go INSERT INTO dbo.SchemaVersion VALUES - (77, 'started') + (78, 'started') Go diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Sprocs/GetResourcesByTypeAndSurrogateIdRange.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Sprocs/GetResourcesByTypeAndSurrogateIdRange.sql index b0b700004a..4b8115c946 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Sprocs/GetResourcesByTypeAndSurrogateIdRange.sql +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Sprocs/GetResourcesByTypeAndSurrogateIdRange.sql @@ -1,7 +1,6 @@ --DROP PROCEDURE dbo.GetResourcesByTypeAndSurrogateIdRange GO --- @GlobalStartId left for backwards compatability for V67 -> v68. Can be removed in the future. -CREATE PROCEDURE dbo.GetResourcesByTypeAndSurrogateIdRange @ResourceTypeId smallint, @StartId bigint, @EndId bigint, @GlobalStartId bigint = NULL, @GlobalEndId bigint = NULL, @IncludeHistory bit = 0, @IncludeDeleted bit = 0 +CREATE PROCEDURE dbo.GetResourcesByTypeAndSurrogateIdRange @ResourceTypeId smallint, @StartId bigint, @EndId bigint, @GlobalEndId bigint = NULL, @IncludeHistory bit = 0, @IncludeDeleted bit = 0 AS set nocount on DECLARE @SP varchar(100) = 'GetResourcesByTypeAndSurrogateIdRange' diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Sprocs/PutJobHeartbeat.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Sprocs/PutJobHeartbeat.sql index 1a6d2016cc..cda33b261b 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Sprocs/PutJobHeartbeat.sql +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Sprocs/PutJobHeartbeat.sql @@ -1,6 +1,7 @@ --DROP PROCEDURE dbo.PutJobHeartbeat +--TODO: Remove @Data from signature after deployment GO -CREATE PROCEDURE 
dbo.PutJobHeartbeat @QueueType tinyint, @JobId bigint, @Version bigint, @Data bigint = NULL, @CurrentResult varchar(max) = NULL, @CancelRequested bit = 0 OUTPUT +CREATE PROCEDURE dbo.PutJobHeartbeat @QueueType tinyint, @JobId bigint, @Version bigint, @Data bigint = NULL, @CancelRequested bit = 0 OUTPUT AS set nocount on DECLARE @SP varchar(100) = 'PutJobHeartbeat' @@ -12,28 +13,14 @@ DECLARE @SP varchar(100) = 'PutJobHeartbeat' SET @Mode = 'Q='+convert(varchar,@QueueType)+' J='+convert(varchar,@JobId)+' P='+convert(varchar,@PartitionId)+' V='+convert(varchar,@Version)+' D='+isnull(convert(varchar,@Data),'NULL') BEGIN TRY - IF @CurrentResult IS NULL - UPDATE dbo.JobQueue - SET @CancelRequested = CancelRequested - ,HeartbeatDate = getUTCdate() - ,Data = isnull(@Data,Data) - WHERE QueueType = @QueueType - AND PartitionId = @PartitionId - AND JobId = @JobId - AND Status = 1 - AND Version = @Version - ELSE - UPDATE dbo.JobQueue - SET @CancelRequested = CancelRequested - ,HeartbeatDate = getUTCdate() - ,Data = isnull(@Data,Data) - ,Result = @CurrentResult - WHERE QueueType = @QueueType - AND PartitionId = @PartitionId - AND JobId = @JobId - AND Status = 1 - AND Version = @Version - + UPDATE dbo.JobQueue + SET @CancelRequested = CancelRequested + ,HeartbeatDate = getUTCdate() + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId + AND Status = 1 + AND Version = @Version SET @Rows = @@rowcount IF @Rows = 0 AND NOT EXISTS (SELECT * FROM dbo.JobQueue WHERE QueueType = @QueueType AND PartitionId = @PartitionId AND JobId = @JobId AND Version = @Version AND Status IN (2,3,4)) diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlQueueClient.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlQueueClient.cs index 7983c105a7..810c227bc9 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlQueueClient.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlQueueClient.cs @@ -204,20 +204,6 @@ public 
async Task PutJobHeartbeatAsync(JobInfo jobInfo, CancellationToken cmd.Parameters.AddWithValue("@QueueType", jobInfo.QueueType); cmd.Parameters.AddWithValue("@JobId", jobInfo.Id); cmd.Parameters.AddWithValue("@Version", jobInfo.Version); - if (jobInfo.Data.HasValue) - { - cmd.Parameters.AddWithValue("@Data", jobInfo.Data.Value); - } - else - { - cmd.Parameters.AddWithValue("@Data", DBNull.Value); - } - - if (jobInfo.Result != null) - { - cmd.Parameters.AddWithValue("@CurrentResult", jobInfo.Result); - } - var cancelParam = new SqlParameter("@CancelRequested", SqlDbType.Bit) { Direction = ParameterDirection.Output }; cmd.Parameters.Add(cancelParam); await cmd.ExecuteNonQueryAsync(_sqlRetryService, _logger, cancellationToken); diff --git a/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj b/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj index 98472b1dce..3e7a1db1fc 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj +++ b/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj @@ -1,7 +1,7 @@  - 77 + 78 Features\Schema\Migrations\$(LatestSchemaVersion).sql diff --git a/src/Microsoft.Health.TaskManagement/JobHosting.cs b/src/Microsoft.Health.TaskManagement/JobHosting.cs index ec15a3aba0..2c75adeed2 100644 --- a/src/Microsoft.Health.TaskManagement/JobHosting.cs +++ b/src/Microsoft.Health.TaskManagement/JobHosting.cs @@ -42,13 +42,7 @@ public JobHosting(IQueueClient queueClient, IJobFactory jobFactory, ILogger(); @@ -242,19 +236,6 @@ public static async Task ExecuteJobWithHeartbeatsAsync(IQueueClient queu } } - [Obsolete("Heartbeats should only update timestamp, results should only be written when job reaches terminal state.")] - public static async Task ExecuteJobWithHeavyHeartbeatsAsync(IQueueClient queueClient, JobInfo jobInfo, Func> action, TimeSpan heartbeatPeriod, CancellationTokenSource cancellationTokenSource) - { - EnsureArg.IsNotNull(queueClient, 
nameof(queueClient)); - EnsureArg.IsNotNull(jobInfo, nameof(jobInfo)); - EnsureArg.IsNotNull(action, nameof(action)); - - await using (new Timer(async _ => await PutJobHeartbeatAsync(queueClient, jobInfo, cancellationTokenSource), null, TimeSpan.FromSeconds(RandomNumberGenerator.GetInt32(100) / 100.0 * heartbeatPeriod.TotalSeconds), heartbeatPeriod)) - { - return await action(cancellationTokenSource); - } - } - private static async Task PutJobHeartbeatAsync(IQueueClient queueClient, JobInfo jobInfo, CancellationTokenSource cancellationTokenSource) { try // this try/catch is redundant with try/catch in queueClient.PutJobHeartbeatAsync, but it is extra guarantee diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/QueueClientTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/QueueClientTests.cs index 217c9bf0c5..9609f433b6 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/QueueClientTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/QueueClientTests.cs @@ -135,67 +135,6 @@ public async Task GivenJobsEnqueue_WhenDequeue_ThenAllJobsShouldBeReturned() Assert.Contains("job2", definitions); } - [Fact] - [Obsolete("Unit test for obsolete method")] - public async Task GivenJobWithExpiredHeartbeat_WhenDequeue_ThenJobWithResultShouldBeReturned() - { - byte queueType = (byte)TestQueueType.GivenJobWithExpiredHeartbeat_WhenDequeue_ThenJobWithResultShouldBeReturned; - - await _queueClient.EnqueueAsync(queueType, new[] { "job1" }, null, false, false, CancellationToken.None); - - JobInfo jobInfo1 = await _queueClient.DequeueAsync(queueType, "test-worker", 1, CancellationToken.None); - jobInfo1.QueueType = queueType; - jobInfo1.Result = "current-result"; - await JobHosting.ExecuteJobWithHeavyHeartbeatsAsync( - _queueClient, - jobInfo1, - async cancelSource => - { - await Task.Delay(TimeSpan.FromSeconds(2)); - return jobInfo1.Result; - }, - TimeSpan.FromSeconds(0.1), - new 
CancellationTokenSource()); - await Task.Delay(TimeSpan.FromSeconds(1)); - JobInfo jobInfo2 = await _queueClient.DequeueAsync(queueType, "test-worker", 0, CancellationToken.None); - - Assert.Equal(jobInfo1.Result, jobInfo2?.Result); - } - - [Fact] - [Obsolete("Unit test for obsolete method")] - public async Task GivenRunningJobCancelled_WhenHeartbeat_ThenCancelRequestedShouldBeReturned() - { - byte queueType = (byte)TestQueueType.GivenRunningJobCancelled_WhenHeartbeat_ThenCancelRequestedShouldBeReturned; - - await _queueClient.EnqueueAsync(queueType, new[] { "job" }, null, false, false, CancellationToken.None); - - var job = await _queueClient.DequeueAsync(queueType, "test-worker", 10, CancellationToken.None); - job.QueueType = queueType; - await _queueClient.CancelJobByGroupIdAsync(queueType, job.GroupId, CancellationToken.None); - try - { - await JobHosting.ExecuteJobWithHeavyHeartbeatsAsync( - _queueClient, - job, - async cancelSource => - { - await Task.Delay(TimeSpan.FromSeconds(10), cancelSource.Token); - return job.Result; - }, - TimeSpan.FromSeconds(0.1), - new CancellationTokenSource()); - } - catch (TaskCanceledException) - { - // do nothing - } - - Assert.Equal(JobStatus.Running, job.Status); - job = await _queueClient.GetJobByIdAsync(queueType, job.Id, false, CancellationToken.None); - Assert.True(job.CancelRequested); - } - [Fact] public async Task GivenJobNotHeartbeat_WhenDequeue_ThenJobShouldBeReturnedAgain() { @@ -382,63 +321,5 @@ await this.RetryAsync( Assert.True(heartbeatChanges >= 1, $"Heartbeats recorded: ${heartbeatChanges}"); }); } - - [Fact(Skip = "Doesn't run within time limits. 
Bug: 103102")] - [Obsolete("Unit test for obsolete method")] - public async Task GivenAJob_WhenExecutedWithHeavyHeartbeats_ThenHeavyHeartbeatsAreRecorded() - { - await this.RetryAsync( - async () => - { - var queueType = (byte)TestQueueType.ExecuteWithHeartbeatsHeavy; - await _queueClient.EnqueueAsync(queueType, new[] { "job" }, null, false, false, CancellationToken.None); - JobInfo job = await _queueClient.DequeueAsync(queueType, "test-worker", 1, CancellationToken.None); - var cancel = new CancellationTokenSource(); - cancel.CancelAfter(TimeSpan.FromSeconds(30)); - var execTask = JobHosting.ExecuteJobWithHeavyHeartbeatsAsync( - _queueClient, - job, - async cancelSource => - { - await Task.Delay(TimeSpan.FromSeconds(5), cancelSource.Token); - job.Result = "Something"; - await Task.Delay(TimeSpan.FromSeconds(5)); - await _queueClient.CompleteJobAsync(job, false, cancelSource.Token); - return "Test"; - }, - TimeSpan.FromSeconds(1), - cancel); - - var currentJob = job; - var previousJob = job; - var heartbeatChanges = 0; - var heavyHeartbeatRecorded = false; - var dequeueTask = Task.Run( - async () => - { - while (currentJob.Status == JobStatus.Running) - { - await Task.Delay(TimeSpan.FromSeconds(1)); - currentJob = await _queueClient.GetJobByIdAsync(queueType, job.Id, true, cancel.Token); - - if (currentJob.Status == JobStatus.Running && currentJob.Result != null) - { - heavyHeartbeatRecorded = true; - } - - if (currentJob.HeartbeatDateTime != previousJob.HeartbeatDateTime) - { - heartbeatChanges++; - previousJob = currentJob; - } - } - }, - cancel.Token); - Task.WaitAll(execTask, dequeueTask); - - Assert.True(heartbeatChanges >= 1, $"Heartbeats recorded: ${heartbeatChanges}"); - Assert.True(heavyHeartbeatRecorded, $"Heavy heartbeat not recorded"); - }); - } } } From b8ba532748b7bc056b18d1a9220ae981bbddcdf1 Mon Sep 17 00:00:00 2001 From: SergeyGaluzo <95932081+SergeyGaluzo@users.noreply.github.com> Date: Thu, 18 Apr 2024 20:04:11 -0700 Subject: [PATCH 152/155] Added 
ability to disable retries (#3819) --- .../Features/Storage/SqlQueueClient.cs | 2 +- .../Features/Storage/SqlRetry/ISqlRetryService.cs | 2 +- .../Features/Storage/SqlRetry/SqlCommandExtensions.cs | 4 ++-- .../Features/Storage/SqlRetry/SqlRetryService.cs | 5 +++-- 4 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlQueueClient.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlQueueClient.cs index 810c227bc9..7fc752f9b1 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlQueueClient.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlQueueClient.cs @@ -206,7 +206,7 @@ public async Task PutJobHeartbeatAsync(JobInfo jobInfo, CancellationToken cmd.Parameters.AddWithValue("@Version", jobInfo.Version); var cancelParam = new SqlParameter("@CancelRequested", SqlDbType.Bit) { Direction = ParameterDirection.Output }; cmd.Parameters.Add(cancelParam); - await cmd.ExecuteNonQueryAsync(_sqlRetryService, _logger, cancellationToken); + await cmd.ExecuteNonQueryAsync(_sqlRetryService, _logger, cancellationToken, disableRetries: true); // this should be fire and forget cancel = (bool)cancelParam.Value; } catch (Exception ex) diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlRetry/ISqlRetryService.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlRetry/ISqlRetryService.cs index 8430bb3791..e2181dac90 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlRetry/ISqlRetryService.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlRetry/ISqlRetryService.cs @@ -18,7 +18,7 @@ public interface ISqlRetryService Task ExecuteSql(Func action, CancellationToken cancellationToken, bool isReadOnly = false); - Task ExecuteSql(SqlCommand sqlCommand, Func action, ILogger logger, string 
logMessage, CancellationToken cancellationToken, bool isReadOnly = false, bool disableRetries = false); Task> ExecuteReaderAsync(SqlCommand sqlCommand, Func readerToResult, ILogger logger, string logMessage, CancellationToken cancellationToken, bool isReadOnly = false); } diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlRetry/SqlCommandExtensions.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlRetry/SqlCommandExtensions.cs index fb952405a4..d39a899066 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlRetry/SqlCommandExtensions.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlRetry/SqlCommandExtensions.cs @@ -14,9 +14,9 @@ namespace Microsoft.Health.Fhir.SqlServer.Features.Storage { public static class SqlCommandExtensions { - public static async Task ExecuteNonQueryAsync(this SqlCommand cmd, ISqlRetryService retryService, ILogger logger, CancellationToken cancellationToken, string logMessage = null, bool isReadOnly = false) + public static async Task ExecuteNonQueryAsync(this SqlCommand cmd, ISqlRetryService retryService, ILogger logger, CancellationToken cancellationToken, string logMessage = null, bool isReadOnly = false, bool disableRetries = false) { - await retryService.ExecuteSql(cmd, async (sql, cancel) => await sql.ExecuteNonQueryAsync(cancel), logger, logMessage, cancellationToken, isReadOnly); + await retryService.ExecuteSql(cmd, async (sql, cancel) => await sql.ExecuteNonQueryAsync(cancel), logger, logMessage, cancellationToken, isReadOnly, disableRetries); } public static async Task> ExecuteReaderAsync(this SqlCommand cmd, ISqlRetryService retryService, Func readerToResult, ILogger logger, CancellationToken cancellationToken, string logMessage = null, bool isReadOnly = false) diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlRetry/SqlRetryService.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlRetry/SqlRetryService.cs index 5957d3118d..514927982e 100644 
--- a/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlRetry/SqlRetryService.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Storage/SqlRetry/SqlRetryService.cs @@ -248,9 +248,10 @@ public async Task ExecuteSql(FuncMessage to be logged on error. /// Cancellation token. /// "Flag indicating whether connection to read only replica can be used." + /// "Flag indicating whether retries are disabled." /// A task representing the asynchronous operation. /// When executing this method, if exception is thrown that is not retriable or if last retry fails, then same exception is thrown by this method. - public async Task ExecuteSql(SqlCommand sqlCommand, Func action, ILogger logger, string logMessage, CancellationToken cancellationToken, bool isReadOnly = false) + public async Task ExecuteSql(SqlCommand sqlCommand, Func action, ILogger logger, string logMessage, CancellationToken cancellationToken, bool isReadOnly = false, bool disableRetries = false) { EnsureArg.IsNotNull(sqlCommand, nameof(sqlCommand)); EnsureArg.IsNotNull(action, nameof(action)); @@ -283,7 +284,7 @@ public async Task ExecuteSql(SqlCommand sqlCommand, Func Date: Fri, 19 Apr 2024 17:47:18 -0700 Subject: [PATCH 153/155] Remove garbage stored procedure plus test (#3820) * Remove garbage stored procedure * data type conversion test * using --- .../Features/Schema/Migrations/79.diff.sql | 2 + .../Features/Schema/Migrations/79.sql | 5143 +++++++++++++++++ .../Features/Schema/SchemaVersion.cs | 1 + .../Features/Schema/SchemaVersionConstants.cs | 2 +- .../TransactionCheckWithInitialiScript.sql | 2 +- ...onCompletedJobCountOfSpecificQueueType.sql | 26 - .../Microsoft.Health.Fhir.SqlServer.csproj | 2 +- ...th.Fhir.Shared.Tests.Integration.projitems | 1 + .../SqlServerColumnTypeChangeTests.cs | 282 + 9 files changed, 5432 insertions(+), 29 deletions(-) create mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/79.diff.sql create mode 100644 
src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/79.sql delete mode 100644 src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Sprocs/GetNonCompletedJobCountOfSpecificQueueType.sql create mode 100644 test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerColumnTypeChangeTests.cs diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/79.diff.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/79.diff.sql new file mode 100644 index 0000000000..091c08d980 --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/79.diff.sql @@ -0,0 +1,2 @@ +IF object_id('dbo.GetNonCompletedJobCountOfSpecificQueueType') IS NOT NULL + DROP PROCEDURE dbo.GetNonCompletedJobCountOfSpecificQueueType diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/79.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/79.sql new file mode 100644 index 0000000000..e049ad514d --- /dev/null +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Migrations/79.sql @@ -0,0 +1,5143 @@ + +/************************************************************************************************* + Auto-Generated from Sql build task. Do not manually edit it. 
+**************************************************************************************************/ +SET XACT_ABORT ON +BEGIN TRAN +IF EXISTS (SELECT * + FROM sys.tables + WHERE name = 'ClaimType') + BEGIN + ROLLBACK; + RETURN; + END + + +GO +INSERT INTO dbo.SchemaVersion +VALUES (79, 'started'); + +CREATE PARTITION FUNCTION PartitionFunction_ResourceTypeId(SMALLINT) + AS RANGE RIGHT + FOR VALUES (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150); + +CREATE PARTITION SCHEME PartitionScheme_ResourceTypeId + AS PARTITION PartitionFunction_ResourceTypeId + ALL TO ([PRIMARY]); + + +GO +CREATE PARTITION FUNCTION PartitionFunction_ResourceChangeData_Timestamp(DATETIME2 (7)) + AS RANGE RIGHT + FOR VALUES (N'1970-01-01T00:00:00.0000000'); + +CREATE PARTITION SCHEME PartitionScheme_ResourceChangeData_Timestamp + AS PARTITION PartitionFunction_ResourceChangeData_Timestamp + ALL TO ([PRIMARY]); + +DECLARE @numberOfHistoryPartitions AS INT = 48; + +DECLARE @numberOfFuturePartitions AS INT = 720; + +DECLARE @rightPartitionBoundary AS DATETIME2 (7); + +DECLARE @currentDateTime AS DATETIME2 (7) = sysutcdatetime(); + +WHILE @numberOfHistoryPartitions >= -@numberOfFuturePartitions + BEGIN + SET @rightPartitionBoundary = DATEADD(hour, DATEDIFF(hour, 0, @currentDateTime) - @numberOfHistoryPartitions, 0); + ALTER PARTITION SCHEME PartitionScheme_ResourceChangeData_Timestamp NEXT USED [Primary]; + ALTER 
PARTITION FUNCTION PartitionFunction_ResourceChangeData_Timestamp( ) + SPLIT RANGE (@rightPartitionBoundary); + SET @numberOfHistoryPartitions -= 1; + END + +CREATE SEQUENCE dbo.ResourceSurrogateIdUniquifierSequence + AS INT + START WITH 0 + INCREMENT BY 1 + MINVALUE 0 + MAXVALUE 79999 + CYCLE + CACHE 1000000; + +CREATE TYPE dbo.BigintList AS TABLE ( + Id BIGINT NOT NULL PRIMARY KEY); + +CREATE TYPE dbo.DateTimeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + StartDateTime DATETIMEOFFSET (7) NOT NULL, + EndDateTime DATETIMEOFFSET (7) NOT NULL, + IsLongerThanADay BIT NOT NULL, + IsMin BIT NOT NULL, + IsMax BIT NOT NULL UNIQUE (ResourceTypeId, ResourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, IsMin, IsMax)); + +CREATE TYPE dbo.NumberSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SingleValue DECIMAL (36, 18) NULL, + LowValue DECIMAL (36, 18) NULL, + HighValue DECIMAL (36, 18) NULL UNIQUE (ResourceTypeId, ResourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue)); + +CREATE TYPE dbo.QuantitySearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId INT NULL, + QuantityCodeId INT NULL, + SingleValue DECIMAL (36, 18) NULL, + LowValue DECIMAL (36, 18) NULL, + HighValue DECIMAL (36, 18) NULL UNIQUE (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue)); + +CREATE TYPE dbo.ReferenceSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + BaseUri VARCHAR (128) COLLATE Latin1_General_100_CS_AS NULL, + ReferenceResourceTypeId SMALLINT NULL, + ReferenceResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + 
ReferenceResourceVersion INT NULL UNIQUE (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId)); + +CREATE TYPE dbo.ReferenceTokenCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + BaseUri1 VARCHAR (128) COLLATE Latin1_General_100_CS_AS NULL, + ReferenceResourceTypeId1 SMALLINT NULL, + ReferenceResourceId1 VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ReferenceResourceVersion1 INT NULL, + SystemId2 INT NULL, + Code2 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow2 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL); + +CREATE TYPE dbo.ResourceDateKeyList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ResourceSurrogateId BIGINT NOT NULL PRIMARY KEY (ResourceTypeId, ResourceId, ResourceSurrogateId)); + +CREATE TYPE dbo.ResourceKeyList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Version INT NULL UNIQUE (ResourceTypeId, ResourceId, Version)); + +CREATE TYPE dbo.ResourceList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Version INT NOT NULL, + HasVersionToCompare BIT NOT NULL, + IsDeleted BIT NOT NULL, + IsHistory BIT NOT NULL, + KeepHistory BIT NOT NULL, + RawResource VARBINARY (MAX) NOT NULL, + IsRawResourceMetaSet BIT NOT NULL, + RequestMethod VARCHAR (10) NULL, + SearchParamHash VARCHAR (64) NULL PRIMARY KEY (ResourceTypeId, ResourceSurrogateId), + UNIQUE (ResourceTypeId, ResourceId, Version)); + +CREATE TYPE dbo.ResourceWriteClaimList AS TABLE ( + ResourceSurrogateId BIGINT NOT NULL, + ClaimTypeId TINYINT NOT NULL, + ClaimValue NVARCHAR (128) NOT NULL); + +CREATE TYPE dbo.StringList AS TABLE ( + String VARCHAR (MAX)); + +CREATE 
TYPE dbo.StringSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Text NVARCHAR (256) COLLATE Latin1_General_100_CI_AI_SC NOT NULL, + TextOverflow NVARCHAR (MAX) COLLATE Latin1_General_100_CI_AI_SC NULL, + IsMin BIT NOT NULL, + IsMax BIT NOT NULL); + +CREATE TYPE dbo.TokenDateTimeCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + StartDateTime2 DATETIMEOFFSET (7) NOT NULL, + EndDateTime2 DATETIMEOFFSET (7) NOT NULL, + IsLongerThanADay2 BIT NOT NULL); + +CREATE TYPE dbo.TokenNumberNumberCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + SingleValue2 DECIMAL (36, 18) NULL, + LowValue2 DECIMAL (36, 18) NULL, + HighValue2 DECIMAL (36, 18) NULL, + SingleValue3 DECIMAL (36, 18) NULL, + LowValue3 DECIMAL (36, 18) NULL, + HighValue3 DECIMAL (36, 18) NULL, + HasRange BIT NOT NULL); + +CREATE TYPE dbo.TokenQuantityCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + SystemId2 INT NULL, + QuantityCodeId2 INT NULL, + SingleValue2 DECIMAL (36, 18) NULL, + LowValue2 DECIMAL (36, 18) NULL, + HighValue2 DECIMAL (36, 18) NULL); + +CREATE TYPE dbo.TokenSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + 
SearchParamId SMALLINT NOT NULL, + SystemId INT NULL, + Code VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL); + +CREATE TYPE dbo.TokenStringCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + Text2 NVARCHAR (256) COLLATE Latin1_General_100_CI_AI_SC NOT NULL, + TextOverflow2 NVARCHAR (MAX) COLLATE Latin1_General_100_CI_AI_SC NULL); + +CREATE TYPE dbo.TokenTextList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Text NVARCHAR (400) COLLATE Latin1_General_CI_AI NOT NULL); + +CREATE TYPE dbo.TokenTokenCompositeSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + SystemId2 INT NULL, + Code2 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow2 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL); + +CREATE TYPE dbo.SearchParamTableType_2 AS TABLE ( + Uri VARCHAR (128) COLLATE Latin1_General_100_CS_AS NOT NULL, + Status VARCHAR (20) NOT NULL, + IsPartiallySupported BIT NOT NULL); + +CREATE TYPE dbo.BulkReindexResourceTableType_1 AS TABLE ( + Offset INT NOT NULL, + ResourceTypeId SMALLINT NOT NULL, + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ETag INT NULL, + SearchParamHash VARCHAR (64) NOT NULL); + +CREATE TYPE dbo.UriSearchParamList AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Uri VARCHAR (256) COLLATE 
Latin1_General_100_CS_AS NOT NULL PRIMARY KEY (ResourceTypeId, ResourceSurrogateId, SearchParamId, Uri)); + +CREATE TABLE dbo.ClaimType ( + ClaimTypeId TINYINT IDENTITY (1, 1) NOT NULL, + Name VARCHAR (128) COLLATE Latin1_General_100_CS_AS NOT NULL, + CONSTRAINT UQ_ClaimType_ClaimTypeId UNIQUE (ClaimTypeId), + CONSTRAINT PKC_ClaimType PRIMARY KEY CLUSTERED (Name) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE dbo.CompartmentAssignment ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + CompartmentTypeId TINYINT NOT NULL, + ReferenceResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + IsHistory BIT NOT NULL, + CONSTRAINT PKC_CompartmentAssignment PRIMARY KEY CLUSTERED (ResourceTypeId, ResourceSurrogateId, CompartmentTypeId, ReferenceResourceId) WITH (DATA_COMPRESSION = PAGE) ON PartitionScheme_ResourceTypeId (ResourceTypeId) +); + + +GO +ALTER TABLE dbo.CompartmentAssignment + ADD CONSTRAINT DF_CompartmentAssignment_IsHistory DEFAULT 0 FOR IsHistory; + + +GO +ALTER TABLE dbo.CompartmentAssignment SET (LOCK_ESCALATION = AUTO); + + +GO +CREATE NONCLUSTERED INDEX IX_CompartmentAssignment_CompartmentTypeId_ReferenceResourceId + ON dbo.CompartmentAssignment(ResourceTypeId, CompartmentTypeId, ReferenceResourceId, ResourceSurrogateId) WHERE IsHistory = 0 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.CompartmentType ( + CompartmentTypeId TINYINT IDENTITY (1, 1) NOT NULL, + Name VARCHAR (128) COLLATE Latin1_General_100_CS_AS NOT NULL, + CONSTRAINT UQ_CompartmentType_CompartmentTypeId UNIQUE (CompartmentTypeId), + CONSTRAINT PKC_CompartmentType PRIMARY KEY CLUSTERED (Name) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE dbo.DateTimeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + StartDateTime DATETIME2 (7) NOT NULL, + EndDateTime DATETIME2 (7) NOT NULL, + IsLongerThanADay BIT NOT NULL, + 
IsMin BIT CONSTRAINT date_IsMin_Constraint DEFAULT 0 NOT NULL, + IsMax BIT CONSTRAINT date_IsMax_Constraint DEFAULT 0 NOT NULL +); + +ALTER TABLE dbo.DateTimeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_DateTimeSearchParam + ON dbo.DateTimeSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_StartDateTime_EndDateTime_INCLUDE_IsLongerThanADay_IsMin_IsMax + ON dbo.DateTimeSearchParam(SearchParamId, StartDateTime, EndDateTime) + INCLUDE(IsLongerThanADay, IsMin, IsMax) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_EndDateTime_StartDateTime_INCLUDE_IsLongerThanADay_IsMin_IsMax + ON dbo.DateTimeSearchParam(SearchParamId, EndDateTime, StartDateTime) + INCLUDE(IsLongerThanADay, IsMin, IsMax) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_StartDateTime_EndDateTime_INCLUDE_IsMin_IsMax_WHERE_IsLongerThanADay_1 + ON dbo.DateTimeSearchParam(SearchParamId, StartDateTime, EndDateTime) + INCLUDE(IsMin, IsMax) WHERE IsLongerThanADay = 1 + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_EndDateTime_StartDateTime_INCLUDE_IsMin_IsMax_WHERE_IsLongerThanADay_1 + ON dbo.DateTimeSearchParam(SearchParamId, EndDateTime, StartDateTime) + INCLUDE(IsMin, IsMax) WHERE IsLongerThanADay = 1 + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +IF NOT EXISTS (SELECT 1 + FROM sys.tables + WHERE name = 'EventAgentCheckpoint') + BEGIN + CREATE TABLE dbo.EventAgentCheckpoint ( + CheckpointId VARCHAR (64) NOT NULL, + LastProcessedDateTime DATETIMEOFFSET (7), + LastProcessedIdentifier VARCHAR (64) , + UpdatedOn DATETIME2 (7) DEFAULT sysutcdatetime() NOT NULL, + CONSTRAINT PK_EventAgentCheckpoint PRIMARY KEY CLUSTERED (CheckpointId) + ) ON [PRIMARY]; + END + +CREATE PARTITION FUNCTION EventLogPartitionFunction(TINYINT) + AS RANGE RIGHT + FOR VALUES (0, 1, 2, 3, 
4, 5, 6, 7); + + +GO +CREATE PARTITION SCHEME EventLogPartitionScheme + AS PARTITION EventLogPartitionFunction + ALL TO ([PRIMARY]); + + +GO +CREATE TABLE dbo.EventLog ( + PartitionId AS isnull(CONVERT (TINYINT, EventId % 8), 0) PERSISTED, + EventId BIGINT IDENTITY (1, 1) NOT NULL, + EventDate DATETIME NOT NULL, + Process VARCHAR (100) NOT NULL, + Status VARCHAR (10) NOT NULL, + Mode VARCHAR (200) NULL, + Action VARCHAR (20) NULL, + Target VARCHAR (100) NULL, + Rows BIGINT NULL, + Milliseconds INT NULL, + EventText NVARCHAR (3500) NULL, + SPID SMALLINT NOT NULL, + HostName VARCHAR (64) NOT NULL CONSTRAINT PKC_EventLog_EventDate_EventId_PartitionId PRIMARY KEY CLUSTERED (EventDate, EventId, PartitionId) ON EventLogPartitionScheme (PartitionId) +); + +CREATE TABLE dbo.ExportJob ( + Id VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Hash VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Status VARCHAR (10) NOT NULL, + HeartbeatDateTime DATETIME2 (7) NULL, + RawJobRecord VARCHAR (MAX) NOT NULL, + JobVersion ROWVERSION NOT NULL, + CONSTRAINT PKC_ExportJob PRIMARY KEY CLUSTERED (Id) +); + +CREATE UNIQUE NONCLUSTERED INDEX IX_ExportJob_Hash_Status_HeartbeatDateTime + ON dbo.ExportJob(Hash, Status, HeartbeatDateTime); + +CREATE TABLE dbo.IndexProperties ( + TableName VARCHAR (100) NOT NULL, + IndexName VARCHAR (200) NOT NULL, + PropertyName VARCHAR (100) NOT NULL, + PropertyValue VARCHAR (100) NOT NULL, + CreateDate DATETIME CONSTRAINT DF_IndexProperties_CreateDate DEFAULT getUTCdate() NOT NULL CONSTRAINT PKC_IndexProperties_TableName_IndexName_PropertyName PRIMARY KEY CLUSTERED (TableName, IndexName, PropertyName) +); + +CREATE PARTITION FUNCTION TinyintPartitionFunction(TINYINT) + AS RANGE RIGHT + FOR VALUES (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 
64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255); + + +GO +CREATE PARTITION SCHEME TinyintPartitionScheme + AS PARTITION TinyintPartitionFunction + ALL TO ([PRIMARY]); + + +GO +CREATE TABLE dbo.JobQueue ( + QueueType TINYINT NOT NULL, + GroupId BIGINT NOT NULL, + JobId BIGINT NOT NULL, + PartitionId AS CONVERT (TINYINT, JobId % 16) PERSISTED, + Definition VARCHAR (MAX) NOT NULL, + DefinitionHash VARBINARY (20) NOT NULL, + Version BIGINT CONSTRAINT DF_JobQueue_Version DEFAULT datediff_big(millisecond, '0001-01-01', getUTCdate()) NOT NULL, + Status TINYINT CONSTRAINT DF_JobQueue_Status DEFAULT 0 NOT NULL, + Priority TINYINT CONSTRAINT DF_JobQueue_Priority DEFAULT 100 NOT NULL, + Data BIGINT NULL, + Result VARCHAR (MAX) NULL, + CreateDate DATETIME CONSTRAINT DF_JobQueue_CreateDate DEFAULT getUTCdate() NOT NULL, + StartDate DATETIME NULL, + EndDate DATETIME NULL, + HeartbeatDate DATETIME CONSTRAINT DF_JobQueue_HeartbeatDate DEFAULT getUTCdate() NOT NULL, + Worker VARCHAR (100) NULL, + Info VARCHAR (1000) NULL, + CancelRequested BIT CONSTRAINT DF_JobQueue_CancelRequested DEFAULT 0 NOT NULL CONSTRAINT 
PKC_JobQueue_QueueType_PartitionId_JobId PRIMARY KEY CLUSTERED (QueueType, PartitionId, JobId) ON TinyintPartitionScheme (QueueType), + CONSTRAINT U_JobQueue_QueueType_JobId UNIQUE (QueueType, JobId) +); + + +GO +CREATE INDEX IX_QueueType_PartitionId_Status_Priority + ON dbo.JobQueue(PartitionId, Status, Priority) + ON TinyintPartitionScheme (QueueType); + + +GO +CREATE INDEX IX_QueueType_GroupId + ON dbo.JobQueue(QueueType, GroupId) + ON TinyintPartitionScheme (QueueType); + + +GO +CREATE INDEX IX_QueueType_DefinitionHash + ON dbo.JobQueue(QueueType, DefinitionHash) + ON TinyintPartitionScheme (QueueType); + +CREATE TABLE dbo.NumberSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SingleValue DECIMAL (36, 18) NULL, + LowValue DECIMAL (36, 18) NOT NULL, + HighValue DECIMAL (36, 18) NOT NULL +); + +ALTER TABLE dbo.NumberSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_NumberSearchParam + ON dbo.NumberSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_SingleValue_WHERE_SingleValue_NOT_NULL + ON dbo.NumberSearchParam(SearchParamId, SingleValue) WHERE SingleValue IS NOT NULL + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_LowValue_HighValue + ON dbo.NumberSearchParam(SearchParamId, LowValue, HighValue) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_HighValue_LowValue + ON dbo.NumberSearchParam(SearchParamId, HighValue, LowValue) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.Parameters ( + Id VARCHAR (100) NOT NULL, + Date DATETIME NULL, + Number FLOAT NULL, + Bigint BIGINT NULL, + Char VARCHAR (4000) NULL, + Binary VARBINARY (MAX) NULL, + UpdatedDate DATETIME NULL, + UpdatedBy NVARCHAR (255) NULL CONSTRAINT PKC_Parameters_Id PRIMARY KEY CLUSTERED (Id) WITH 
(IGNORE_DUP_KEY = ON) +); + + +GO +CREATE TABLE dbo.ParametersHistory ( + ChangeId INT IDENTITY (1, 1) NOT NULL, + Id VARCHAR (100) NOT NULL, + Date DATETIME NULL, + Number FLOAT NULL, + Bigint BIGINT NULL, + Char VARCHAR (4000) NULL, + Binary VARBINARY (MAX) NULL, + UpdatedDate DATETIME NULL, + UpdatedBy NVARCHAR (255) NULL +); + +CREATE TABLE dbo.QuantityCode ( + QuantityCodeId INT IDENTITY (1, 1) NOT NULL, + Value NVARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CONSTRAINT UQ_QuantityCode_QuantityCodeId UNIQUE (QuantityCodeId), + CONSTRAINT PKC_QuantityCode PRIMARY KEY CLUSTERED (Value) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE dbo.QuantitySearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId INT NULL, + QuantityCodeId INT NULL, + SingleValue DECIMAL (36, 18) NULL, + LowValue DECIMAL (36, 18) NOT NULL, + HighValue DECIMAL (36, 18) NOT NULL +); + +ALTER TABLE dbo.QuantitySearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_QuantitySearchParam + ON dbo.QuantitySearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_QuantityCodeId_SingleValue_INCLUDE_SystemId_WHERE_SingleValue_NOT_NULL + ON dbo.QuantitySearchParam(SearchParamId, QuantityCodeId, SingleValue) + INCLUDE(SystemId) WHERE SingleValue IS NOT NULL + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_QuantityCodeId_LowValue_HighValue_INCLUDE_SystemId + ON dbo.QuantitySearchParam(SearchParamId, QuantityCodeId, LowValue, HighValue) + INCLUDE(SystemId) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_QuantityCodeId_HighValue_LowValue_INCLUDE_SystemId + ON dbo.QuantitySearchParam(SearchParamId, QuantityCodeId, HighValue, LowValue) + INCLUDE(SystemId) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE 
dbo.ReferenceSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + BaseUri VARCHAR (128) COLLATE Latin1_General_100_CS_AS NULL, + ReferenceResourceTypeId SMALLINT NULL, + ReferenceResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ReferenceResourceVersion INT NULL +); + +ALTER TABLE dbo.ReferenceSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_ReferenceSearchParam + ON dbo.ReferenceSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE UNIQUE INDEX IXU_ReferenceResourceId_ReferenceResourceTypeId_SearchParamId_BaseUri_ResourceSurrogateId_ResourceTypeId + ON dbo.ReferenceSearchParam(ReferenceResourceId, ReferenceResourceTypeId, SearchParamId, BaseUri, ResourceSurrogateId, ResourceTypeId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.ReferenceTokenCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + BaseUri1 VARCHAR (128) COLLATE Latin1_General_100_CS_AS NULL, + ReferenceResourceTypeId1 SMALLINT NULL, + ReferenceResourceId1 VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + ReferenceResourceVersion1 INT NULL, + SystemId2 INT NULL, + Code2 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow2 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.ReferenceTokenCompositeSearchParam + ADD CONSTRAINT CHK_ReferenceTokenCompositeSearchParam_CodeOverflow2 CHECK (LEN(Code2) = 256 + OR CodeOverflow2 IS NULL); + +ALTER TABLE dbo.ReferenceTokenCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_ReferenceTokenCompositeSearchParam + ON dbo.ReferenceTokenCompositeSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) 
+ ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_ReferenceResourceId1_Code2_INCLUDE_ReferenceResourceTypeId1_BaseUri1_SystemId2 + ON dbo.ReferenceTokenCompositeSearchParam(SearchParamId, ReferenceResourceId1, Code2) + INCLUDE(ReferenceResourceTypeId1, BaseUri1, SystemId2) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.ReindexJob ( + Id VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Status VARCHAR (10) NOT NULL, + HeartbeatDateTime DATETIME2 (7) NULL, + RawJobRecord VARCHAR (MAX) NOT NULL, + JobVersion ROWVERSION NOT NULL, + CONSTRAINT PKC_ReindexJob PRIMARY KEY CLUSTERED (Id) +); + +CREATE TABLE dbo.Resource ( + ResourceTypeId SMALLINT NOT NULL, + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + Version INT NOT NULL, + IsHistory BIT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + IsDeleted BIT NOT NULL, + RequestMethod VARCHAR (10) NULL, + RawResource VARBINARY (MAX) NOT NULL, + IsRawResourceMetaSet BIT DEFAULT 0 NOT NULL, + SearchParamHash VARCHAR (64) NULL, + TransactionId BIGINT NULL, + HistoryTransactionId BIGINT NULL CONSTRAINT PKC_Resource PRIMARY KEY CLUSTERED (ResourceTypeId, ResourceSurrogateId) WITH (DATA_COMPRESSION = PAGE) ON PartitionScheme_ResourceTypeId (ResourceTypeId), + CONSTRAINT CH_Resource_RawResource_Length CHECK (RawResource > 0x0) +); + +ALTER TABLE dbo.Resource SET (LOCK_ESCALATION = AUTO); + +CREATE INDEX IX_ResourceTypeId_TransactionId + ON dbo.Resource(ResourceTypeId, TransactionId) WHERE TransactionId IS NOT NULL + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_ResourceTypeId_HistoryTransactionId + ON dbo.Resource(ResourceTypeId, HistoryTransactionId) WHERE HistoryTransactionId IS NOT NULL + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE UNIQUE NONCLUSTERED INDEX IX_Resource_ResourceTypeId_ResourceId_Version + ON dbo.Resource(ResourceTypeId, ResourceId, Version) + ON 
PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE UNIQUE NONCLUSTERED INDEX IX_Resource_ResourceTypeId_ResourceId + ON dbo.Resource(ResourceTypeId, ResourceId) + INCLUDE(Version, IsDeleted) WHERE IsHistory = 0 + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE UNIQUE NONCLUSTERED INDEX IX_Resource_ResourceTypeId_ResourceSurrgateId + ON dbo.Resource(ResourceTypeId, ResourceSurrogateId) WHERE IsHistory = 0 + AND IsDeleted = 0 + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.ResourceChangeData ( + Id BIGINT IDENTITY (1, 1) NOT NULL, + Timestamp DATETIME2 (7) CONSTRAINT DF_ResourceChangeData_Timestamp DEFAULT sysutcdatetime() NOT NULL, + ResourceId VARCHAR (64) NOT NULL, + ResourceTypeId SMALLINT NOT NULL, + ResourceVersion INT NOT NULL, + ResourceChangeTypeId TINYINT NOT NULL +) ON PartitionScheme_ResourceChangeData_Timestamp (Timestamp); + +CREATE CLUSTERED INDEX IXC_ResourceChangeData + ON dbo.ResourceChangeData(Id ASC) WITH (ONLINE = ON) + ON PartitionScheme_ResourceChangeData_Timestamp (Timestamp); + +CREATE TABLE dbo.ResourceChangeDataStaging ( + Id BIGINT IDENTITY (1, 1) NOT NULL, + Timestamp DATETIME2 (7) CONSTRAINT DF_ResourceChangeDataStaging_Timestamp DEFAULT sysutcdatetime() NOT NULL, + ResourceId VARCHAR (64) NOT NULL, + ResourceTypeId SMALLINT NOT NULL, + ResourceVersion INT NOT NULL, + ResourceChangeTypeId TINYINT NOT NULL +) ON [PRIMARY]; + +CREATE CLUSTERED INDEX IXC_ResourceChangeDataStaging + ON dbo.ResourceChangeDataStaging(Id ASC, Timestamp ASC) WITH (ONLINE = ON) + ON [PRIMARY]; + +ALTER TABLE dbo.ResourceChangeDataStaging WITH CHECK + ADD CONSTRAINT CHK_ResourceChangeDataStaging_partition CHECK (Timestamp < CONVERT (DATETIME2 (7), N'9999-12-31 23:59:59.9999999')); + +ALTER TABLE dbo.ResourceChangeDataStaging CHECK CONSTRAINT CHK_ResourceChangeDataStaging_partition; + +CREATE TABLE dbo.ResourceChangeType ( + ResourceChangeTypeId TINYINT NOT NULL, + Name NVARCHAR (50) NOT NULL, + CONSTRAINT 
PK_ResourceChangeType PRIMARY KEY CLUSTERED (ResourceChangeTypeId), + CONSTRAINT UQ_ResourceChangeType_Name UNIQUE NONCLUSTERED (Name) +) ON [PRIMARY]; + + +GO +INSERT dbo.ResourceChangeType (ResourceChangeTypeId, Name) +VALUES (0, N'Creation'); + +INSERT dbo.ResourceChangeType (ResourceChangeTypeId, Name) +VALUES (1, N'Update'); + +INSERT dbo.ResourceChangeType (ResourceChangeTypeId, Name) +VALUES (2, N'Deletion'); + +CREATE TABLE dbo.ResourceType ( + ResourceTypeId SMALLINT IDENTITY (1, 1) NOT NULL, + Name NVARCHAR (50) COLLATE Latin1_General_100_CS_AS NOT NULL, + CONSTRAINT UQ_ResourceType_ResourceTypeId UNIQUE (ResourceTypeId), + CONSTRAINT PKC_ResourceType PRIMARY KEY CLUSTERED (Name) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE dbo.ResourceWriteClaim ( + ResourceSurrogateId BIGINT NOT NULL, + ClaimTypeId TINYINT NOT NULL, + ClaimValue NVARCHAR (128) NOT NULL +) +WITH (DATA_COMPRESSION = PAGE); + +CREATE CLUSTERED INDEX IXC_ResourceWriteClaim + ON dbo.ResourceWriteClaim(ResourceSurrogateId, ClaimTypeId); + +CREATE TABLE dbo.SchemaMigrationProgress ( + Timestamp DATETIME2 (3) DEFAULT CURRENT_TIMESTAMP, + Message NVARCHAR (MAX) +); + +CREATE TABLE dbo.SearchParam ( + SearchParamId SMALLINT IDENTITY (1, 1) NOT NULL, + Uri VARCHAR (128) COLLATE Latin1_General_100_CS_AS NOT NULL, + Status VARCHAR (20) NULL, + LastUpdated DATETIMEOFFSET (7) NULL, + IsPartiallySupported BIT NULL, + CONSTRAINT UQ_SearchParam_SearchParamId UNIQUE (SearchParamId), + CONSTRAINT PKC_SearchParam PRIMARY KEY CLUSTERED (Uri) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE dbo.StringSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Text NVARCHAR (256) COLLATE Latin1_General_100_CI_AI_SC NOT NULL, + TextOverflow NVARCHAR (MAX) COLLATE Latin1_General_100_CI_AI_SC NULL, + IsMin BIT CONSTRAINT string_IsMin_Constraint DEFAULT 0 NOT NULL, + IsMax BIT CONSTRAINT string_IsMax_Constraint DEFAULT 0 NOT NULL +); + 
+ALTER TABLE dbo.StringSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_StringSearchParam + ON dbo.StringSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Text_INCLUDE_TextOverflow_IsMin_IsMax + ON dbo.StringSearchParam(SearchParamId, Text) + INCLUDE(TextOverflow, IsMin, IsMax) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Text_INCLUDE_IsMin_IsMax_WHERE_TextOverflow_NOT_NULL + ON dbo.StringSearchParam(SearchParamId, Text) + INCLUDE(IsMin, IsMax) WHERE TextOverflow IS NOT NULL WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.System ( + SystemId INT IDENTITY (1, 1) NOT NULL, + Value NVARCHAR (256) NOT NULL, + CONSTRAINT UQ_System_SystemId UNIQUE (SystemId), + CONSTRAINT PKC_System PRIMARY KEY CLUSTERED (Value) WITH (DATA_COMPRESSION = PAGE) +); + +CREATE TABLE [dbo].[TaskInfo] ( + [TaskId] VARCHAR (64) NOT NULL, + [QueueId] VARCHAR (64) NOT NULL, + [Status] SMALLINT NOT NULL, + [TaskTypeId] SMALLINT NOT NULL, + [RunId] VARCHAR (50) NULL, + [IsCanceled] BIT NOT NULL, + [RetryCount] SMALLINT NOT NULL, + [MaxRetryCount] SMALLINT NOT NULL, + [HeartbeatDateTime] DATETIME2 (7) NULL, + [InputData] VARCHAR (MAX) NOT NULL, + [TaskContext] VARCHAR (MAX) NULL, + [Result] VARCHAR (MAX) NULL, + [CreateDateTime] DATETIME2 (7) CONSTRAINT DF_TaskInfo_CreateDate DEFAULT SYSUTCDATETIME() NOT NULL, + [StartDateTime] DATETIME2 (7) NULL, + [EndDateTime] DATETIME2 (7) NULL, + [Worker] VARCHAR (100) NULL, + [RestartInfo] VARCHAR (MAX) NULL, + [ParentTaskId] VARCHAR (64) NULL, + CONSTRAINT PKC_TaskInfo PRIMARY KEY CLUSTERED (TaskId) WITH (DATA_COMPRESSION = PAGE) +) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]; + + +GO +CREATE NONCLUSTERED INDEX IX_QueueId_Status + ON dbo.TaskInfo(QueueId, Status); + + +GO +CREATE 
NONCLUSTERED INDEX IX_QueueId_ParentTaskId + ON dbo.TaskInfo(QueueId, ParentTaskId); + +CREATE TABLE dbo.TokenDateTimeCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + StartDateTime2 DATETIME2 (7) NOT NULL, + EndDateTime2 DATETIME2 (7) NOT NULL, + IsLongerThanADay2 BIT NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenDateTimeCompositeSearchParam + ADD CONSTRAINT CHK_TokenDateTimeCompositeSearchParam_CodeOverflow1 CHECK (LEN(Code1) = 256 + OR CodeOverflow1 IS NULL); + +ALTER TABLE dbo.TokenDateTimeCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenDateTimeCompositeSearchParam + ON dbo.TokenDateTimeCompositeSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_StartDateTime2_EndDateTime2_INCLUDE_SystemId1_IsLongerThanADay2 + ON dbo.TokenDateTimeCompositeSearchParam(SearchParamId, Code1, StartDateTime2, EndDateTime2) + INCLUDE(SystemId1, IsLongerThanADay2) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_EndDateTime2_StartDateTime2_INCLUDE_SystemId1_IsLongerThanADay2 + ON dbo.TokenDateTimeCompositeSearchParam(SearchParamId, Code1, EndDateTime2, StartDateTime2) + INCLUDE(SystemId1, IsLongerThanADay2) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_StartDateTime2_EndDateTime2_INCLUDE_SystemId1_WHERE_IsLongerThanADay2_1 + ON dbo.TokenDateTimeCompositeSearchParam(SearchParamId, Code1, StartDateTime2, EndDateTime2) + INCLUDE(SystemId1) WHERE IsLongerThanADay2 = 1 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId 
(ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_EndDateTime2_StartDateTime2_INCLUDE_SystemId1_WHERE_IsLongerThanADay2_1 + ON dbo.TokenDateTimeCompositeSearchParam(SearchParamId, Code1, EndDateTime2, StartDateTime2) + INCLUDE(SystemId1) WHERE IsLongerThanADay2 = 1 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenNumberNumberCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + SingleValue2 DECIMAL (36, 18) NULL, + LowValue2 DECIMAL (36, 18) NULL, + HighValue2 DECIMAL (36, 18) NULL, + SingleValue3 DECIMAL (36, 18) NULL, + LowValue3 DECIMAL (36, 18) NULL, + HighValue3 DECIMAL (36, 18) NULL, + HasRange BIT NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenNumberNumberCompositeSearchParam + ADD CONSTRAINT CHK_TokenNumberNumberCompositeSearchParam_CodeOverflow1 CHECK (LEN(Code1) = 256 + OR CodeOverflow1 IS NULL); + +ALTER TABLE dbo.TokenNumberNumberCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenNumberNumberCompositeSearchParam + ON dbo.TokenNumberNumberCompositeSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_SingleValue2_SingleValue3_INCLUDE_SystemId1_WHERE_HasRange_0 + ON dbo.TokenNumberNumberCompositeSearchParam(SearchParamId, Code1, SingleValue2, SingleValue3) + INCLUDE(SystemId1) WHERE HasRange = 0 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_LowValue2_HighValue2_LowValue3_HighValue3_INCLUDE_SystemId1_WHERE_HasRange_1 + ON dbo.TokenNumberNumberCompositeSearchParam(SearchParamId, Code1, LowValue2, HighValue2, LowValue3, 
HighValue3) + INCLUDE(SystemId1) WHERE HasRange = 1 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenQuantityCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + SystemId2 INT NULL, + QuantityCodeId2 INT NULL, + SingleValue2 DECIMAL (36, 18) NULL, + LowValue2 DECIMAL (36, 18) NULL, + HighValue2 DECIMAL (36, 18) NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenQuantityCompositeSearchParam + ADD CONSTRAINT CHK_TokenQuantityCompositeSearchParam_CodeOverflow1 CHECK (LEN(Code1) = 256 + OR CodeOverflow1 IS NULL); + +ALTER TABLE dbo.TokenQuantityCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenQuantityCompositeSearchParam + ON dbo.TokenQuantityCompositeSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_SingleValue2_INCLUDE_QuantityCodeId2_SystemId1_SystemId2_WHERE_SingleValue2_NOT_NULL + ON dbo.TokenQuantityCompositeSearchParam(SearchParamId, Code1, SingleValue2) + INCLUDE(QuantityCodeId2, SystemId1, SystemId2) WHERE SingleValue2 IS NOT NULL WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_LowValue2_HighValue2_INCLUDE_QuantityCodeId2_SystemId1_SystemId2_WHERE_LowValue2_NOT_NULL + ON dbo.TokenQuantityCompositeSearchParam(SearchParamId, Code1, LowValue2, HighValue2) + INCLUDE(QuantityCodeId2, SystemId1, SystemId2) WHERE LowValue2 IS NOT NULL WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_HighValue2_LowValue2_INCLUDE_QuantityCodeId2_SystemId1_SystemId2_WHERE_LowValue2_NOT_NULL 
+ ON dbo.TokenQuantityCompositeSearchParam(SearchParamId, Code1, HighValue2, LowValue2) + INCLUDE(QuantityCodeId2, SystemId1, SystemId2) WHERE LowValue2 IS NOT NULL WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId INT NULL, + Code VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenSearchParam + ADD CONSTRAINT CHK_TokenSearchParam_CodeOverflow CHECK (LEN(Code) = 256 + OR CodeOverflow IS NULL); + +ALTER TABLE dbo.TokenSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenSearchParam + ON dbo.TokenSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code_INCLUDE_SystemId + ON dbo.TokenSearchParam(SearchParamId, Code) + INCLUDE(SystemId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenStringCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + Text2 NVARCHAR (256) COLLATE Latin1_General_CI_AI NOT NULL, + TextOverflow2 NVARCHAR (MAX) COLLATE Latin1_General_CI_AI NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenStringCompositeSearchParam + ADD CONSTRAINT CHK_TokenStringCompositeSearchParam_CodeOverflow1 CHECK (LEN(Code1) = 256 + OR CodeOverflow1 IS NULL); + +ALTER TABLE dbo.TokenStringCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenStringCompositeSearchParam + ON 
dbo.TokenStringCompositeSearchParam(ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_Text2_INCLUDE_SystemId1_TextOverflow2 + ON dbo.TokenStringCompositeSearchParam(SearchParamId, Code1, Text2) + INCLUDE(SystemId1, TextOverflow2) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_Text2_INCLUDE_SystemId1_WHERE_TextOverflow2_NOT_NULL + ON dbo.TokenStringCompositeSearchParam(SearchParamId, Code1, Text2) + INCLUDE(SystemId1) WHERE TextOverflow2 IS NOT NULL WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenText ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Text NVARCHAR (400) COLLATE Latin1_General_CI_AI NOT NULL, + IsHistory BIT NOT NULL +); + +ALTER TABLE dbo.TokenText + ADD CONSTRAINT DF_TokenText_IsHistory DEFAULT 0 FOR IsHistory; + +ALTER TABLE dbo.TokenText SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenText + ON dbo.TokenText(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE NONCLUSTERED INDEX IX_TokenText_SearchParamId_Text + ON dbo.TokenText(ResourceTypeId, SearchParamId, Text, ResourceSurrogateId) WHERE IsHistory = 0 WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.TokenTokenCompositeSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + SystemId1 INT NULL, + Code1 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + SystemId2 INT NULL, + Code2 VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL, + CodeOverflow1 VARCHAR (MAX) COLLATE Latin1_General_100_CS_AS NULL, + CodeOverflow2 VARCHAR (MAX) COLLATE 
Latin1_General_100_CS_AS NULL +); + +ALTER TABLE dbo.TokenTokenCompositeSearchParam + ADD CONSTRAINT CHK_TokenTokenCompositeSearchParam_CodeOverflow1 CHECK (LEN(Code1) = 256 + OR CodeOverflow1 IS NULL); + +ALTER TABLE dbo.TokenTokenCompositeSearchParam + ADD CONSTRAINT CHK_TokenTokenCompositeSearchParam_CodeOverflow2 CHECK (LEN(Code2) = 256 + OR CodeOverflow2 IS NULL); + +ALTER TABLE dbo.TokenTokenCompositeSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_TokenTokenCompositeSearchParam + ON dbo.TokenTokenCompositeSearchParam(ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Code1_Code2_INCLUDE_SystemId1_SystemId2 + ON dbo.TokenTokenCompositeSearchParam(SearchParamId, Code1, Code2) + INCLUDE(SystemId1, SystemId2) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.Transactions ( + SurrogateIdRangeFirstValue BIGINT NOT NULL, + SurrogateIdRangeLastValue BIGINT NOT NULL, + Definition VARCHAR (2000) NULL, + IsCompleted BIT CONSTRAINT DF_Transactions_IsCompleted DEFAULT 0 NOT NULL, + IsSuccess BIT CONSTRAINT DF_Transactions_IsSuccess DEFAULT 0 NOT NULL, + IsVisible BIT CONSTRAINT DF_Transactions_IsVisible DEFAULT 0 NOT NULL, + IsHistoryMoved BIT CONSTRAINT DF_Transactions_IsHistoryMoved DEFAULT 0 NOT NULL, + CreateDate DATETIME CONSTRAINT DF_Transactions_CreateDate DEFAULT getUTCdate() NOT NULL, + EndDate DATETIME NULL, + VisibleDate DATETIME NULL, + HistoryMovedDate DATETIME NULL, + HeartbeatDate DATETIME CONSTRAINT DF_Transactions_HeartbeatDate DEFAULT getUTCdate() NOT NULL, + FailureReason VARCHAR (MAX) NULL, + IsControlledByClient BIT CONSTRAINT DF_Transactions_IsControlledByClient DEFAULT 1 NOT NULL, + InvisibleHistoryRemovedDate DATETIME NULL CONSTRAINT PKC_Transactions_SurrogateIdRangeFirstValue PRIMARY KEY CLUSTERED (SurrogateIdRangeFirstValue) +); + +CREATE INDEX IX_IsVisible + ON 
dbo.Transactions(IsVisible); + +CREATE TABLE dbo.UriSearchParam ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL, + SearchParamId SMALLINT NOT NULL, + Uri VARCHAR (256) COLLATE Latin1_General_100_CS_AS NOT NULL +); + +ALTER TABLE dbo.UriSearchParam SET (LOCK_ESCALATION = AUTO); + +CREATE CLUSTERED INDEX IXC_UriSearchParam + ON dbo.UriSearchParam(ResourceTypeId, ResourceSurrogateId, SearchParamId) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE INDEX IX_SearchParamId_Uri + ON dbo.UriSearchParam(SearchParamId, Uri) WITH (DATA_COMPRESSION = PAGE) + ON PartitionScheme_ResourceTypeId (ResourceTypeId); + +CREATE TABLE dbo.WatchdogLeases ( + Watchdog VARCHAR (100) NOT NULL, + LeaseHolder VARCHAR (100) CONSTRAINT DF_WatchdogLeases_LeaseHolder DEFAULT '' NOT NULL, + LeaseEndTime DATETIME CONSTRAINT DF_WatchdogLeases_LeaseEndTime DEFAULT 0 NOT NULL, + RemainingLeaseTimeSec AS datediff(second, getUTCdate(), LeaseEndTime), + LeaseRequestor VARCHAR (100) CONSTRAINT DF_WatchdogLeases_LeaseRequestor DEFAULT '' NOT NULL, + LeaseRequestTime DATETIME CONSTRAINT DF_WatchdogLeases_LeaseRequestTime DEFAULT 0 NOT NULL CONSTRAINT PKC_WatchdogLeases_Watchdog PRIMARY KEY CLUSTERED (Watchdog) +); + +COMMIT +GO +CREATE PROCEDURE dbo.AcquireReindexJobs +@jobHeartbeatTimeoutThresholdInSeconds BIGINT, @maximumNumberOfConcurrentJobsAllowed INT +AS +SET NOCOUNT ON; +SET XACT_ABORT ON; +SET TRANSACTION ISOLATION LEVEL SERIALIZABLE; +BEGIN TRANSACTION; +DECLARE @expirationDateTime AS DATETIME2 (7); +SELECT @expirationDateTime = DATEADD(second, -@jobHeartbeatTimeoutThresholdInSeconds, SYSUTCDATETIME()); +DECLARE @numberOfRunningJobs AS INT; +SELECT @numberOfRunningJobs = COUNT(*) +FROM dbo.ReindexJob WITH (TABLOCKX) +WHERE Status = 'Running' + AND HeartbeatDateTime > @expirationDateTime; +DECLARE @limit AS INT = @maximumNumberOfConcurrentJobsAllowed - @numberOfRunningJobs; +IF (@limit > 0) + BEGIN + DECLARE @availableJobs 
TABLE ( + Id VARCHAR (64) COLLATE Latin1_General_100_CS_AS NOT NULL, + JobVersion BINARY (8) NOT NULL); + INSERT INTO @availableJobs + SELECT TOP (@limit) Id, + JobVersion + FROM dbo.ReindexJob + WHERE (Status = 'Queued' + OR (Status = 'Running' + AND HeartbeatDateTime <= @expirationDateTime)) + ORDER BY HeartbeatDateTime; + DECLARE @heartbeatDateTime AS DATETIME2 (7) = SYSUTCDATETIME(); + UPDATE dbo.ReindexJob + SET Status = 'Running', + HeartbeatDateTime = @heartbeatDateTime, + RawJobRecord = JSON_MODIFY(RawJobRecord, '$.status', 'Running') + OUTPUT inserted.RawJobRecord, inserted.JobVersion + FROM dbo.ReindexJob AS job + INNER JOIN + @availableJobs AS availableJob + ON job.Id = availableJob.Id + AND job.JobVersion = availableJob.JobVersion; + END +COMMIT TRANSACTION; + +GO +CREATE PROCEDURE dbo.AcquireWatchdogLease +@Watchdog VARCHAR (100), @Worker VARCHAR (100), @AllowRebalance BIT=1, @ForceAcquire BIT=0, @LeasePeriodSec FLOAT, @WorkerIsRunning BIT=0, @LeaseEndTime DATETIME OUTPUT, @IsAcquired BIT OUTPUT, @CurrentLeaseHolder VARCHAR (100)=NULL OUTPUT +AS +SET NOCOUNT ON; +SET XACT_ABORT ON; +DECLARE @SP AS VARCHAR (100) = 'AcquireWatchdogLease', @Mode AS VARCHAR (100), @msg AS VARCHAR (1000), @MyLeasesNumber AS INT, @OtherValidRequestsOrLeasesNumber AS INT, @MyValidRequestsOrLeasesNumber AS INT, @DesiredLeasesNumber AS INT, @NotLeasedWatchdogNumber AS INT, @WatchdogNumber AS INT, @Now AS DATETIME, @MyLastChangeTime AS DATETIME, @PreviousLeaseHolder AS VARCHAR (100), @Rows AS INT = 0, @NumberOfWorkers AS INT, @st AS DATETIME = getUTCdate(), @RowsInt AS INT, @Pattern AS VARCHAR (100); +BEGIN TRY + SET @Mode = 'R=' + isnull(@Watchdog, 'NULL') + ' W=' + isnull(@Worker, 'NULL') + ' F=' + isnull(CONVERT (VARCHAR, @ForceAcquire), 'NULL') + ' LP=' + isnull(CONVERT (VARCHAR, @LeasePeriodSec), 'NULL'); + SET @CurrentLeaseHolder = ''; + SET @IsAcquired = 0; + SET @Now = getUTCdate(); + SET @LeaseEndTime = @Now; + SET @Pattern = NULLIF ((SELECT Char + FROM dbo.Parameters + 
WHERE Id = 'WatchdogLeaseHolderIncludePatternFor' + @Watchdog), ''); + IF @Pattern IS NULL + SET @Pattern = NULLIF ((SELECT Char + FROM dbo.Parameters + WHERE Id = 'WatchdogLeaseHolderIncludePattern'), ''); + IF @Pattern IS NOT NULL + AND @Worker NOT LIKE @Pattern + BEGIN + SET @msg = 'Worker does not match include pattern=' + @Pattern; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows, @Text = @msg; + SET @CurrentLeaseHolder = isnull((SELECT LeaseHolder + FROM dbo.WatchdogLeases + WHERE Watchdog = @Watchdog), ''); + RETURN; + END + SET @Pattern = NULLIF ((SELECT Char + FROM dbo.Parameters + WHERE Id = 'WatchdogLeaseHolderExcludePatternFor' + @Watchdog), ''); + IF @Pattern IS NULL + SET @Pattern = NULLIF ((SELECT Char + FROM dbo.Parameters + WHERE Id = 'WatchdogLeaseHolderExcludePattern'), ''); + IF @Pattern IS NOT NULL + AND @Worker LIKE @Pattern + BEGIN + SET @msg = 'Worker matches exclude pattern=' + @Pattern; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows, @Text = @msg; + SET @CurrentLeaseHolder = isnull((SELECT LeaseHolder + FROM dbo.WatchdogLeases + WHERE Watchdog = @Watchdog), ''); + RETURN; + END + DECLARE @Watchdogs TABLE ( + Watchdog VARCHAR (100) PRIMARY KEY); + INSERT INTO @Watchdogs + SELECT Watchdog + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE RemainingLeaseTimeSec * (-1) > 10 * @LeasePeriodSec + OR @ForceAcquire = 1 + AND Watchdog = @Watchdog + AND LeaseHolder <> @Worker; + IF @@rowcount > 0 + BEGIN + DELETE dbo.WatchdogLeases + WHERE Watchdog IN (SELECT Watchdog + FROM @Watchdogs); + SET @Rows += @@rowcount; + IF @Rows > 0 + BEGIN + SET @msg = ''; + SELECT @msg = CONVERT (VARCHAR (1000), @msg + CASE WHEN @msg = '' THEN '' ELSE ',' END + Watchdog) + FROM @Watchdogs; + SET @msg = CONVERT (VARCHAR (1000), 'Remove old/forced leases:' + @msg); + EXECUTE dbo.LogEvent @Process = 'AcquireWatchdogLease', @Status = 'Info', @Mode = @Mode, @Target = 
'WatchdogLeases', @Action = 'Delete', @Rows = @Rows, @Text = @msg; + END + END + SET @NumberOfWorkers = 1 + (SELECT count(*) + FROM (SELECT LeaseHolder + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseHolder <> @Worker + UNION + SELECT LeaseRequestor + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseRequestor <> @Worker + AND LeaseRequestor <> '') AS A); + SET @Mode = CONVERT (VARCHAR (100), @Mode + ' N=' + CONVERT (VARCHAR (10), @NumberOfWorkers)); + IF NOT EXISTS (SELECT * + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE Watchdog = @Watchdog) + INSERT INTO dbo.WatchdogLeases (Watchdog, LeaseEndTime, LeaseRequestTime) + SELECT @Watchdog, + dateadd(day, -10, @Now), + dateadd(day, -10, @Now) + WHERE NOT EXISTS (SELECT * + FROM dbo.WatchdogLeases WITH (TABLOCKX) + WHERE Watchdog = @Watchdog); + SET @LeaseEndTime = dateadd(second, @LeasePeriodSec, @Now); + SET @WatchdogNumber = (SELECT count(*) + FROM dbo.WatchdogLeases WITH (NOLOCK)); + SET @NotLeasedWatchdogNumber = (SELECT count(*) + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseHolder = '' + OR LeaseEndTime < @Now); + SET @MyLeasesNumber = (SELECT count(*) + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseHolder = @Worker + AND LeaseEndTime > @Now); + SET @OtherValidRequestsOrLeasesNumber = (SELECT count(*) + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseHolder <> @Worker + AND LeaseEndTime > @Now + OR LeaseRequestor <> @Worker + AND datediff(second, LeaseRequestTime, @Now) < @LeasePeriodSec); + SET @MyValidRequestsOrLeasesNumber = (SELECT count(*) + FROM dbo.WatchdogLeases WITH (NOLOCK) + WHERE LeaseHolder = @Worker + AND LeaseEndTime > @Now + OR LeaseRequestor = @Worker + AND datediff(second, LeaseRequestTime, @Now) < @LeasePeriodSec); + SET @DesiredLeasesNumber = ceiling(1.0 * @WatchdogNumber / @NumberOfWorkers); + IF @DesiredLeasesNumber = 0 + SET @DesiredLeasesNumber = 1; + IF @DesiredLeasesNumber = 1 + AND @OtherValidRequestsOrLeasesNumber = 1 + AND @WatchdogNumber = 1 + SET 
@DesiredLeasesNumber = 0; + IF @MyValidRequestsOrLeasesNumber = floor(1.0 * @WatchdogNumber / @NumberOfWorkers) + AND @OtherValidRequestsOrLeasesNumber + @MyValidRequestsOrLeasesNumber = @WatchdogNumber + SET @DesiredLeasesNumber = @DesiredLeasesNumber - 1; + UPDATE dbo.WatchdogLeases + SET LeaseHolder = @Worker, + LeaseEndTime = @LeaseEndTime, + LeaseRequestor = '', + @PreviousLeaseHolder = LeaseHolder + WHERE Watchdog = @Watchdog + AND NOT (LeaseRequestor <> @Worker + AND datediff(second, LeaseRequestTime, @Now) < @LeasePeriodSec) + AND (LeaseHolder = @Worker + AND (LeaseEndTime > @Now + OR @WorkerIsRunning = 1) + OR LeaseEndTime < @Now + AND (@DesiredLeasesNumber > @MyLeasesNumber + OR @OtherValidRequestsOrLeasesNumber < @WatchdogNumber)); + IF @@rowcount > 0 + BEGIN + SET @IsAcquired = 1; + SET @msg = 'Lease holder changed from [' + isnull(@PreviousLeaseHolder, '') + '] to [' + @Worker + ']'; + IF @PreviousLeaseHolder <> @Worker + EXECUTE dbo.LogEvent @Process = 'AcquireWatchdogLease', @Status = 'Info', @Mode = @Mode, @Text = @msg; + END + ELSE + IF @AllowRebalance = 1 + BEGIN + SET @CurrentLeaseHolder = (SELECT LeaseHolder + FROM dbo.WatchdogLeases + WHERE Watchdog = @Watchdog); + UPDATE dbo.WatchdogLeases + SET LeaseRequestTime = @Now + WHERE Watchdog = @Watchdog + AND LeaseRequestor = @Worker + AND datediff(second, LeaseRequestTime, @Now) < @LeasePeriodSec; + IF @DesiredLeasesNumber > @MyValidRequestsOrLeasesNumber + BEGIN + UPDATE A + SET LeaseRequestor = @Worker, + LeaseRequestTime = @Now + FROM dbo.WatchdogLeases AS A + WHERE Watchdog = @Watchdog + AND NOT (LeaseRequestor <> @Worker + AND datediff(second, LeaseRequestTime, @Now) < @LeasePeriodSec) + AND @NotLeasedWatchdogNumber = 0 + AND (SELECT count(*) + FROM dbo.WatchdogLeases AS B + WHERE B.LeaseHolder = A.LeaseHolder + AND datediff(second, B.LeaseEndTime, @Now) < @LeasePeriodSec) > @DesiredLeasesNumber; + SET @RowsInt = @@rowcount; + SET @msg = '@DesiredLeasesNumber=[' + CONVERT (VARCHAR (10), 
@DesiredLeasesNumber) + '] > @MyValidRequestsOrLeasesNumber=[' + CONVERT (VARCHAR (10), @MyValidRequestsOrLeasesNumber) + ']'; + EXECUTE dbo.LogEvent @Process = 'AcquireWatchdogLease', @Status = 'Info', @Mode = @Mode, @Rows = @RowsInt, @Text = @msg; + END + END + SET @Mode = CONVERT (VARCHAR (100), @Mode + ' A=' + CONVERT (VARCHAR (1), @IsAcquired)); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = 'AcquireWatchdogLease', @Status = 'Error', @Mode = @Mode; + THROW; +END CATCH + +GO +CREATE OR ALTER PROCEDURE dbo.AddPartitionOnResourceChanges +@partitionBoundary DATETIME2 (7) OUTPUT +AS +BEGIN + SET XACT_ABORT ON; + BEGIN TRANSACTION; + DECLARE @rightPartitionBoundary AS DATETIME2 (7) = CAST ((SELECT TOP (1) value + FROM sys.partition_range_values AS prv + INNER JOIN + sys.partition_functions AS pf + ON pf.function_id = prv.function_id + WHERE pf.name = N'PartitionFunction_ResourceChangeData_Timestamp' + ORDER BY prv.boundary_id DESC) AS DATETIME2 (7)); + DECLARE @timestamp AS DATETIME2 (7) = DATEADD(hour, DATEDIFF(hour, 0, sysutcdatetime()), 0); + IF (@rightPartitionBoundary < @timestamp) + BEGIN + SET @rightPartitionBoundary = @timestamp; + END + SET @rightPartitionBoundary = DATEADD(hour, 1, @rightPartitionBoundary); + ALTER PARTITION SCHEME PartitionScheme_ResourceChangeData_Timestamp NEXT USED [Primary]; + ALTER PARTITION FUNCTION PartitionFunction_ResourceChangeData_Timestamp( ) + SPLIT RANGE (@rightPartitionBoundary); + SET @partitionBoundary = @rightPartitionBoundary; + COMMIT TRANSACTION; +END + +GO +CREATE PROCEDURE dbo.ArchiveJobs +@QueueType TINYINT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'ArchiveJobs', @Mode AS VARCHAR (100) = '', @st AS DATETIME = getUTCdate(), @Rows AS INT = 0, @PartitionId AS TINYINT, @MaxPartitions AS TINYINT = 16, @LookedAtPartitions AS TINYINT = 
0, @InflightRows AS INT = 0, @Lock AS VARCHAR (100) = 'DequeueJob_' + CONVERT (VARCHAR, @QueueType); +BEGIN TRY + SET @PartitionId = @MaxPartitions * rand(); + BEGIN TRANSACTION; + EXECUTE sp_getapplock @Lock, 'Exclusive'; + WHILE @LookedAtPartitions <= @MaxPartitions + BEGIN + SET @InflightRows += (SELECT count(*) + FROM dbo.JobQueue + WHERE PartitionId = @PartitionId + AND QueueType = @QueueType + AND Status IN (0, 1)); + SET @PartitionId = CASE WHEN @PartitionId = 15 THEN 0 ELSE @PartitionId + 1 END; + SET @LookedAtPartitions = @LookedAtPartitions + 1; + END + IF @InflightRows = 0 + BEGIN + SET @LookedAtPartitions = 0; + WHILE @LookedAtPartitions <= @MaxPartitions + BEGIN + UPDATE dbo.JobQueue + SET Status = 5 + WHERE PartitionId = @PartitionId + AND QueueType = @QueueType + AND Status IN (2, 3, 4); + SET @Rows += @@rowcount; + SET @PartitionId = CASE WHEN @PartitionId = 15 THEN 0 ELSE @PartitionId + 1 END; + SET @LookedAtPartitions = @LookedAtPartitions + 1; + END + END + COMMIT TRANSACTION; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.CaptureResourceChanges +@isDeleted BIT, @version INT, @resourceId VARCHAR (64), @resourceTypeId SMALLINT +AS +BEGIN + DECLARE @changeType AS SMALLINT; + IF (@isDeleted = 1) + BEGIN + SET @changeType = 2; + END + ELSE + BEGIN + IF (@version = 1) + BEGIN + SET @changeType = 0; + END + ELSE + BEGIN + SET @changeType = 1; + END + END + INSERT INTO dbo.ResourceChangeData (ResourceId, ResourceTypeId, ResourceVersion, ResourceChangeTypeId) + VALUES (@resourceId, @resourceTypeId, @version, @changeType); +END + +GO +CREATE PROCEDURE dbo.CaptureResourceIdsForChanges +@Resources dbo.ResourceList READONLY +AS +SET NOCOUNT ON; +INSERT INTO dbo.ResourceChangeData (ResourceId, 
ResourceTypeId, ResourceVersion, ResourceChangeTypeId) +SELECT ResourceId, + ResourceTypeId, + Version, + CASE WHEN IsDeleted = 1 THEN 2 WHEN Version > 1 THEN 1 ELSE 0 END +FROM @Resources +WHERE IsHistory = 0; + +GO +CREATE PROCEDURE dbo.CheckActiveReindexJobs +AS +SET NOCOUNT ON; +SELECT Id +FROM dbo.ReindexJob +WHERE Status = 'Running' + OR Status = 'Queued' + OR Status = 'Paused'; + +GO +CREATE PROCEDURE dbo.CleanupEventLog +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'CleanupEventLog', @Mode AS VARCHAR (100) = '', @MaxDeleteRows AS INT, @MaxAllowedRows AS BIGINT, @RetentionPeriodSecond AS INT, @DeletedRows AS INT, @TotalDeletedRows AS INT = 0, @TotalRows AS INT, @Now AS DATETIME = getUTCdate(); +EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; +BEGIN TRY + SET @MaxDeleteRows = (SELECT Number + FROM dbo.Parameters + WHERE Id = 'CleanupEventLog.DeleteBatchSize'); + IF @MaxDeleteRows IS NULL + RAISERROR ('Cannot get Parameter.CleanupEventLog.DeleteBatchSize', 18, 127); + SET @MaxAllowedRows = (SELECT Number + FROM dbo.Parameters + WHERE Id = 'CleanupEventLog.AllowedRows'); + IF @MaxAllowedRows IS NULL + RAISERROR ('Cannot get Parameter.CleanupEventLog.AllowedRows', 18, 127); + SET @RetentionPeriodSecond = (SELECT Number * 24 * 60 * 60 + FROM dbo.Parameters + WHERE Id = 'CleanupEventLog.RetentionPeriodDay'); + IF @RetentionPeriodSecond IS NULL + RAISERROR ('Cannot get Parameter.CleanupEventLog.RetentionPeriodDay', 18, 127); + SET @TotalRows = (SELECT sum(row_count) + FROM sys.dm_db_partition_stats + WHERE object_id = object_id('EventLog') + AND index_id IN (0, 1)); + SET @DeletedRows = 1; + WHILE @DeletedRows > 0 + AND EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = 'CleanupEventLog.IsEnabled' + AND Number = 1) + BEGIN + SET @DeletedRows = 0; + IF @TotalRows - @TotalDeletedRows > @MaxAllowedRows + BEGIN + DELETE TOP (@MaxDeleteRows) + dbo.EventLog WITH (PAGLOCK) + WHERE EventDate <= dateadd(second, 
-@RetentionPeriodSecond, @Now); + SET @DeletedRows = @@rowcount; + SET @TotalDeletedRows += @DeletedRows; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = 'EventLog', @Action = 'Delete', @Rows = @DeletedRows, @Text = @TotalDeletedRows; + END + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @Now; +END TRY +BEGIN CATCH + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE OR ALTER PROCEDURE dbo.ConfigurePartitionOnResourceChanges +@numberOfFuturePartitionsToAdd INT +AS +BEGIN + SET XACT_ABORT ON; + BEGIN TRANSACTION; + DECLARE @partitionBoundary AS DATETIME2 (7) = DATEADD(hour, DATEDIFF(hour, 0, sysutcdatetime()), 0); + DECLARE @startingRightPartitionBoundary AS DATETIME2 (7) = CAST ((SELECT TOP (1) value + FROM sys.partition_range_values AS prv + INNER JOIN + sys.partition_functions AS pf + ON pf.function_id = prv.function_id + WHERE pf.name = N'PartitionFunction_ResourceChangeData_Timestamp' + ORDER BY prv.boundary_id DESC) AS DATETIME2 (7)); + DECLARE @numberOfPartitionsToAdd AS INT = @numberOfFuturePartitionsToAdd + 1; + WHILE @numberOfPartitionsToAdd > 0 + BEGIN + IF (@startingRightPartitionBoundary < @partitionBoundary) + BEGIN + ALTER PARTITION SCHEME PartitionScheme_ResourceChangeData_Timestamp NEXT USED [PRIMARY]; + ALTER PARTITION FUNCTION PartitionFunction_ResourceChangeData_Timestamp( ) + SPLIT RANGE (@partitionBoundary); + END + SET @partitionBoundary = DATEADD(hour, 1, @partitionBoundary); + SET @numberOfPartitionsToAdd -= 1; + END + COMMIT TRANSACTION; +END + +GO +CREATE PROCEDURE dbo.CreateReindexJob +@id VARCHAR (64), @status VARCHAR (10), @rawJobRecord VARCHAR (MAX) +AS +SET NOCOUNT ON; +SET XACT_ABORT ON; +BEGIN TRANSACTION; +DECLARE @heartbeatDateTime AS DATETIME2 (7) = SYSUTCDATETIME(); +INSERT INTO dbo.ReindexJob (Id, Status, HeartbeatDateTime, RawJobRecord) +VALUES (@id, @status, @heartbeatDateTime, @rawJobRecord); +SELECT 
CAST (MIN_ACTIVE_ROWVERSION() AS INT); +COMMIT TRANSACTION; + +GO +CREATE PROCEDURE dbo.CreateResourceSearchParamStats +@Table VARCHAR (100), @Column VARCHAR (100), @ResourceTypeId SMALLINT, @SearchParamId SMALLINT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (200) = 'T=' + isnull(@Table, 'NULL') + ' C=' + isnull(@Column, 'NULL') + ' RT=' + isnull(CONVERT (VARCHAR, @ResourceTypeId), 'NULL') + ' SP=' + isnull(CONVERT (VARCHAR, @SearchParamId), 'NULL'), @st AS DATETIME = getUTCdate(); +BEGIN TRY + IF @Table IS NULL + OR @Column IS NULL + OR @ResourceTypeId IS NULL + OR @SearchParamId IS NULL + RAISERROR ('@TableName IS NULL OR @KeyColumn IS NULL OR @ResourceTypeId IS NULL OR @SearchParamId IS NULL', 18, 127); + EXECUTE ('CREATE STATISTICS ST_' + @Column + '_WHERE_ResourceTypeId_' + @ResourceTypeId + '_SearchParamId_' + @SearchParamId + ' ON dbo.' + @Table + ' (' + @Column + ') WHERE ResourceTypeId = ' + @ResourceTypeId + ' AND SearchParamId = ' + @SearchParamId); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Text = 'Stats created'; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + IF error_number() = 1927 + BEGIN + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; + RETURN; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.Defrag +@TableName VARCHAR (100), @IndexName VARCHAR (200), @PartitionNumber INT, @IsPartitioned BIT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'Defrag', @Mode AS VARCHAR (200) = @TableName + '.' + @IndexName + '.' + CONVERT (VARCHAR, @PartitionNumber) + '.' 
+ CONVERT (VARCHAR, @IsPartitioned), @st AS DATETIME = getUTCdate(), @SQL AS VARCHAR (3500), @msg AS VARCHAR (1000), @SizeBefore AS FLOAT, @SizeAfter AS FLOAT, @IndexId AS INT; +BEGIN TRY + SET @IndexId = (SELECT index_id + FROM sys.indexes + WHERE object_id = object_id(@TableName) + AND name = @IndexName); + SET @SizeBefore = (SELECT sum(reserved_page_count) + FROM sys.dm_db_partition_stats + WHERE object_id = object_id(@TableName) + AND index_id = @IndexId) * 8.0 / 1024 / 1024; + SET @msg = 'Size[GB] before=' + CONVERT (VARCHAR, @SizeBefore); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start', @Text = @msg; + SET @Sql = 'ALTER INDEX ' + quotename(@IndexName) + ' ON dbo.' + quotename(@TableName) + ' REORGANIZE' + CASE WHEN @IsPartitioned = 1 THEN ' PARTITION = ' + CONVERT (VARCHAR, @PartitionNumber) ELSE '' END; + BEGIN TRY + EXECUTE (@Sql); + SET @SizeAfter = (SELECT sum(reserved_page_count) + FROM sys.dm_db_partition_stats + WHERE object_id = object_id(@TableName) + AND index_id = @IndexId) * 8.0 / 1024 / 1024; + SET @msg = 'Size[GB] before=' + CONVERT (VARCHAR, @SizeBefore) + ', after=' + CONVERT (VARCHAR, @SizeAfter) + ', reduced by=' + CONVERT (VARCHAR, @SizeBefore - @SizeAfter); + EXECUTE dbo.LogEvent @Process = @SP, @Status = 'End', @Mode = @Mode, @Action = 'Reorganize', @Start = @st, @Text = @msg; + END TRY + BEGIN CATCH + EXECUTE dbo.LogEvent @Process = @SP, @Status = 'Error', @Mode = @Mode, @Action = 'Reorganize', @Start = @st, @ReRaisError = 0; + END CATCH +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.DefragChangeDatabaseSettings +@IsOn BIT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'DefragChangeDatabaseSettings', @Mode AS VARCHAR (200) = 'On=' + CONVERT (VARCHAR, @IsOn), @st AS DATETIME = getUTCdate(), @SQL AS VARCHAR (3500); +BEGIN TRY + EXECUTE dbo.LogEvent @Process 
= @SP, @Status = 'Start', @Mode = @Mode; + SET @SQL = 'ALTER DATABASE CURRENT SET AUTO_UPDATE_STATISTICS ' + CASE WHEN @IsOn = 1 THEN 'ON' ELSE 'OFF' END; + EXECUTE (@SQL); + EXECUTE dbo.LogEvent @Process = @SP, @Status = 'Run', @Mode = @Mode, @Text = @SQL; + SET @SQL = 'ALTER DATABASE CURRENT SET AUTO_CREATE_STATISTICS ' + CASE WHEN @IsOn = 1 THEN 'ON' ELSE 'OFF' END; + EXECUTE (@SQL); + EXECUTE dbo.LogEvent @Process = @SP, @Status = 'End', @Mode = @Mode, @Start = @st, @Text = @SQL; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.DeleteHistory +@DeleteResources BIT=0, @Reset BIT=0, @DisableLogEvent BIT=0 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'DeleteHistory', @Mode AS VARCHAR (100) = 'D=' + isnull(CONVERT (VARCHAR, @DeleteResources), 'NULL') + ' R=' + isnull(CONVERT (VARCHAR, @Reset), 'NULL'), @st AS DATETIME = getUTCdate(), @Id AS VARCHAR (100) = 'DeleteHistory.LastProcessed.TypeId.SurrogateId', @ResourceTypeId AS SMALLINT, @SurrogateId AS BIGINT, @RowsToProcess AS INT, @ProcessedResources AS INT = 0, @DeletedResources AS INT = 0, @DeletedSearchParams AS INT = 0, @ReportDate AS DATETIME = getUTCdate(); +BEGIN TRY + IF @DisableLogEvent = 0 + INSERT INTO dbo.Parameters (Id, Char) + SELECT @SP, + 'LogEvent'; + ELSE + DELETE dbo.Parameters + WHERE Id = @SP; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + INSERT INTO dbo.Parameters (Id, Char) + SELECT @Id, + '0.0' + WHERE NOT EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = @Id); + DECLARE @LastProcessed AS VARCHAR (100) = CASE WHEN @Reset = 0 THEN (SELECT Char + FROM dbo.Parameters + WHERE Id = @Id) ELSE '0.0' END; + DECLARE @Types TABLE ( + ResourceTypeId SMALLINT PRIMARY KEY, + Name VARCHAR (100)); + DECLARE @SurrogateIds TABLE ( + ResourceSurrogateId BIGINT PRIMARY KEY, + IsHistory BIT ); + INSERT INTO @Types + EXECUTE 
dbo.GetUsedResourceTypes ; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = '@Types', @Action = 'Insert', @Rows = @@rowcount; + SET @ResourceTypeId = substring(@LastProcessed, 1, charindex('.', @LastProcessed) - 1); + SET @SurrogateId = substring(@LastProcessed, charindex('.', @LastProcessed) + 1, 255); + DELETE @Types + WHERE ResourceTypeId < @ResourceTypeId; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = '@Types', @Action = 'Delete', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @Types) + BEGIN + SET @ResourceTypeId = (SELECT TOP 1 ResourceTypeId + FROM @Types + ORDER BY ResourceTypeId); + SET @ProcessedResources = 0; + SET @DeletedResources = 0; + SET @DeletedSearchParams = 0; + SET @RowsToProcess = 1; + WHILE @RowsToProcess > 0 + BEGIN + DELETE @SurrogateIds; + INSERT INTO @SurrogateIds + SELECT TOP 10000 ResourceSurrogateId, + IsHistory + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId > @SurrogateId + ORDER BY ResourceSurrogateId; + SET @RowsToProcess = @@rowcount; + SET @ProcessedResources += @RowsToProcess; + IF @RowsToProcess > 0 + SET @SurrogateId = (SELECT max(ResourceSurrogateId) + FROM @SurrogateIds); + SET @LastProcessed = CONVERT (VARCHAR, @ResourceTypeId) + '.' 
+ CONVERT (VARCHAR, @SurrogateId); + DELETE @SurrogateIds + WHERE IsHistory = 0; + IF EXISTS (SELECT * + FROM @SurrogateIds) + BEGIN + DELETE dbo.ResourceWriteClaim + WHERE ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.CompartmentAssignment + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.ReferenceSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenText + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.StringSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.UriSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.NumberSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.QuantitySearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.DateTimeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET 
@DeletedSearchParams += @@rowcount; + DELETE dbo.ReferenceTokenCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenTokenCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenDateTimeCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenQuantityCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenStringCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + DELETE dbo.TokenNumberNumberCompositeSearchParam + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedSearchParams += @@rowcount; + IF @DeleteResources = 1 + BEGIN + DELETE dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId IN (SELECT ResourceSurrogateId + FROM @SurrogateIds); + SET @DeletedResources += @@rowcount; + END + END + UPDATE dbo.Parameters + SET Char = @LastProcessed + WHERE Id = @Id; + IF datediff(second, @ReportDate, getUTCdate()) > 60 + BEGIN + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = 'Resource', @Action = 'Select', @Rows = @ProcessedResources, @Text = @LastProcessed; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = '*SearchParam', @Action = 'Delete', @Rows = @DeletedSearchParams, @Text = @LastProcessed; + IF 
@DeleteResources = 1 + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = 'Resource', @Action = 'Delete', @Rows = @DeletedResources, @Text = @LastProcessed; + SET @ReportDate = getUTCdate(); + SET @ProcessedResources = 0; + SET @DeletedSearchParams = 0; + SET @DeletedResources = 0; + END + END + DELETE @Types + WHERE ResourceTypeId = @ResourceTypeId; + SET @SurrogateId = 0; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = 'Resource', @Action = 'Select', @Rows = @ProcessedResources, @Text = @LastProcessed; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = '*SearchParam', @Action = 'Delete', @Rows = @DeletedSearchParams, @Text = @LastProcessed; + IF @DeleteResources = 1 + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Run', @Target = 'Resource', @Action = 'Delete', @Rows = @DeletedResources, @Text = @LastProcessed; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.DequeueJob +@QueueType TINYINT, @Worker VARCHAR (100), @HeartbeatTimeoutSec INT, @InputJobId BIGINT=NULL, @CheckTimeoutJobs BIT=0 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'DequeueJob', @Mode AS VARCHAR (100) = 'Q=' + isnull(CONVERT (VARCHAR, @QueueType), 'NULL') + ' H=' + isnull(CONVERT (VARCHAR, @HeartbeatTimeoutSec), 'NULL') + ' W=' + isnull(@Worker, 'NULL') + ' IJ=' + isnull(CONVERT (VARCHAR, @InputJobId), 'NULL') + ' T=' + isnull(CONVERT (VARCHAR, @CheckTimeoutJobs), 'NULL'), @Rows AS INT = 0, @st AS DATETIME = getUTCdate(), @JobId AS BIGINT, @msg AS VARCHAR (100), @Lock AS VARCHAR (100), @PartitionId AS TINYINT, @MaxPartitions AS TINYINT = 16, @LookedAtPartitions AS TINYINT = 0; +BEGIN TRY + IF EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = 'DequeueJobStop' + AND 
Number = 1) + BEGIN + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = 0, @Text = 'Skipped'; + RETURN; + END + IF @InputJobId IS NULL + SET @PartitionId = @MaxPartitions * rand(); + ELSE + SET @PartitionId = @InputJobId % 16; + SET TRANSACTION ISOLATION LEVEL READ COMMITTED; + WHILE @InputJobId IS NULL + AND @JobId IS NULL + AND @LookedAtPartitions < @MaxPartitions + AND @CheckTimeoutJobs = 0 + BEGIN + SET @Lock = 'DequeueJob_' + CONVERT (VARCHAR, @QueueType) + '_' + CONVERT (VARCHAR, @PartitionId); + BEGIN TRANSACTION; + EXECUTE sp_getapplock @Lock, 'Exclusive'; + UPDATE T + SET StartDate = getUTCdate(), + HeartbeatDate = getUTCdate(), + Worker = @Worker, + Status = 1, + Version = datediff_big(millisecond, '0001-01-01', getUTCdate()), + @JobId = T.JobId + FROM dbo.JobQueue AS T WITH (PAGLOCK) + INNER JOIN + (SELECT TOP 1 JobId + FROM dbo.JobQueue WITH (INDEX (IX_QueueType_PartitionId_Status_Priority)) + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND Status = 0 + ORDER BY Priority, JobId) AS S + ON QueueType = @QueueType + AND PartitionId = @PartitionId + AND T.JobId = S.JobId; + SET @Rows += @@rowcount; + COMMIT TRANSACTION; + IF @JobId IS NULL + BEGIN + SET @PartitionId = CASE WHEN @PartitionId = 15 THEN 0 ELSE @PartitionId + 1 END; + SET @LookedAtPartitions = @LookedAtPartitions + 1; + END + END + SET @LookedAtPartitions = 0; + WHILE @InputJobId IS NULL + AND @JobId IS NULL + AND @LookedAtPartitions < @MaxPartitions + BEGIN + SET @Lock = 'DequeueStoreCopyWorkUnit_' + CONVERT (VARCHAR, @PartitionId); + BEGIN TRANSACTION; + EXECUTE sp_getapplock @Lock, 'Exclusive'; + UPDATE T + SET StartDate = getUTCdate(), + HeartbeatDate = getUTCdate(), + Worker = @Worker, + Status = CASE WHEN CancelRequested = 0 THEN 1 ELSE 4 END, + Version = datediff_big(millisecond, '0001-01-01', getUTCdate()), + @JobId = CASE WHEN CancelRequested = 0 THEN T.JobId END, + Info = CONVERT (VARCHAR (1000), isnull(Info, '') + ' Prev: 
Worker=' + Worker + ' Start=' + CONVERT (VARCHAR, StartDate, 121)) + FROM dbo.JobQueue AS T WITH (PAGLOCK) + INNER JOIN + (SELECT TOP 1 JobId + FROM dbo.JobQueue WITH (INDEX (IX_QueueType_PartitionId_Status_Priority)) + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND Status = 1 + AND datediff(second, HeartbeatDate, getUTCdate()) > @HeartbeatTimeoutSec + ORDER BY Priority, JobId) AS S + ON QueueType = @QueueType + AND PartitionId = @PartitionId + AND T.JobId = S.JobId; + SET @Rows += @@rowcount; + COMMIT TRANSACTION; + IF @JobId IS NULL + BEGIN + SET @PartitionId = CASE WHEN @PartitionId = 15 THEN 0 ELSE @PartitionId + 1 END; + SET @LookedAtPartitions = @LookedAtPartitions + 1; + END + END + IF @InputJobId IS NOT NULL + BEGIN + UPDATE dbo.JobQueue WITH (PAGLOCK) + SET StartDate = getUTCdate(), + HeartbeatDate = getUTCdate(), + Worker = @Worker, + Status = 1, + Version = datediff_big(millisecond, '0001-01-01', getUTCdate()), + @JobId = JobId + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND Status = 0 + AND JobId = @InputJobId; + SET @Rows += @@rowcount; + IF @JobId IS NULL + BEGIN + UPDATE dbo.JobQueue WITH (PAGLOCK) + SET StartDate = getUTCdate(), + HeartbeatDate = getUTCdate(), + Worker = @Worker, + Status = 1, + Version = datediff_big(millisecond, '0001-01-01', getUTCdate()), + @JobId = JobId + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND Status = 1 + AND JobId = @InputJobId + AND datediff(second, HeartbeatDate, getUTCdate()) > @HeartbeatTimeoutSec; + SET @Rows += @@rowcount; + END + END + IF @JobId IS NOT NULL + EXECUTE dbo.GetJobs @QueueType = @QueueType, @JobId = @JobId; + SET @msg = 'J=' + isnull(CONVERT (VARCHAR, @JobId), 'NULL') + ' P=' + CONVERT (VARCHAR, @PartitionId); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows, @Text = @msg; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent 
@Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.DisableIndex +@tableName NVARCHAR (128), @indexName NVARCHAR (128) +WITH EXECUTE AS 'dbo' +AS +DECLARE @errorTxt AS VARCHAR (1000), @sql AS NVARCHAR (1000), @isDisabled AS BIT; +IF object_id(@tableName) IS NULL + BEGIN + SET @errorTxt = @tableName + ' does not exist or you don''t have permissions.'; + RAISERROR (@errorTxt, 18, 127); + END +SET @isDisabled = (SELECT is_disabled + FROM sys.indexes + WHERE object_id = object_id(@tableName) + AND name = @indexName); +IF @isDisabled IS NULL + BEGIN + SET @errorTxt = @indexName + ' does not exist or you don''t have permissions.'; + RAISERROR (@errorTxt, 18, 127); + END +IF @isDisabled = 0 + BEGIN + SET @sql = N'ALTER INDEX ' + QUOTENAME(@indexName) + N' on ' + @tableName + ' Disable'; + EXECUTE sp_executesql @sql; + END + +GO +CREATE PROCEDURE dbo.DisableIndexes +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'DisableIndexes', @Mode AS VARCHAR (200) = '', @st AS DATETIME = getUTCdate(), @Tbl AS VARCHAR (100), @Ind AS VARCHAR (200), @Txt AS VARCHAR (4000); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + DECLARE @Tables TABLE ( + Tbl VARCHAR (100) PRIMARY KEY, + Supported BIT ); + INSERT INTO @Tables + EXECUTE dbo.GetPartitionedTables @IncludeNotDisabled = 1, @IncludeNotSupported = 0; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Tables', @Action = 'Insert', @Rows = @@rowcount; + DECLARE @Indexes TABLE ( + Tbl VARCHAR (100), + Ind VARCHAR (200), + TblId INT , + IndId INT PRIMARY KEY (Tbl, Ind)); + INSERT INTO @Indexes + SELECT Tbl, + I.Name, + TblId, + I.index_id + FROM (SELECT object_id(Tbl) AS TblId, + Tbl + FROM @Tables) AS O + INNER JOIN + sys.indexes AS I + ON I.object_id = TblId; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Indexes', @Action = 'Insert', @Rows = @@rowcount; + 
INSERT INTO dbo.IndexProperties (TableName, IndexName, PropertyName, PropertyValue) + SELECT Tbl, + Ind, + 'DATA_COMPRESSION', + data_comp + FROM (SELECT Tbl, + Ind, + isnull((SELECT TOP 1 CASE WHEN data_compression_desc = 'PAGE' THEN 'PAGE' END + FROM sys.partitions + WHERE object_id = TblId + AND index_id = IndId), 'NONE') AS data_comp + FROM @Indexes) AS A + WHERE NOT EXISTS (SELECT * + FROM dbo.IndexProperties + WHERE TableName = Tbl + AND IndexName = Ind); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = 'IndexProperties', @Action = 'Insert', @Rows = @@rowcount; + DELETE @Indexes + WHERE Tbl = 'Resource' + OR IndId = 1; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Indexes', @Action = 'Delete', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @Indexes) + BEGIN + SELECT TOP 1 @Tbl = Tbl, + @Ind = Ind + FROM @Indexes; + SET @Txt = 'ALTER INDEX ' + @Ind + ' ON dbo.' + @Tbl + ' DISABLE'; + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Ind, @Action = 'Disable', @Text = @Txt; + DELETE @Indexes + WHERE Tbl = @Tbl + AND Ind = @Ind; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.EnqueueJobs +@QueueType TINYINT, @Definitions StringList READONLY, @GroupId BIGINT=NULL, @ForceOneActiveJobGroup BIT=1, @IsCompleted BIT=NULL, @ReturnJobs BIT=1 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'EnqueueJobs', @Mode AS VARCHAR (100) = 'Q=' + isnull(CONVERT (VARCHAR, @QueueType), 'NULL') + ' D=' + CONVERT (VARCHAR, (SELECT count(*) + FROM @Definitions)) + ' G=' + isnull(CONVERT (VARCHAR, @GroupId), 'NULL') + ' F=' + isnull(CONVERT (VARCHAR, @ForceOneActiveJobGroup), 'NULL') + ' C=' + isnull(CONVERT (VARCHAR, 
@IsCompleted), 'NULL'), @st AS DATETIME = getUTCdate(), @Lock AS VARCHAR (100) = 'EnqueueJobs_' + CONVERT (VARCHAR, @QueueType), @MaxJobId AS BIGINT, @Rows AS INT, @msg AS VARCHAR (1000), @JobIds AS BigintList, @InputRows AS INT; +BEGIN TRY + DECLARE @Input TABLE ( + DefinitionHash VARBINARY (20) PRIMARY KEY, + Definition VARCHAR (MAX) ); + INSERT INTO @Input + SELECT hashbytes('SHA1', String) AS DefinitionHash, + String AS Definition + FROM @Definitions; + SET @InputRows = @@rowcount; + INSERT INTO @JobIds + SELECT JobId + FROM @Input AS A + INNER JOIN + dbo.JobQueue AS B + ON B.QueueType = @QueueType + AND B.DefinitionHash = A.DefinitionHash + AND B.Status <> 5; + IF @@rowcount < @InputRows + BEGIN + BEGIN TRANSACTION; + EXECUTE sp_getapplock @Lock, 'Exclusive'; + IF @ForceOneActiveJobGroup = 1 + AND EXISTS (SELECT * + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND Status IN (0, 1) + AND (@GroupId IS NULL + OR GroupId <> @GroupId)) + RAISERROR ('There are other active job groups', 18, 127); + SET @MaxJobId = isnull((SELECT TOP 1 JobId + FROM dbo.JobQueue + WHERE QueueType = @QueueType + ORDER BY JobId DESC), 0); + INSERT INTO dbo.JobQueue (QueueType, GroupId, JobId, Definition, DefinitionHash, Status) + OUTPUT inserted.JobId INTO @JobIds + SELECT @QueueType, + isnull(@GroupId, @MaxJobId + 1) AS GroupId, + JobId, + Definition, + DefinitionHash, + CASE WHEN @IsCompleted = 1 THEN 2 ELSE 0 END AS Status + FROM (SELECT @MaxJobId + row_number() OVER (ORDER BY Dummy) AS JobId, + * + FROM (SELECT *, + 0 AS Dummy + FROM @Input) AS A) AS A + WHERE NOT EXISTS (SELECT * + FROM dbo.JobQueue AS B WITH (INDEX (IX_QueueType_DefinitionHash)) + WHERE B.QueueType = @QueueType + AND B.DefinitionHash = A.DefinitionHash + AND B.Status <> 5); + SET @Rows = @@rowcount; + COMMIT TRANSACTION; + END + IF @ReturnJobs = 1 + EXECUTE dbo.GetJobs @QueueType = @QueueType, @JobIds = @JobIds; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = 
@Rows; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.ExecuteCommandForRebuildIndexes +@Tbl VARCHAR (100), @Ind VARCHAR (1000), @Cmd VARCHAR (MAX) +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'ExecuteCommandForRebuildIndexes', @Mode AS VARCHAR (200) = 'Tbl=' + isnull(@Tbl, 'NULL'), @st AS DATETIME, @Retries AS INT = 0, @Action AS VARCHAR (100), @msg AS VARCHAR (1000); +RetryOnTempdbError: +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start', @Text = @Cmd; + SET @st = getUTCdate(); + IF @Tbl IS NULL + RAISERROR ('@Tbl IS NULL', 18, 127); + IF @Cmd IS NULL + RAISERROR ('@Cmd IS NULL', 18, 127); + SET @Action = CASE WHEN @Cmd LIKE 'UPDATE STAT%' THEN 'Update statistics' WHEN @Cmd LIKE 'CREATE%INDEX%' THEN 'Create Index' WHEN @Cmd LIKE 'ALTER%INDEX%REBUILD%' THEN 'Rebuild Index' WHEN @Cmd LIKE 'ALTER%TABLE%ADD%' THEN 'Add Constraint' END; + IF @Action IS NULL + BEGIN + SET @msg = 'Not supported command = ' + CONVERT (VARCHAR (900), @Cmd); + RAISERROR (@msg, 18, 127); + END + IF @Action = 'Create Index' + WAITFOR DELAY '00:00:05'; + EXECUTE (@Cmd); + SELECT @Ind; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Action = @Action, @Status = 'End', @Start = @st, @Text = @Cmd; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + IF error_number() = 40544 + BEGIN + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st, @Retry = @Retries; + SET @Retries = @Retries + 1; + IF @Tbl = 'TokenText_96' + WAITFOR DELAY '01:00:00'; + ELSE + WAITFOR DELAY '00:10:00'; + GOTO RetryOnTempdbError; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE OR ALTER PROCEDURE dbo.FetchEventAgentCheckpoint +@CheckpointId VARCHAR (64) +AS +BEGIN + SELECT TOP (1) 
CheckpointId, + LastProcessedDateTime, + LastProcessedIdentifier + FROM dbo.EventAgentCheckpoint + WHERE CheckpointId = @CheckpointId; +END + +GO +CREATE PROCEDURE dbo.FetchResourceChanges_3 +@startId BIGINT, @lastProcessedUtcDateTime DATETIME2 (7), @pageSize SMALLINT +AS +BEGIN + SET NOCOUNT ON; + DECLARE @precedingPartitionBoundary AS DATETIME2 (7) = (SELECT TOP (1) CAST (prv.value AS DATETIME2 (7)) AS value + FROM sys.partition_range_values AS prv WITH (NOLOCK) + INNER JOIN + sys.partition_functions AS pf WITH (NOLOCK) + ON pf.function_id = prv.function_id + WHERE pf.name = N'PartitionFunction_ResourceChangeData_Timestamp' + AND SQL_VARIANT_PROPERTY(prv.Value, 'BaseType') = 'datetime2' + AND CAST (prv.value AS DATETIME2 (7)) < DATEADD(HOUR, DATEDIFF(HOUR, 0, @lastProcessedUtcDateTime), 0) + ORDER BY prv.boundary_id DESC); + IF (@precedingPartitionBoundary IS NULL) + BEGIN + SET @precedingPartitionBoundary = CONVERT (DATETIME2 (7), N'1970-01-01T00:00:00.0000000'); + END + DECLARE @endDateTimeToFilter AS DATETIME2 (7) = DATEADD(HOUR, 1, SYSUTCDATETIME()); + WITH PartitionBoundaries + AS (SELECT CAST (prv.value AS DATETIME2 (7)) AS PartitionBoundary + FROM sys.partition_range_values AS prv WITH (NOLOCK) + INNER JOIN + sys.partition_functions AS pf WITH (NOLOCK) + ON pf.function_id = prv.function_id + WHERE pf.name = N'PartitionFunction_ResourceChangeData_Timestamp' + AND SQL_VARIANT_PROPERTY(prv.Value, 'BaseType') = 'datetime2' + AND CAST (prv.value AS DATETIME2 (7)) BETWEEN @precedingPartitionBoundary AND @endDateTimeToFilter) + SELECT TOP (@pageSize) Id, + Timestamp, + ResourceId, + ResourceTypeId, + ResourceVersion, + ResourceChangeTypeId + FROM PartitionBoundaries AS p CROSS APPLY (SELECT TOP (@pageSize) Id, + Timestamp, + ResourceId, + ResourceTypeId, + ResourceVersion, + ResourceChangeTypeId + FROM dbo.ResourceChangeData WITH (TABLOCK, HOLDLOCK) + WHERE Id >= @startId + AND $PARTITION.PartitionFunction_ResourceChangeData_Timestamp (Timestamp) = 
$PARTITION.PartitionFunction_ResourceChangeData_Timestamp (p.PartitionBoundary) + ORDER BY Id ASC) AS rcd + ORDER BY rcd.Id ASC; +END + +GO +CREATE PROCEDURE dbo.GetActiveJobs +@QueueType TINYINT, @GroupId BIGINT=NULL +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetActiveJobs', @Mode AS VARCHAR (100) = 'Q=' + isnull(CONVERT (VARCHAR, @QueueType), 'NULL') + ' G=' + isnull(CONVERT (VARCHAR, @GroupId), 'NULL'), @st AS DATETIME = getUTCdate(), @JobIds AS BigintList, @PartitionId AS TINYINT, @MaxPartitions AS TINYINT = 16, @LookedAtPartitions AS TINYINT = 0, @Rows AS INT = 0; +BEGIN TRY + SET @PartitionId = @MaxPartitions * rand(); + WHILE @LookedAtPartitions < @MaxPartitions + BEGIN + IF @GroupId IS NULL + INSERT INTO @JobIds + SELECT JobId + FROM dbo.JobQueue + WHERE PartitionId = @PartitionId + AND QueueType = @QueueType + AND Status IN (0, 1); + ELSE + INSERT INTO @JobIds + SELECT JobId + FROM dbo.JobQueue + WHERE PartitionId = @PartitionId + AND QueueType = @QueueType + AND GroupId = @GroupId + AND Status IN (0, 1); + SET @Rows += @@rowcount; + SET @PartitionId = CASE WHEN @PartitionId = 15 THEN 0 ELSE @PartitionId + 1 END; + SET @LookedAtPartitions += 1; + END + IF @Rows > 0 + EXECUTE dbo.GetJobs @QueueType = @QueueType, @JobIds = @JobIds; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetCommandsForRebuildIndexes +@RebuildClustered BIT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetCommandsForRebuildIndexes', @Mode AS VARCHAR (200) = 'PS=PartitionScheme_ResourceTypeId RC=' + isnull(CONVERT (VARCHAR, @RebuildClustered), 'NULL'), @st AS DATETIME = getUTCdate(), @Tbl AS VARCHAR (100), @TblInt AS VARCHAR (100), @Ind AS VARCHAR (200), @IndId AS INT, @Supported AS BIT, @Txt AS VARCHAR (MAX), @Rows AS 
BIGINT, @Pages AS BIGINT, @ResourceTypeId AS SMALLINT, @IndexesCnt AS INT, @DataComp AS VARCHAR (100); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + DECLARE @Commands TABLE ( + Tbl VARCHAR (100), + Ind VARCHAR (200), + Txt VARCHAR (MAX), + Pages BIGINT ); + DECLARE @ResourceTypes TABLE ( + ResourceTypeId SMALLINT PRIMARY KEY); + DECLARE @Indexes TABLE ( + Ind VARCHAR (200) PRIMARY KEY, + IndId INT ); + DECLARE @Tables TABLE ( + name VARCHAR (100) PRIMARY KEY, + Supported BIT ); + INSERT INTO @Tables + EXECUTE dbo.GetPartitionedTables @IncludeNotDisabled = 1, @IncludeNotSupported = 1; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Tables', @Action = 'Insert', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @Tables) + BEGIN + SELECT TOP 1 @Tbl = name, + @Supported = Supported + FROM @Tables + ORDER BY name; + IF @Supported = 0 + BEGIN + INSERT INTO @Commands + SELECT @Tbl, + name, + 'ALTER INDEX ' + name + ' ON dbo.' 
+ @Tbl + ' REBUILD' + CASE WHEN (SELECT PropertyValue + FROM dbo.IndexProperties + WHERE TableName = @Tbl + AND IndexName = name) = 'PAGE' THEN ' PARTITION = ALL WITH (DATA_COMPRESSION = PAGE)' ELSE '' END, + CONVERT (BIGINT, 9e18) + FROM sys.indexes + WHERE object_id = object_id(@Tbl) + AND (is_disabled = 1 + AND index_id > 1 + AND @RebuildClustered = 0 + OR index_id = 1 + AND @RebuildClustered = 1); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Commands', @Action = 'Insert', @Rows = @@rowcount, @Text = 'Not supported tables with disabled indexes'; + END + ELSE + BEGIN + DELETE @ResourceTypes; + INSERT INTO @ResourceTypes + SELECT CONVERT (SMALLINT, substring(name, charindex('_', name) + 1, 6)) AS ResourceTypeId + FROM sys.sysobjects + WHERE name LIKE @Tbl + '[_]%'; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@ResourceTypes', @Action = 'Insert', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @ResourceTypes) + BEGIN + SET @ResourceTypeId = (SELECT TOP 1 ResourceTypeId + FROM @ResourceTypes + ORDER BY ResourceTypeId); + SET @TblInt = @Tbl + '_' + CONVERT (VARCHAR, @ResourceTypeId); + SET @Pages = (SELECT dpages + FROM sysindexes + WHERE id = object_id(@TblInt) + AND indid IN (0, 1)); + DELETE @Indexes; + INSERT INTO @Indexes + SELECT name, + index_id + FROM sys.indexes + WHERE object_id = object_id(@Tbl) + AND (index_id > 1 + AND @RebuildClustered = 0 + OR index_id = 1 + AND @RebuildClustered = 1); + SET @IndexesCnt = 0; + WHILE EXISTS (SELECT * + FROM @Indexes) + BEGIN + SELECT TOP 1 @Ind = Ind, + @IndId = IndId + FROM @Indexes + ORDER BY Ind; + IF @IndId = 1 + BEGIN + SET @Txt = 'ALTER INDEX ' + @Ind + ' ON dbo.' 
+ @TblInt + ' REBUILD' + CASE WHEN (SELECT PropertyValue + FROM dbo.IndexProperties + WHERE TableName = @Tbl + AND IndexName = @Ind) = 'PAGE' THEN ' PARTITION = ALL WITH (DATA_COMPRESSION = PAGE)' ELSE '' END; + INSERT INTO @Commands + SELECT @TblInt, + @Ind, + @Txt, + @Pages; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Add command', @Rows = @@rowcount, @Text = @Txt; + END + ELSE + IF NOT EXISTS (SELECT * + FROM sys.indexes + WHERE object_id = object_id(@TblInt) + AND name = @Ind) + BEGIN + EXECUTE dbo.GetIndexCommands @Tbl = @Tbl, @Ind = @Ind, @AddPartClause = 0, @IncludeClustered = 0, @Txt = @Txt OUTPUT; + SET @Txt = replace(@Txt, '[' + @Tbl + ']', @TblInt); + IF @Txt IS NOT NULL + BEGIN + SET @IndexesCnt = @IndexesCnt + 1; + INSERT INTO @Commands + SELECT @TblInt, + @Ind, + @Txt, + @Pages; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Add command', @Rows = @@rowcount, @Text = @Txt; + END + END + DELETE @Indexes + WHERE Ind = @Ind; + END + IF @IndexesCnt > 1 + BEGIN + INSERT INTO @Commands + SELECT @TblInt, + 'UPDATE STAT', + 'UPDATE STATISTICS dbo.' 
+ @TblInt, + @Pages; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Add command', @Rows = @@rowcount, @Text = 'Add stats update'; + END + DELETE @ResourceTypes + WHERE ResourceTypeId = @ResourceTypeId; + END + END + DELETE @Tables + WHERE name = @Tbl; + END + SELECT Tbl, + Ind, + Txt + FROM @Commands + ORDER BY Pages DESC, Tbl, CASE WHEN Txt LIKE 'UPDATE STAT%' THEN 0 ELSE 1 END; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Commands', @Action = 'Select', @Rows = @@rowcount; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetIndexCommands +@Tbl VARCHAR (100), @Ind VARCHAR (200), @AddPartClause BIT, @IncludeClustered BIT, @Txt VARCHAR (MAX)=NULL OUTPUT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetIndexCommands', @Mode AS VARCHAR (200) = 'Tbl=' + isnull(@Tbl, 'NULL') + ' Ind=' + isnull(@Ind, 'NULL'), @st AS DATETIME = getUTCdate(); +DECLARE @Indexes TABLE ( + Ind VARCHAR (200) PRIMARY KEY, + Txt VARCHAR (MAX)); +BEGIN TRY + IF @Tbl IS NULL + RAISERROR ('@Tbl IS NULL', 18, 127); + INSERT INTO @Indexes + SELECT Ind, + CASE WHEN is_primary_key = 1 THEN 'ALTER TABLE dbo.[' + Tbl + '] ADD PRIMARY KEY ' + CASE WHEN type = 1 THEN ' CLUSTERED' ELSE '' END ELSE 'CREATE' + CASE WHEN is_unique = 1 THEN ' UNIQUE' ELSE '' END + CASE WHEN type = 1 THEN ' CLUSTERED' ELSE '' END + ' INDEX ' + Ind + ' ON dbo.[' + Tbl + ']' END + ' (' + KeyCols + ')' + IncClause + CASE WHEN filter_def IS NOT NULL THEN ' WHERE ' + filter_def ELSE '' END + CASE WHEN data_comp IS NOT NULL THEN ' WITH (DATA_COMPRESSION = ' + data_comp + ')' ELSE '' END + CASE WHEN @AddPartClause = 1 THEN PartClause ELSE '' END + FROM (SELECT O.Name AS Tbl, + I.Name AS 
Ind, + isnull((SELECT TOP 1 CASE WHEN data_compression_desc = 'PAGE' THEN 'PAGE' END + FROM sys.partitions AS P + WHERE P.object_id = I.object_id + AND I.index_id = P.index_id), (SELECT NULLIF (PropertyValue, 'NONE') + FROM dbo.IndexProperties + WHERE TableName = O.Name + AND IndexName = I.Name + AND PropertyName = 'DATA_COMPRESSION')) AS data_comp, + replace(replace(replace(replace(I.filter_definition, '[', ''), ']', ''), '(', ''), ')', '') AS filter_def, + I.is_unique, + I.is_primary_key, + I.type, + KeyCols, + CASE WHEN IncCols IS NOT NULL THEN ' INCLUDE (' + IncCols + ')' ELSE '' END AS IncClause, + CASE WHEN EXISTS (SELECT * + FROM sys.partition_schemes AS S + WHERE S.data_space_id = I.data_space_id + AND name = 'PartitionScheme_ResourceTypeId') THEN ' ON PartitionScheme_ResourceTypeId (ResourceTypeId)' ELSE '' END AS PartClause + FROM sys.indexes AS I + INNER JOIN + sys.objects AS O + ON O.object_id = I.object_id CROSS APPLY (SELECT string_agg(CASE WHEN IC.key_ordinal > 0 + AND IC.is_included_column = 0 THEN C.name END, ',') WITHIN GROUP (ORDER BY key_ordinal) AS KeyCols, + string_agg(CASE WHEN IC.is_included_column = 1 THEN C.name END, ',') WITHIN GROUP (ORDER BY key_ordinal) AS IncCols + FROM sys.index_columns AS IC + INNER JOIN + sys.columns AS C + ON C.object_id = IC.object_id + AND C.column_id = IC.column_id + WHERE IC.object_id = I.object_id + AND IC.index_id = I.index_id + GROUP BY IC.object_id, IC.index_id) AS IC + WHERE O.name = @Tbl + AND (@Ind IS NULL + OR I.name = @Ind) + AND (@IncludeClustered = 1 + OR index_id > 1)) AS A; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Indexes', @Action = 'Insert', @Rows = @@rowcount; + IF @Ind IS NULL + SELECT Ind, + Txt + FROM @Indexes; + ELSE + SET @Txt = (SELECT Txt + FROM @Indexes); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Text = @Txt; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = 
@SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetJobs +@QueueType TINYINT, @JobId BIGINT=NULL, @JobIds BigintList READONLY, @GroupId BIGINT=NULL, @ReturnDefinition BIT=1 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetJobs', @Mode AS VARCHAR (100) = 'Q=' + isnull(CONVERT (VARCHAR, @QueueType), 'NULL') + ' J=' + isnull(CONVERT (VARCHAR, @JobId), 'NULL') + ' G=' + isnull(CONVERT (VARCHAR, @GroupId), 'NULL'), @st AS DATETIME = getUTCdate(), @PartitionId AS TINYINT = @JobId % 16; +BEGIN TRY + IF @JobId IS NULL + AND @GroupId IS NULL + AND NOT EXISTS (SELECT * + FROM @JobIds) + RAISERROR ('@JobId = NULL and @GroupId = NULL and @JobIds is empty', 18, 127); + IF @JobId IS NOT NULL + SELECT GroupId, + JobId, + CASE WHEN @ReturnDefinition = 1 THEN Definition ELSE NULL END AS Definition, + Version, + Status, + Priority, + Data, + Result, + CreateDate, + StartDate, + EndDate, + HeartbeatDate, + CancelRequested + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = isnull(@JobId, -1) + AND Status <> 5; + ELSE + IF @GroupId IS NOT NULL + SELECT GroupId, + JobId, + CASE WHEN @ReturnDefinition = 1 THEN Definition ELSE NULL END AS Definition, + Version, + Status, + Priority, + Data, + Result, + CreateDate, + StartDate, + EndDate, + HeartbeatDate, + CancelRequested + FROM dbo.JobQueue WITH (INDEX (IX_QueueType_GroupId)) + WHERE QueueType = @QueueType + AND GroupId = isnull(@GroupId, -1) + AND Status <> 5; + ELSE + SELECT GroupId, + JobId, + CASE WHEN @ReturnDefinition = 1 THEN Definition ELSE NULL END AS Definition, + Version, + Status, + Priority, + Data, + Result, + CreateDate, + StartDate, + EndDate, + HeartbeatDate, + CancelRequested + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND JobId IN (SELECT Id + FROM @JobIds) + AND PartitionId = JobId % 16 + AND Status <> 5; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = 
@@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE OR ALTER PROCEDURE dbo.GetNonCompletedJobCountOfSpecificQueueType +@queueType TINYINT +AS +BEGIN + SET NOCOUNT ON; + SELECT COUNT(*) + FROM dbo.JobQueue + WHERE QueueType = @queueType + AND (Status = 0 + OR Status = 1); +END + +GO +CREATE PROCEDURE dbo.GetPartitionedTables +@IncludeNotDisabled BIT, @IncludeNotSupported BIT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetPartitionedTables', @Mode AS VARCHAR (200) = 'PS=PartitionScheme_ResourceTypeId D=' + isnull(CONVERT (VARCHAR, @IncludeNotDisabled), 'NULL') + ' S=' + isnull(CONVERT (VARCHAR, @IncludeNotSupported), 'NULL'), @st AS DATETIME = getUTCdate(); +DECLARE @NotSupportedTables TABLE ( + id INT PRIMARY KEY); +BEGIN TRY + INSERT INTO @NotSupportedTables + SELECT DISTINCT O.object_id + FROM sys.indexes AS I + INNER JOIN + sys.objects AS O + ON O.object_id = I.object_id + WHERE O.type = 'u' + AND EXISTS (SELECT * + FROM sys.partition_schemes AS PS + WHERE PS.data_space_id = I.data_space_id + AND name = 'PartitionScheme_ResourceTypeId') + AND (NOT EXISTS (SELECT * + FROM sys.index_columns AS IC + INNER JOIN + sys.columns AS C + ON C.object_id = IC.object_id + AND C.column_id = IC.column_id + WHERE IC.object_id = I.object_id + AND IC.index_id = I.index_id + AND IC.key_ordinal > 0 + AND IC.is_included_column = 0 + AND C.name = 'ResourceTypeId') + OR EXISTS (SELECT * + FROM sys.indexes AS NSI + WHERE NSI.object_id = O.object_id + AND NOT EXISTS (SELECT * + FROM sys.partition_schemes AS PS + WHERE PS.data_space_id = NSI.data_space_id + AND name = 'PartitionScheme_ResourceTypeId'))); + SELECT CONVERT (VARCHAR (100), O.name), + CONVERT (BIT, CASE WHEN EXISTS (SELECT * + FROM @NotSupportedTables AS NSI + WHERE NSI.id = O.object_id) THEN 0 ELSE 1 END) + FROM sys.indexes AS I + INNER JOIN + sys.objects AS O + ON 
O.object_id = I.object_id + WHERE O.type = 'u' + AND I.index_id IN (0, 1) + AND EXISTS (SELECT * + FROM sys.partition_schemes AS PS + WHERE PS.data_space_id = I.data_space_id + AND name = 'PartitionScheme_ResourceTypeId') + AND EXISTS (SELECT * + FROM sys.index_columns AS IC + INNER JOIN + sys.columns AS C + ON C.object_id = I.object_id + AND C.column_id = IC.column_id + AND IC.is_included_column = 0 + AND C.name = 'ResourceTypeId') + AND (@IncludeNotSupported = 1 + OR NOT EXISTS (SELECT * + FROM @NotSupportedTables AS NSI + WHERE NSI.id = O.object_id)) + AND (@IncludeNotDisabled = 1 + OR EXISTS (SELECT * + FROM sys.indexes AS D + WHERE D.object_id = O.object_id + AND D.is_disabled = 1)) + ORDER BY 1; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetReindexJobById +@id VARCHAR (64) +AS +SET NOCOUNT ON; +SELECT RawJobRecord, + JobVersion +FROM dbo.ReindexJob +WHERE Id = @id; + +GO +CREATE PROCEDURE dbo.GetResources +@ResourceKeys dbo.ResourceKeyList READONLY +AS +SET NOCOUNT ON; +DECLARE @st AS DATETIME = getUTCdate(), @SP AS VARCHAR (100) = 'GetResources', @InputRows AS INT, @DummyTop AS BIGINT = 9223372036854775807, @NotNullVersionExists AS BIT, @NullVersionExists AS BIT, @MinRT AS SMALLINT, @MaxRT AS SMALLINT; +SELECT @MinRT = min(ResourceTypeId), + @MaxRT = max(ResourceTypeId), + @InputRows = count(*), + @NotNullVersionExists = max(CASE WHEN Version IS NOT NULL THEN 1 ELSE 0 END), + @NullVersionExists = max(CASE WHEN Version IS NULL THEN 1 ELSE 0 END) +FROM @ResourceKeys; +DECLARE @Mode AS VARCHAR (100) = 'RT=[' + CONVERT (VARCHAR, @MinRT) + ',' + CONVERT (VARCHAR, @MaxRT) + '] Cnt=' + CONVERT (VARCHAR, @InputRows) + ' NNVE=' + CONVERT (VARCHAR, @NotNullVersionExists) + ' NVE=' + CONVERT (VARCHAR, 
@NullVersionExists); +BEGIN TRY + IF @NotNullVersionExists = 1 + IF @NullVersionExists = 0 + SELECT B.ResourceTypeId, + B.ResourceId, + ResourceSurrogateId, + B.Version, + IsDeleted, + IsHistory, + RawResource, + IsRawResourceMetaSet, + SearchParamHash + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceKeys) AS A + INNER JOIN + dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId_Version)) + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + AND B.Version = A.Version + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + ELSE + SELECT * + FROM (SELECT B.ResourceTypeId, + B.ResourceId, + ResourceSurrogateId, + B.Version, + IsDeleted, + IsHistory, + RawResource, + IsRawResourceMetaSet, + SearchParamHash + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceKeys + WHERE Version IS NOT NULL) AS A + INNER JOIN + dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId_Version)) + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + AND B.Version = A.Version + UNION ALL + SELECT B.ResourceTypeId, + B.ResourceId, + ResourceSurrogateId, + B.Version, + IsDeleted, + IsHistory, + RawResource, + IsRawResourceMetaSet, + SearchParamHash + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceKeys + WHERE Version IS NULL) AS A + INNER JOIN + dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId)) + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + WHERE IsHistory = 0) AS A + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + ELSE + SELECT B.ResourceTypeId, + B.ResourceId, + ResourceSurrogateId, + B.Version, + IsDeleted, + IsHistory, + RawResource, + IsRawResourceMetaSet, + SearchParamHash + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceKeys) AS A + INNER JOIN + dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId)) + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + WHERE IsHistory = 0 + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + 
EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetResourcesByTransactionId +@TransactionId BIGINT, @IncludeHistory BIT=0, @ReturnResourceKeysOnly BIT=0 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = 'T=' + CONVERT (VARCHAR, @TransactionId) + ' H=' + CONVERT (VARCHAR, @IncludeHistory), @st AS DATETIME = getUTCdate(), @DummyTop AS BIGINT = 9223372036854775807, @TypeId AS SMALLINT; +BEGIN TRY + DECLARE @Types TABLE ( + TypeId SMALLINT PRIMARY KEY, + Name VARCHAR (100)); + INSERT INTO @Types + EXECUTE dbo.GetUsedResourceTypes ; + DECLARE @Keys TABLE ( + TypeId SMALLINT, + SurrogateId BIGINT PRIMARY KEY (TypeId, SurrogateId)); + WHILE EXISTS (SELECT * + FROM @Types) + BEGIN + SET @TypeId = (SELECT TOP 1 TypeId + FROM @Types + ORDER BY TypeId); + INSERT INTO @Keys + SELECT @TypeId, + ResourceSurrogateId + FROM dbo.Resource + WHERE ResourceTypeId = @TypeId + AND TransactionId = @TransactionId; + DELETE @Types + WHERE TypeId = @TypeId; + END + IF @ReturnResourceKeysOnly = 0 + SELECT ResourceTypeId, + ResourceId, + ResourceSurrogateId, + Version, + IsDeleted, + IsHistory, + RawResource, + IsRawResourceMetaSet, + SearchParamHash, + RequestMethod + FROM (SELECT TOP (@DummyTop) * + FROM @Keys) AS A + INNER JOIN + dbo.Resource AS B + ON ResourceTypeId = TypeId + AND ResourceSurrogateId = SurrogateId + WHERE IsHistory = 0 + OR @IncludeHistory = 1 + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + ELSE + SELECT ResourceTypeId, + ResourceId, + ResourceSurrogateId, + Version, + IsDeleted + FROM (SELECT TOP (@DummyTop) * + FROM @Keys) AS A + INNER JOIN + dbo.Resource AS B + ON ResourceTypeId = TypeId + AND ResourceSurrogateId = SurrogateId + WHERE IsHistory = 0 + OR @IncludeHistory = 
1 + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetResourcesByTypeAndSurrogateIdRange +@ResourceTypeId SMALLINT, @StartId BIGINT, @EndId BIGINT, @GlobalEndId BIGINT=NULL, @IncludeHistory BIT=0, @IncludeDeleted BIT=0 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetResourcesByTypeAndSurrogateIdRange', @Mode AS VARCHAR (100) = 'RT=' + isnull(CONVERT (VARCHAR, @ResourceTypeId), 'NULL') + ' S=' + isnull(CONVERT (VARCHAR, @StartId), 'NULL') + ' E=' + isnull(CONVERT (VARCHAR, @EndId), 'NULL') + ' GE=' + isnull(CONVERT (VARCHAR, @GlobalEndId), 'NULL') + ' HI=' + isnull(CONVERT (VARCHAR, @IncludeHistory), 'NULL') + ' DE' + isnull(CONVERT (VARCHAR, @IncludeDeleted), 'NULL'), @st AS DATETIME = getUTCdate(), @DummyTop AS BIGINT = 9223372036854775807; +BEGIN TRY + DECLARE @ResourceIds TABLE ( + ResourceId VARCHAR (64) COLLATE Latin1_General_100_CS_AS PRIMARY KEY); + DECLARE @SurrogateIds TABLE ( + MaxSurrogateId BIGINT PRIMARY KEY); + IF @GlobalEndId IS NOT NULL + AND @IncludeHistory = 0 + BEGIN + INSERT INTO @ResourceIds + SELECT DISTINCT ResourceId + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId BETWEEN @StartId AND @EndId + AND IsHistory = 1 + AND (IsDeleted = 0 + OR @IncludeDeleted = 1) + OPTION (MAXDOP 1); + IF @@rowcount > 0 + INSERT INTO @SurrogateIds + SELECT ResourceSurrogateId + FROM (SELECT ResourceId, + ResourceSurrogateId, + row_number() OVER (PARTITION BY ResourceId ORDER BY ResourceSurrogateId DESC) AS RowId + FROM dbo.Resource WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId_Version)) + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceId IN (SELECT TOP (@DummyTop) ResourceId + FROM @ResourceIds) + AND ResourceSurrogateId 
BETWEEN @StartId AND @GlobalEndId) AS A + WHERE RowId = 1 + AND ResourceSurrogateId BETWEEN @StartId AND @EndId + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + END + SELECT ResourceTypeId, + ResourceId, + Version, + IsDeleted, + ResourceSurrogateId, + RequestMethod, + CONVERT (BIT, 1) AS IsMatch, + CONVERT (BIT, 0) AS IsPartial, + IsRawResourceMetaSet, + SearchParamHash, + RawResource + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId BETWEEN @StartId AND @EndId + AND (IsHistory = 0 + OR @IncludeHistory = 1) + AND (IsDeleted = 0 + OR @IncludeDeleted = 1) + UNION ALL + SELECT ResourceTypeId, + ResourceId, + Version, + IsDeleted, + ResourceSurrogateId, + RequestMethod, + CONVERT (BIT, 1) AS IsMatch, + CONVERT (BIT, 0) AS IsPartial, + IsRawResourceMetaSet, + SearchParamHash, + RawResource + FROM @SurrogateIds + INNER JOIN + dbo.Resource + ON ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId = MaxSurrogateId + WHERE IsHistory = 1 + AND (IsDeleted = 0 + OR @IncludeDeleted = 1) + OPTION (MAXDOP 1); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetResourceSearchParamStats +@Table VARCHAR (100)=NULL, @ResourceTypeId SMALLINT=NULL, @SearchParamId SMALLINT=NULL +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (200) = 'T=' + isnull(@Table, 'NULL') + ' RT=' + isnull(CONVERT (VARCHAR, @ResourceTypeId), 'NULL') + ' SP=' + isnull(CONVERT (VARCHAR, @SearchParamId), 'NULL'), @st AS DATETIME = getUTCdate(); +BEGIN TRY + SELECT T.name AS TableName, + S.name AS StatsName, + db_name() AS DatabaseName + FROM sys.stats AS S + INNER JOIN + sys.tables AS T + ON T.object_id = S.object_id + WHERE T.name LIKE '%SearchParam' + AND T.name <> 
'SearchParam' + AND S.name LIKE 'ST[_]%' + AND (T.name LIKE @Table + OR @Table IS NULL) + AND (S.name LIKE '%ResourceTypeId[_]' + CONVERT (VARCHAR, @ResourceTypeId) + '[_]%' + OR @ResourceTypeId IS NULL) + AND (S.name LIKE '%SearchParamId[_]' + CONVERT (VARCHAR, @SearchParamId) + OR @SearchParamId IS NULL); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Rows = @@rowcount, @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetResourceSurrogateIdRanges +@ResourceTypeId SMALLINT, @StartId BIGINT, @EndId BIGINT, @RangeSize INT, @NumberOfRanges INT=100, @Up BIT=1 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetResourceSurrogateIdRanges', @Mode AS VARCHAR (100) = 'RT=' + isnull(CONVERT (VARCHAR, @ResourceTypeId), 'NULL') + ' S=' + isnull(CONVERT (VARCHAR, @StartId), 'NULL') + ' E=' + isnull(CONVERT (VARCHAR, @EndId), 'NULL') + ' R=' + isnull(CONVERT (VARCHAR, @RangeSize), 'NULL') + ' UP=' + isnull(CONVERT (VARCHAR, @Up), 'NULL'), @st AS DATETIME = getUTCdate(); +BEGIN TRY + IF @Up = 1 + SELECT RangeId, + min(ResourceSurrogateId), + max(ResourceSurrogateId), + count(*) + FROM (SELECT isnull(CONVERT (INT, (row_number() OVER (ORDER BY ResourceSurrogateId) - 1) / @RangeSize), 0) AS RangeId, + ResourceSurrogateId + FROM (SELECT TOP (@RangeSize * @NumberOfRanges) ResourceSurrogateId + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId >= @StartId + AND ResourceSurrogateId <= @EndId + ORDER BY ResourceSurrogateId) AS A) AS A + GROUP BY RangeId + OPTION (MAXDOP 1); + ELSE + SELECT RangeId, + min(ResourceSurrogateId), + max(ResourceSurrogateId), + count(*) + FROM (SELECT isnull(CONVERT (INT, (row_number() OVER (ORDER BY ResourceSurrogateId) - 1) / @RangeSize), 0) AS RangeId, + ResourceSurrogateId + FROM (SELECT TOP (@RangeSize * @NumberOfRanges) 
ResourceSurrogateId + FROM dbo.Resource + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceSurrogateId >= @StartId + AND ResourceSurrogateId <= @EndId + ORDER BY ResourceSurrogateId DESC) AS A) AS A + GROUP BY RangeId + OPTION (MAXDOP 1); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetResourceVersions +@ResourceDateKeys dbo.ResourceDateKeyList READONLY +AS +SET NOCOUNT ON; +DECLARE @st AS DATETIME = getUTCdate(), @SP AS VARCHAR (100) = 'GetResourceVersions', @Mode AS VARCHAR (100) = 'Rows=' + CONVERT (VARCHAR, (SELECT count(*) + FROM @ResourceDateKeys)), @DummyTop AS BIGINT = 9223372036854775807; +BEGIN TRY + SELECT A.ResourceTypeId, + A.ResourceId, + A.ResourceSurrogateId, + CASE WHEN EXISTS (SELECT * + FROM dbo.Resource AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId) THEN 0 WHEN isnull(U.Version, 1) - isnull(L.Version, 0) > 1 THEN isnull(U.Version, 1) - 1 ELSE 0 END AS Version + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceDateKeys) AS A OUTER APPLY (SELECT TOP 1 * + FROM dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId_Version)) + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + AND B.ResourceSurrogateId < A.ResourceSurrogateId + ORDER BY B.ResourceSurrogateId DESC) AS L OUTER APPLY (SELECT TOP 1 * + FROM dbo.Resource AS B WITH (INDEX (IX_Resource_ResourceTypeId_ResourceId_Version)) + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + AND B.ResourceSurrogateId > A.ResourceSurrogateId + ORDER BY B.ResourceSurrogateId) AS U + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN 
CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.GetSearchParamStatuses +AS +SET NOCOUNT ON; +SELECT SearchParamId, + Uri, + Status, + LastUpdated, + IsPartiallySupported +FROM dbo.SearchParam; + +GO +CREATE PROCEDURE dbo.GetTransactions +@StartNotInclusiveTranId BIGINT, @EndInclusiveTranId BIGINT, @EndDate DATETIME=NULL +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = 'ST=' + CONVERT (VARCHAR, @StartNotInclusiveTranId) + ' ET=' + CONVERT (VARCHAR, @EndInclusiveTranId) + ' ED=' + isnull(CONVERT (VARCHAR, @EndDate, 121), 'NULL'), @st AS DATETIME = getUTCdate(); +IF @EndDate IS NULL + SET @EndDate = getUTCdate(); +SELECT SurrogateIdRangeFirstValue, + VisibleDate, + InvisibleHistoryRemovedDate +FROM dbo.Transactions +WHERE SurrogateIdRangeFirstValue > @StartNotInclusiveTranId + AND SurrogateIdRangeFirstValue <= @EndInclusiveTranId + AND EndDate <= @EndDate +ORDER BY SurrogateIdRangeFirstValue; +EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; + +GO +CREATE PROCEDURE dbo.GetUsedResourceTypes +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'GetUsedResourceTypes', @Mode AS VARCHAR (100) = '', @st AS DATETIME = getUTCdate(); +BEGIN TRY + SELECT ResourceTypeId, + Name + FROM dbo.ResourceType AS A + WHERE EXISTS (SELECT * + FROM dbo.Resource AS B + WHERE B.ResourceTypeId = A.ResourceTypeId); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.HardDeleteResource +@ResourceTypeId SMALLINT, @ResourceId VARCHAR (64), @KeepCurrentVersion BIT, @IsResourceChangeCaptureEnabled BIT +AS +SET NOCOUNT ON; +DECLARE @SP AS 
VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (200) = 'RT=' + CONVERT (VARCHAR, @ResourceTypeId) + ' R=' + @ResourceId + ' V=' + CONVERT (VARCHAR, @KeepCurrentVersion) + ' CC=' + CONVERT (VARCHAR, @IsResourceChangeCaptureEnabled), @st AS DATETIME = getUTCdate(), @TransactionId AS BIGINT; +BEGIN TRY + IF @IsResourceChangeCaptureEnabled = 1 + EXECUTE dbo.MergeResourcesBeginTransaction @Count = 1, @TransactionId = @TransactionId OUTPUT; + IF @KeepCurrentVersion = 0 + BEGIN TRANSACTION; + DECLARE @SurrogateIds TABLE ( + ResourceSurrogateId BIGINT NOT NULL); + IF @IsResourceChangeCaptureEnabled = 1 + AND NOT EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = 'InvisibleHistory.IsEnabled' + AND Number = 0) + UPDATE dbo.Resource + SET IsDeleted = 1, + RawResource = 0xF, + SearchParamHash = NULL, + HistoryTransactionId = @TransactionId + OUTPUT deleted.ResourceSurrogateId INTO @SurrogateIds + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceId = @ResourceId + AND (@KeepCurrentVersion = 0 + OR IsHistory = 1) + AND RawResource <> 0xF; + ELSE + DELETE dbo.Resource + OUTPUT deleted.ResourceSurrogateId INTO @SurrogateIds + WHERE ResourceTypeId = @ResourceTypeId + AND ResourceId = @ResourceId + AND (@KeepCurrentVersion = 0 + OR IsHistory = 1) + AND RawResource <> 0xF; + IF @KeepCurrentVersion = 0 + BEGIN + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.ResourceWriteClaim AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.ReferenceSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE 
B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenText AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.StringSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.UriSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.NumberSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.QuantitySearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.DateTimeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.ReferenceTokenCompositeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenTokenCompositeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenDateTimeCompositeSearchParam AS B WITH (INDEX 
(1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenQuantityCompositeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenStringCompositeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + DELETE B + FROM @SurrogateIds AS A + INNER LOOP JOIN + dbo.TokenNumberNumberCompositeSearchParam AS B WITH (INDEX (1), FORCESEEK, PAGLOCK) + ON B.ResourceTypeId = @ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + OPTION (MAXDOP 1); + END + IF @@trancount > 0 + COMMIT TRANSACTION; + IF @IsResourceChangeCaptureEnabled = 1 + EXECUTE dbo.MergeResourcesCommitTransaction @TransactionId; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.InitDefrag +@QueueType TINYINT, @GroupId BIGINT, @DefragItems INT=NULL OUTPUT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'InitDefrag', @st AS DATETIME = getUTCdate(), @ObjectId AS INT, @msg AS VARCHAR (1000), @Rows AS INT, @MinFragPct AS INT = isnull((SELECT Number + FROM dbo.Parameters + WHERE Id = 'Defrag.MinFragPct'), 10), @MinSizeGB AS FLOAT = isnull((SELECT Number + FROM dbo.Parameters + WHERE Id = 'Defrag.MinSizeGB'), 0.1), @DefinitionsSorted AS StringList; +DECLARE @Mode AS VARCHAR (200) = 'G=' + CONVERT (VARCHAR, @GroupId) + ' MF=' + CONVERT (VARCHAR, @MinFragPct) + ' MS=' + CONVERT (VARCHAR, @MinSizeGB); +DECLARE @Definitions AS 
TABLE ( + Def VARCHAR (900) PRIMARY KEY, + FragGB FLOAT ); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + SELECT * + INTO #filter + FROM (SELECT object_id, + sum(reserved_page_count * 8.0 / 1024 / 1024) AS ReservedGB + FROM sys.dm_db_partition_stats AS A + WHERE object_id IN (SELECT object_id + FROM sys.objects + WHERE type = 'U' + AND name NOT IN ('EventLog')) + GROUP BY object_id) AS A + WHERE ReservedGB > @MinSizeGB; + WHILE EXISTS (SELECT * + FROM #filter) + BEGIN + SET @ObjectId = (SELECT TOP 1 object_id + FROM #filter + ORDER BY ReservedGB DESC); + INSERT INTO @Definitions + SELECT object_name(@ObjectId) + ';' + I.name + ';' + CONVERT (VARCHAR, partition_number) + ';' + CONVERT (VARCHAR, CASE WHEN EXISTS (SELECT * + FROM sys.partition_schemes AS PS + WHERE PS.data_space_id = I.data_space_id) THEN 1 ELSE 0 END) + ';' + CONVERT (VARCHAR, (SELECT sum(reserved_page_count) + FROM sys.dm_db_partition_stats AS S + WHERE S.object_id = A.object_id + AND S.index_id = A.index_id + AND S.partition_number = A.partition_number) * 8.0 / 1024 / 1024), + FragGB + FROM (SELECT object_id, + index_id, + partition_number, + A.avg_fragmentation_in_percent * A.page_count * 8.0 / 1024 / 1024 / 100 AS FragGB + FROM sys.dm_db_index_physical_stats(db_id(), @ObjectId, NULL, NULL, 'LIMITED') AS A + WHERE index_id > 0 + AND avg_fragmentation_in_percent >= @MinFragPct + AND A.page_count > 500) AS A + INNER JOIN + sys.indexes AS I + ON I.object_id = A.object_id + AND I.index_id = A.index_id; + SET @Rows = @@rowcount; + SET @msg = object_name(@ObjectId); + EXECUTE dbo.LogEvent @Process = @SP, @Status = 'Run', @Mode = @Mode, @Target = '@Definitions', @Action = 'Insert', @Rows = @Rows, @Text = @msg; + DELETE #filter + WHERE object_id = @ObjectId; + END + INSERT INTO @DefinitionsSorted + SELECT Def + ';' + CONVERT (VARCHAR, FragGB) + FROM @Definitions + ORDER BY FragGB DESC; + SET @DefragItems = @@rowcount; + IF @DefragItems > 0 + EXECUTE dbo.EnqueueJobs 
@QueueType = @QueueType, @Definitions = @DefinitionsSorted, @GroupId = @GroupId, @ForceOneActiveJobGroup = 1; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.InitializeIndexProperties +AS +SET NOCOUNT ON; +INSERT INTO dbo.IndexProperties (TableName, IndexName, PropertyName, PropertyValue) +SELECT Tbl, + Ind, + 'DATA_COMPRESSION', + isnull(data_comp, 'NONE') +FROM (SELECT O.Name AS Tbl, + I.Name AS Ind, + (SELECT TOP 1 CASE WHEN data_compression_desc = 'PAGE' THEN 'PAGE' END + FROM sys.partitions AS P + WHERE P.object_id = I.object_id + AND I.index_id = P.index_id) AS data_comp + FROM sys.indexes AS I + INNER JOIN + sys.objects AS O + ON O.object_id = I.object_id + WHERE O.type = 'u' + AND EXISTS (SELECT * + FROM sys.partition_schemes AS PS + WHERE PS.data_space_id = I.data_space_id + AND name = 'PartitionScheme_ResourceTypeId')) AS A +WHERE NOT EXISTS (SELECT * + FROM dbo.IndexProperties + WHERE TableName = Tbl + AND IndexName = Ind); + +GO +CREATE PROCEDURE dbo.LogEvent +@Process VARCHAR (100), @Status VARCHAR (10), @Mode VARCHAR (200)=NULL, @Action VARCHAR (20)=NULL, @Target VARCHAR (100)=NULL, @Rows BIGINT=NULL, @Start DATETIME=NULL, @Text NVARCHAR (3500)=NULL, @EventId BIGINT=NULL OUTPUT, @Retry INT=NULL +AS +SET NOCOUNT ON; +DECLARE @ErrorNumber AS INT = error_number(), @ErrorMessage AS VARCHAR (1000) = '', @TranCount AS INT = @@trancount, @DoWork AS BIT = 0, @NumberAdded AS BIT; +IF @ErrorNumber IS NOT NULL + OR @Status IN ('Warn', 'Error') + SET @DoWork = 1; +IF @DoWork = 0 + SET @DoWork = CASE WHEN EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = isnull(@Process, '') + AND Char = 'LogEvent') THEN 1 ELSE 0 END; +IF @DoWork = 0 + RETURN; +IF @ErrorNumber IS NOT NULL + SET @ErrorMessage = CASE WHEN @Retry IS NOT NULL THEN 'Retry ' + CONVERT 
(VARCHAR, @Retry) + ', ' ELSE '' END + 'Error ' + CONVERT (VARCHAR, error_number()) + ': ' + CONVERT (VARCHAR (1000), error_message()) + ', Level ' + CONVERT (VARCHAR, error_severity()) + ', State ' + CONVERT (VARCHAR, error_state()) + CASE WHEN error_procedure() IS NOT NULL THEN ', Procedure ' + error_procedure() ELSE '' END + ', Line ' + CONVERT (VARCHAR, error_line()); +IF @TranCount > 0 + AND @ErrorNumber IS NOT NULL + ROLLBACK; +IF databasepropertyex(db_name(), 'UpdateAbility') = 'READ_WRITE' + BEGIN + INSERT INTO dbo.EventLog (Process, Status, Mode, Action, Target, Rows, Milliseconds, EventDate, EventText, SPID, HostName) + SELECT @Process, + @Status, + @Mode, + @Action, + @Target, + @Rows, + datediff(millisecond, @Start, getUTCdate()), + getUTCdate() AS EventDate, + CASE WHEN @ErrorNumber IS NULL THEN @Text ELSE @ErrorMessage + CASE WHEN isnull(@Text, '') <> '' THEN '. ' + @Text ELSE '' END END AS Text, + @@SPID, + host_name() AS HostName; + SET @EventId = scope_identity(); + END +IF @TranCount > 0 + AND @ErrorNumber IS NOT NULL + BEGIN TRANSACTION; + +GO +CREATE PROCEDURE dbo.LogSchemaMigrationProgress +@message VARCHAR (MAX) +AS +INSERT INTO dbo.SchemaMigrationProgress (Message) +VALUES (@message); + +GO +CREATE PROCEDURE dbo.MergeResources +@AffectedRows INT=0 OUTPUT, @RaiseExceptionOnConflict BIT=1, @IsResourceChangeCaptureEnabled BIT=0, @TransactionId BIGINT=NULL, @SingleTransaction BIT=1, @Resources dbo.ResourceList READONLY, @ResourceWriteClaims dbo.ResourceWriteClaimList READONLY, @ReferenceSearchParams dbo.ReferenceSearchParamList READONLY, @TokenSearchParams dbo.TokenSearchParamList READONLY, @TokenTexts dbo.TokenTextList READONLY, @StringSearchParams dbo.StringSearchParamList READONLY, @UriSearchParams dbo.UriSearchParamList READONLY, @NumberSearchParams dbo.NumberSearchParamList READONLY, @QuantitySearchParams dbo.QuantitySearchParamList READONLY, @DateTimeSearchParms dbo.DateTimeSearchParamList READONLY, @ReferenceTokenCompositeSearchParams 
dbo.ReferenceTokenCompositeSearchParamList READONLY, @TokenTokenCompositeSearchParams dbo.TokenTokenCompositeSearchParamList READONLY, @TokenDateTimeCompositeSearchParams dbo.TokenDateTimeCompositeSearchParamList READONLY, @TokenQuantityCompositeSearchParams dbo.TokenQuantityCompositeSearchParamList READONLY, @TokenStringCompositeSearchParams dbo.TokenStringCompositeSearchParamList READONLY, @TokenNumberNumberCompositeSearchParams dbo.TokenNumberNumberCompositeSearchParamList READONLY +AS +SET NOCOUNT ON; +DECLARE @st AS DATETIME = getUTCdate(), @SP AS VARCHAR (100) = object_name(@@procid), @DummyTop AS BIGINT = 9223372036854775807, @InitialTranCount AS INT = @@trancount, @IsRetry AS BIT = 0; +DECLARE @Mode AS VARCHAR (200) = isnull((SELECT 'RT=[' + CONVERT (VARCHAR, min(ResourceTypeId)) + ',' + CONVERT (VARCHAR, max(ResourceTypeId)) + '] Sur=[' + CONVERT (VARCHAR, min(ResourceSurrogateId)) + ',' + CONVERT (VARCHAR, max(ResourceSurrogateId)) + '] V=' + CONVERT (VARCHAR, max(Version)) + ' Rows=' + CONVERT (VARCHAR, count(*)) + FROM @Resources), 'Input=Empty'); +SET @Mode += ' E=' + CONVERT (VARCHAR, @RaiseExceptionOnConflict) + ' CC=' + CONVERT (VARCHAR, @IsResourceChangeCaptureEnabled) + ' IT=' + CONVERT (VARCHAR, @InitialTranCount) + ' T=' + isnull(CONVERT (VARCHAR, @TransactionId), 'NULL'); +SET @AffectedRows = 0; +BEGIN TRY + DECLARE @Existing AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + SurrogateId BIGINT NOT NULL PRIMARY KEY (ResourceTypeId, SurrogateId)); + DECLARE @ResourceInfos AS TABLE ( + ResourceTypeId SMALLINT NOT NULL, + SurrogateId BIGINT NOT NULL, + Version INT NOT NULL, + KeepHistory BIT NOT NULL, + PreviousVersion INT NULL, + PreviousSurrogateId BIGINT NULL PRIMARY KEY (ResourceTypeId, SurrogateId)); + DECLARE @PreviousSurrogateIds AS TABLE ( + TypeId SMALLINT NOT NULL, + SurrogateId BIGINT NOT NULL PRIMARY KEY (TypeId, SurrogateId), + KeepHistory BIT ); + IF @SingleTransaction = 0 + AND isnull((SELECT Number + FROM dbo.Parameters + WHERE Id = 
'MergeResources.NoTransaction.IsEnabled'), 0) = 0 + SET @SingleTransaction = 1; + SET @Mode += ' ST=' + CONVERT (VARCHAR, @SingleTransaction); + IF @InitialTranCount = 0 + BEGIN + IF EXISTS (SELECT * + FROM @Resources AS A + INNER JOIN + dbo.Resource AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId) + BEGIN + BEGIN TRANSACTION; + INSERT INTO @Existing (ResourceTypeId, SurrogateId) + SELECT B.ResourceTypeId, + B.ResourceSurrogateId + FROM (SELECT TOP (@DummyTop) * + FROM @Resources) AS A + INNER JOIN + dbo.Resource AS B WITH (ROWLOCK, HOLDLOCK) + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + WHERE B.IsHistory = 0 + AND B.ResourceId = A.ResourceId + AND B.Version = A.Version + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + IF @@rowcount = (SELECT count(*) + FROM @Resources) + SET @IsRetry = 1; + IF @IsRetry = 0 + COMMIT TRANSACTION; + END + END + SET @Mode += ' R=' + CONVERT (VARCHAR, @IsRetry); + IF @SingleTransaction = 1 + AND @@trancount = 0 + BEGIN TRANSACTION; + IF @IsRetry = 0 + BEGIN + INSERT INTO @ResourceInfos (ResourceTypeId, SurrogateId, Version, KeepHistory, PreviousVersion, PreviousSurrogateId) + SELECT A.ResourceTypeId, + A.ResourceSurrogateId, + A.Version, + A.KeepHistory, + B.Version, + B.ResourceSurrogateId + FROM (SELECT TOP (@DummyTop) * + FROM @Resources + WHERE HasVersionToCompare = 1) AS A + LEFT OUTER JOIN + dbo.Resource AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceId = A.ResourceId + AND B.IsHistory = 0 + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + IF @RaiseExceptionOnConflict = 1 + AND EXISTS (SELECT * + FROM @ResourceInfos + WHERE PreviousVersion IS NOT NULL + AND Version <= PreviousVersion) + THROW 50409, 'Resource has been recently updated or added, please compare the resource content in code for any duplicate updates', 1; + INSERT INTO @PreviousSurrogateIds + SELECT ResourceTypeId, + PreviousSurrogateId, + 
KeepHistory + FROM @ResourceInfos + WHERE PreviousSurrogateId IS NOT NULL; + IF @@rowcount > 0 + BEGIN + UPDATE dbo.Resource + SET IsHistory = 1 + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId + AND KeepHistory = 1); + SET @AffectedRows += @@rowcount; + IF @IsResourceChangeCaptureEnabled = 1 + AND NOT EXISTS (SELECT * + FROM dbo.Parameters + WHERE Id = 'InvisibleHistory.IsEnabled' + AND Number = 0) + UPDATE dbo.Resource + SET IsHistory = 1, + RawResource = 0xF, + SearchParamHash = NULL, + HistoryTransactionId = @TransactionId + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId + AND KeepHistory = 0); + ELSE + DELETE dbo.Resource + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId + AND KeepHistory = 0); + SET @AffectedRows += @@rowcount; + DELETE dbo.ResourceWriteClaim + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.ReferenceSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenText + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.StringSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.UriSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE 
TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.NumberSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.QuantitySearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.DateTimeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.ReferenceTokenCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenTokenCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenDateTimeCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenQuantityCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenStringCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + DELETE dbo.TokenNumberNumberCompositeSearchParam + WHERE EXISTS (SELECT * + FROM @PreviousSurrogateIds + WHERE TypeId = ResourceTypeId + AND SurrogateId = ResourceSurrogateId); + SET @AffectedRows += @@rowcount; + END + INSERT INTO dbo.Resource (ResourceTypeId, 
ResourceId, Version, IsHistory, ResourceSurrogateId, IsDeleted, RequestMethod, RawResource, IsRawResourceMetaSet, SearchParamHash, TransactionId) + SELECT ResourceTypeId, + ResourceId, + Version, + IsHistory, + ResourceSurrogateId, + IsDeleted, + RequestMethod, + RawResource, + IsRawResourceMetaSet, + SearchParamHash, + @TransactionId + FROM @Resources; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.ResourceWriteClaim (ResourceSurrogateId, ClaimTypeId, ClaimValue) + SELECT ResourceSurrogateId, + ClaimTypeId, + ClaimValue + FROM @ResourceWriteClaims; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.ReferenceSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId, ReferenceResourceVersion) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + BaseUri, + ReferenceResourceTypeId, + ReferenceResourceId, + ReferenceResourceVersion + FROM @ReferenceSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, Code, CodeOverflow) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId, + Code, + CodeOverflow + FROM @TokenSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenText (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Text + FROM @TokenTexts; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.StringSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text, TextOverflow, IsMin, IsMax) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Text, + TextOverflow, + IsMin, + IsMax + FROM @StringSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.UriSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Uri) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Uri + FROM @UriSearchParams; + SET @AffectedRows += @@rowcount; + 
INSERT INTO dbo.NumberSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SingleValue, + LowValue, + HighValue + FROM @NumberSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.QuantitySearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId, + QuantityCodeId, + SingleValue, + LowValue, + HighValue + FROM @QuantitySearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.DateTimeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, IsMin, IsMax) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + StartDateTime, + EndDateTime, + IsLongerThanADay, + IsMin, + IsMax + FROM @DateTimeSearchParms; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.ReferenceTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri1, ReferenceResourceTypeId1, ReferenceResourceId1, ReferenceResourceVersion1, SystemId2, Code2, CodeOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + BaseUri1, + ReferenceResourceTypeId1, + ReferenceResourceId1, + ReferenceResourceVersion1, + SystemId2, + Code2, + CodeOverflow2 + FROM @ReferenceTokenCompositeSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SystemId2, Code2, CodeOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SystemId2, + Code2, + CodeOverflow2 + FROM @TokenTokenCompositeSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenDateTimeCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, 
CodeOverflow1, StartDateTime2, EndDateTime2, IsLongerThanADay2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + StartDateTime2, + EndDateTime2, + IsLongerThanADay2 + FROM @TokenDateTimeCompositeSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenQuantityCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, SystemId2, QuantityCodeId2, LowValue2, HighValue2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SingleValue2, + SystemId2, + QuantityCodeId2, + LowValue2, + HighValue2 + FROM @TokenQuantityCompositeSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenStringCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, Text2, TextOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + Text2, + TextOverflow2 + FROM @TokenStringCompositeSearchParams; + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenNumberNumberCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, LowValue2, HighValue2, SingleValue3, LowValue3, HighValue3, HasRange) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SingleValue2, + LowValue2, + HighValue2, + SingleValue3, + LowValue3, + HighValue3, + HasRange + FROM @TokenNumberNumberCompositeSearchParams; + SET @AffectedRows += @@rowcount; + END + ELSE + BEGIN + INSERT INTO dbo.ResourceWriteClaim (ResourceSurrogateId, ClaimTypeId, ClaimValue) + SELECT ResourceSurrogateId, + ClaimTypeId, + ClaimValue + FROM (SELECT TOP (@DummyTop) * + FROM @ResourceWriteClaims) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM 
dbo.ResourceWriteClaim AS C + WHERE C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.ReferenceSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId, ReferenceResourceVersion) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + BaseUri, + ReferenceResourceTypeId, + ReferenceResourceId, + ReferenceResourceVersion + FROM (SELECT TOP (@DummyTop) * + FROM @ReferenceSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.ReferenceSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, Code, CodeOverflow) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId, + Code, + CodeOverflow + FROM (SELECT TOP (@DummyTop) * + FROM @TokenSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.TokenSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenText (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Text + FROM (SELECT TOP (@DummyTop) * + FROM @TokenTexts) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM 
dbo.TokenSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.StringSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text, TextOverflow, IsMin, IsMax) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Text, + TextOverflow, + IsMin, + IsMax + FROM (SELECT TOP (@DummyTop) * + FROM @StringSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.TokenText AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.UriSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Uri) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Uri + FROM (SELECT TOP (@DummyTop) * + FROM @UriSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.UriSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.NumberSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SingleValue, + LowValue, + HighValue + FROM (SELECT TOP (@DummyTop) * + FROM @NumberSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.NumberSearchParam AS C + WHERE C.ResourceTypeId = 
A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.QuantitySearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId, + QuantityCodeId, + SingleValue, + LowValue, + HighValue + FROM (SELECT TOP (@DummyTop) * + FROM @QuantitySearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.QuantitySearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.DateTimeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, IsMin, IsMax) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + StartDateTime, + EndDateTime, + IsLongerThanADay, + IsMin, + IsMax + FROM (SELECT TOP (@DummyTop) * + FROM @DateTimeSearchParms) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.TokenSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.ReferenceTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri1, ReferenceResourceTypeId1, ReferenceResourceId1, ReferenceResourceVersion1, SystemId2, Code2, CodeOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + BaseUri1, + ReferenceResourceTypeId1, + ReferenceResourceId1, + 
ReferenceResourceVersion1, + SystemId2, + Code2, + CodeOverflow2 + FROM (SELECT TOP (@DummyTop) * + FROM @ReferenceTokenCompositeSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.DateTimeSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SystemId2, Code2, CodeOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SystemId2, + Code2, + CodeOverflow2 + FROM (SELECT TOP (@DummyTop) * + FROM @TokenTokenCompositeSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.TokenTokenCompositeSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenDateTimeCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, StartDateTime2, EndDateTime2, IsLongerThanADay2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + StartDateTime2, + EndDateTime2, + IsLongerThanADay2 + FROM (SELECT TOP (@DummyTop) * + FROM @TokenDateTimeCompositeSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.TokenDateTimeCompositeSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + 
AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenQuantityCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, SystemId2, QuantityCodeId2, LowValue2, HighValue2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SingleValue2, + SystemId2, + QuantityCodeId2, + LowValue2, + HighValue2 + FROM (SELECT TOP (@DummyTop) * + FROM @TokenQuantityCompositeSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.TokenQuantityCompositeSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenStringCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, Text2, TextOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + Text2, + TextOverflow2 + FROM (SELECT TOP (@DummyTop) * + FROM @TokenStringCompositeSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.TokenStringCompositeSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + INSERT INTO dbo.TokenNumberNumberCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, LowValue2, HighValue2, SingleValue3, LowValue3, HighValue3, HasRange) + SELECT 
ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SingleValue2, + LowValue2, + HighValue2, + SingleValue3, + LowValue3, + HighValue3, + HasRange + FROM (SELECT TOP (@DummyTop) * + FROM @TokenNumberNumberCompositeSearchParams) AS A + WHERE EXISTS (SELECT * + FROM @Existing AS B + WHERE B.ResourceTypeId = A.ResourceTypeId + AND B.SurrogateId = A.ResourceSurrogateId) + AND NOT EXISTS (SELECT * + FROM dbo.TokenNumberNumberCompositeSearchParam AS C + WHERE C.ResourceTypeId = A.ResourceTypeId + AND C.ResourceSurrogateId = A.ResourceSurrogateId) + OPTION (MAXDOP 1, OPTIMIZE FOR (@DummyTop = 1)); + SET @AffectedRows += @@rowcount; + END + IF @IsResourceChangeCaptureEnabled = 1 + EXECUTE dbo.CaptureResourceIdsForChanges @Resources; + IF @TransactionId IS NOT NULL + EXECUTE dbo.MergeResourcesCommitTransaction @TransactionId; + IF @InitialTranCount = 0 + AND @@trancount > 0 + COMMIT TRANSACTION; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @AffectedRows; +END TRY +BEGIN CATCH + IF @InitialTranCount = 0 + AND @@trancount > 0 + ROLLBACK; + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + IF @RaiseExceptionOnConflict = 1 + AND error_number() IN (2601, 2627) + AND error_message() LIKE '%''dbo.Resource''%' + THROW 50409, 'Resource has been recently updated or added, please compare the resource content in code for any duplicate updates', 1; + ELSE + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.MergeResourcesAdvanceTransactionVisibility +@AffectedRows INT=0 OUTPUT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = '', @st AS DATETIME = getUTCdate(), @msg AS VARCHAR (1000), @MaxTransactionId AS BIGINT, @MinTransactionId AS BIGINT, @MinNotCompletedTransactionId AS BIGINT, @CurrentTransactionId AS BIGINT; +SET @AffectedRows = 0; +BEGIN TRY + EXECUTE 
dbo.MergeResourcesGetTransactionVisibility @MinTransactionId OUTPUT; + SET @MinTransactionId += 1; + SET @CurrentTransactionId = (SELECT TOP 1 SurrogateIdRangeFirstValue + FROM dbo.Transactions + ORDER BY SurrogateIdRangeFirstValue DESC); + SET @MinNotCompletedTransactionId = isnull((SELECT TOP 1 SurrogateIdRangeFirstValue + FROM dbo.Transactions + WHERE IsCompleted = 0 + AND SurrogateIdRangeFirstValue BETWEEN @MinTransactionId AND @CurrentTransactionId + ORDER BY SurrogateIdRangeFirstValue), @CurrentTransactionId + 1); + SET @MaxTransactionId = (SELECT TOP 1 SurrogateIdRangeFirstValue + FROM dbo.Transactions + WHERE IsCompleted = 1 + AND SurrogateIdRangeFirstValue BETWEEN @MinTransactionId AND @CurrentTransactionId + AND SurrogateIdRangeFirstValue < @MinNotCompletedTransactionId + ORDER BY SurrogateIdRangeFirstValue DESC); + IF @MaxTransactionId >= @MinTransactionId + BEGIN + UPDATE A + SET IsVisible = 1, + VisibleDate = getUTCdate() + FROM dbo.Transactions AS A WITH (INDEX (1)) + WHERE SurrogateIdRangeFirstValue BETWEEN @MinTransactionId AND @CurrentTransactionId + AND SurrogateIdRangeFirstValue <= @MaxTransactionId; + SET @AffectedRows += @@rowcount; + END + SET @msg = 'Min=' + CONVERT (VARCHAR, @MinTransactionId) + ' C=' + CONVERT (VARCHAR, @CurrentTransactionId) + ' MinNC=' + CONVERT (VARCHAR, @MinNotCompletedTransactionId) + ' Max=' + CONVERT (VARCHAR, @MaxTransactionId); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @AffectedRows, @Text = @msg; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.MergeResourcesBeginTransaction +@Count INT, @TransactionId BIGINT OUTPUT, @SequenceRangeFirstValue INT=NULL OUTPUT, @HeartbeatDate DATETIME=NULL +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'MergeResourcesBeginTransaction', @Mode AS VARCHAR (200) = 'Cnt=' + CONVERT (VARCHAR, @Count), @st AS 
DATETIME = getUTCdate(), @FirstValueVar AS SQL_VARIANT, @LastValueVar AS SQL_VARIANT; +BEGIN TRY + SET @TransactionId = NULL; + IF @@trancount > 0 + RAISERROR ('MergeResourcesBeginTransaction cannot be called inside outer transaction.', 18, 127); + SET @FirstValueVar = NULL; + WHILE @FirstValueVar IS NULL + BEGIN + EXECUTE sys.sp_sequence_get_range @sequence_name = 'dbo.ResourceSurrogateIdUniquifierSequence', @range_size = @Count, @range_first_value = @FirstValueVar OUTPUT, @range_last_value = @LastValueVar OUTPUT; + SET @SequenceRangeFirstValue = CONVERT (INT, @FirstValueVar); + IF @SequenceRangeFirstValue > CONVERT (INT, @LastValueVar) + SET @FirstValueVar = NULL; + END + SET @TransactionId = datediff_big(millisecond, '0001-01-01', sysUTCdatetime()) * 80000 + @SequenceRangeFirstValue; + INSERT INTO dbo.Transactions (SurrogateIdRangeFirstValue, SurrogateIdRangeLastValue, HeartbeatDate) + SELECT @TransactionId, + @TransactionId + @Count - 1, + isnull(@HeartbeatDate, getUTCdate()); +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + IF @@trancount > 0 + ROLLBACK; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.MergeResourcesCommitTransaction +@TransactionId BIGINT, @FailureReason VARCHAR (MAX)=NULL, @OverrideIsControlledByClientCheck BIT=0 +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'MergeResourcesCommitTransaction', @st AS DATETIME = getUTCdate(), @InitialTranCount AS INT = @@trancount, @IsCompletedBefore AS BIT, @Rows AS INT, @msg AS VARCHAR (1000); +DECLARE @Mode AS VARCHAR (200) = 'TR=' + CONVERT (VARCHAR, @TransactionId) + ' OC=' + isnull(CONVERT (VARCHAR, @OverrideIsControlledByClientCheck), 'NULL'); +BEGIN TRY + IF @InitialTranCount = 0 + BEGIN TRANSACTION; + UPDATE dbo.Transactions + SET IsCompleted = 1, + @IsCompletedBefore = IsCompleted, + EndDate = getUTCdate(), + IsSuccess = CASE WHEN @FailureReason IS NULL THEN 1 ELSE 0 END, + FailureReason = @FailureReason + WHERE 
SurrogateIdRangeFirstValue = @TransactionId + AND (IsControlledByClient = 1 + OR @OverrideIsControlledByClientCheck = 1); + SET @Rows = @@rowcount; + IF @Rows = 0 + BEGIN + SET @msg = 'Transaction [' + CONVERT (VARCHAR (20), @TransactionId) + '] is not controlled by client or does not exist.'; + RAISERROR (@msg, 18, 127); + END + IF @IsCompletedBefore = 1 + BEGIN + IF @InitialTranCount = 0 + ROLLBACK; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows, @Target = '@IsCompletedBefore', @Text = '=1'; + RETURN; + END + IF @InitialTranCount = 0 + COMMIT TRANSACTION; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF @InitialTranCount = 0 + AND @@trancount > 0 + ROLLBACK; + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.MergeResourcesDeleteInvisibleHistory +@TransactionId BIGINT, @AffectedRows INT=NULL OUTPUT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = 'T=' + CONVERT (VARCHAR, @TransactionId), @st AS DATETIME = getUTCdate(), @TypeId AS SMALLINT; +SET @AffectedRows = 0; +BEGIN TRY + DECLARE @Types TABLE ( + TypeId SMALLINT PRIMARY KEY, + Name VARCHAR (100)); + INSERT INTO @Types + EXECUTE dbo.GetUsedResourceTypes ; + WHILE EXISTS (SELECT * + FROM @Types) + BEGIN + SET @TypeId = (SELECT TOP 1 TypeId + FROM @Types + ORDER BY TypeId); + DELETE dbo.Resource + WHERE ResourceTypeId = @TypeId + AND HistoryTransactionId = @TransactionId + AND RawResource = 0xF; + SET @AffectedRows += @@rowcount; + DELETE @Types + WHERE TypeId = @TypeId; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @AffectedRows; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH 
+ +GO +CREATE PROCEDURE dbo.MergeResourcesGetTimeoutTransactions +@TimeoutSec INT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = 'T=' + CONVERT (VARCHAR, @TimeoutSec), @st AS DATETIME = getUTCdate(), @MinTransactionId AS BIGINT; +BEGIN TRY + EXECUTE dbo.MergeResourcesGetTransactionVisibility @MinTransactionId OUTPUT; + SELECT SurrogateIdRangeFirstValue + FROM dbo.Transactions + WHERE SurrogateIdRangeFirstValue > @MinTransactionId + AND IsCompleted = 0 + AND datediff(second, HeartbeatDate, getUTCdate()) > @TimeoutSec + ORDER BY SurrogateIdRangeFirstValue; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.MergeResourcesGetTransactionVisibility +@TransactionId BIGINT OUTPUT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = '', @st AS DATETIME = getUTCdate(); +SET @TransactionId = isnull((SELECT TOP 1 SurrogateIdRangeFirstValue + FROM dbo.Transactions + WHERE IsVisible = 1 + ORDER BY SurrogateIdRangeFirstValue DESC), -1); +EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount, @Text = @TransactionId; + +GO +CREATE PROCEDURE dbo.MergeResourcesPutTransactionHeartbeat +@TransactionId BIGINT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'MergeResourcesPutTransactionHeartbeat', @Mode AS VARCHAR (100) = 'TR=' + CONVERT (VARCHAR, @TransactionId); +BEGIN TRY + UPDATE dbo.Transactions + SET HeartbeatDate = getUTCdate() + WHERE SurrogateIdRangeFirstValue = @TransactionId + AND IsControlledByClient = 1; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.MergeResourcesPutTransactionInvisibleHistory 
+@TransactionId BIGINT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (100) = 'TR=' + CONVERT (VARCHAR, @TransactionId), @st AS DATETIME = getUTCdate(); +BEGIN TRY + UPDATE dbo.Transactions + SET InvisibleHistoryRemovedDate = getUTCdate() + WHERE SurrogateIdRangeFirstValue = @TransactionId + AND InvisibleHistoryRemovedDate IS NULL; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @@rowcount; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.PutJobCancelation +@QueueType TINYINT, @GroupId BIGINT=NULL, @JobId BIGINT=NULL +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'PutJobCancelation', @Mode AS VARCHAR (100) = 'Q=' + isnull(CONVERT (VARCHAR, @QueueType), 'NULL') + ' G=' + isnull(CONVERT (VARCHAR, @GroupId), 'NULL') + ' J=' + isnull(CONVERT (VARCHAR, @JobId), 'NULL'), @st AS DATETIME = getUTCdate(), @Rows AS INT, @PartitionId AS TINYINT = @JobId % 16; +BEGIN TRY + IF @JobId IS NULL + AND @GroupId IS NULL + RAISERROR ('@JobId = NULL and @GroupId = NULL', 18, 127); + IF @JobId IS NOT NULL + BEGIN + UPDATE dbo.JobQueue + SET Status = 4, + EndDate = getUTCdate(), + Version = datediff_big(millisecond, '0001-01-01', getUTCdate()) + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId + AND Status = 0; + SET @Rows = @@rowcount; + IF @Rows = 0 + BEGIN + UPDATE dbo.JobQueue + SET CancelRequested = 1 + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId + AND Status = 1; + SET @Rows = @@rowcount; + END + END + ELSE + BEGIN + UPDATE dbo.JobQueue + SET Status = 4, + EndDate = getUTCdate(), + Version = datediff_big(millisecond, '0001-01-01', getUTCdate()) + WHERE QueueType = @QueueType + AND GroupId = @GroupId + AND Status = 0; + SET @Rows = @@rowcount; + UPDATE dbo.JobQueue + SET CancelRequested = 
1 + WHERE QueueType = @QueueType + AND GroupId = @GroupId + AND Status = 1; + SET @Rows += @@rowcount; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.PutJobHeartbeat +@QueueType TINYINT, @JobId BIGINT, @Version BIGINT, @Data BIGINT=NULL, @CancelRequested BIT=0 OUTPUT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'PutJobHeartbeat', @Mode AS VARCHAR (100), @st AS DATETIME = getUTCdate(), @Rows AS INT = 0, @PartitionId AS TINYINT = @JobId % 16; +SET @Mode = 'Q=' + CONVERT (VARCHAR, @QueueType) + ' J=' + CONVERT (VARCHAR, @JobId) + ' P=' + CONVERT (VARCHAR, @PartitionId) + ' V=' + CONVERT (VARCHAR, @Version) + ' D=' + isnull(CONVERT (VARCHAR, @Data), 'NULL'); +BEGIN TRY + UPDATE dbo.JobQueue + SET @CancelRequested = CancelRequested, + HeartbeatDate = getUTCdate() + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId + AND Status = 1 + AND Version = @Version; + SET @Rows = @@rowcount; + IF @Rows = 0 + AND NOT EXISTS (SELECT * + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId + AND Version = @Version + AND Status IN (2, 3, 4)) + BEGIN + IF EXISTS (SELECT * + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId) + THROW 50412, 'Precondition failed', 1; + ELSE + THROW 50404, 'Job record not found', 1; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.PutJobStatus +@QueueType TINYINT, @JobId BIGINT, @Version BIGINT, @Failed BIT, @Data BIGINT, @FinalResult VARCHAR (MAX), 
@RequestCancellationOnFailure BIT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'PutJobStatus', @Mode AS VARCHAR (100), @st AS DATETIME = getUTCdate(), @Rows AS INT = 0, @PartitionId AS TINYINT = @JobId % 16, @GroupId AS BIGINT; +SET @Mode = 'Q=' + CONVERT (VARCHAR, @QueueType) + ' J=' + CONVERT (VARCHAR, @JobId) + ' P=' + CONVERT (VARCHAR, @PartitionId) + ' V=' + CONVERT (VARCHAR, @Version) + ' F=' + CONVERT (VARCHAR, @Failed) + ' R=' + isnull(@FinalResult, 'NULL'); +BEGIN TRY + UPDATE dbo.JobQueue + SET EndDate = getUTCdate(), + Status = CASE WHEN @Failed = 1 THEN 3 WHEN CancelRequested = 1 THEN 4 ELSE 2 END, + Data = @Data, + Result = @FinalResult, + @GroupId = GroupId + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId + AND Status = 1 + AND Version = @Version; + SET @Rows = @@rowcount; + IF @Rows = 0 + BEGIN + SET @GroupId = (SELECT GroupId + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId + AND Version = @Version + AND Status IN (2, 3, 4)); + IF @GroupId IS NULL + IF EXISTS (SELECT * + FROM dbo.JobQueue + WHERE QueueType = @QueueType + AND PartitionId = @PartitionId + AND JobId = @JobId) + THROW 50412, 'Precondition failed', 1; + ELSE + THROW 50404, 'Job record not found', 1; + END + IF @Failed = 1 + AND @RequestCancellationOnFailure = 1 + EXECUTE dbo.PutJobCancelation @QueueType = @QueueType, @GroupId = @GroupId; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error'; + THROW; +END CATCH + +GO +CREATE OR ALTER PROCEDURE dbo.RemovePartitionFromResourceChanges_2 +@partitionNumberToSwitchOut INT, @partitionBoundaryToMerge DATETIME2 (7) +AS +BEGIN + TRUNCATE TABLE dbo.ResourceChangeDataStaging; + ALTER TABLE dbo.ResourceChangeData SWITCH PARTITION @partitionNumberToSwitchOut TO 
dbo.ResourceChangeDataStaging; + ALTER PARTITION FUNCTION PartitionFunction_ResourceChangeData_Timestamp( ) + MERGE RANGE (@partitionBoundaryToMerge); + TRUNCATE TABLE dbo.ResourceChangeDataStaging; +END + +GO +CREATE PROCEDURE dbo.SwitchPartitionsIn +@Tbl VARCHAR (100) +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'SwitchPartitionsIn', @Mode AS VARCHAR (200) = 'Tbl=' + isnull(@Tbl, 'NULL'), @st AS DATETIME = getUTCdate(), @ResourceTypeId AS SMALLINT, @Rows AS BIGINT, @Txt AS VARCHAR (1000), @TblInt AS VARCHAR (100), @Ind AS VARCHAR (200), @IndId AS INT, @DataComp AS VARCHAR (100); +DECLARE @Indexes TABLE ( + IndId INT PRIMARY KEY, + name VARCHAR (200)); +DECLARE @ResourceTypes TABLE ( + ResourceTypeId SMALLINT PRIMARY KEY); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + IF @Tbl IS NULL + RAISERROR ('@Tbl IS NULL', 18, 127); + INSERT INTO @Indexes + SELECT index_id, + name + FROM sys.indexes + WHERE object_id = object_id(@Tbl) + AND is_disabled = 1; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Indexes', @Action = 'Insert', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @Indexes) + BEGIN + SELECT TOP 1 @IndId = IndId, + @Ind = name + FROM @Indexes + ORDER BY IndId; + SET @DataComp = CASE WHEN (SELECT PropertyValue + FROM dbo.IndexProperties + WHERE TableName = @Tbl + AND IndexName = @Ind) = 'PAGE' THEN ' PARTITION = ALL WITH (DATA_COMPRESSION = PAGE)' ELSE '' END; + SET @Txt = 'IF EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id(''' + @Tbl + ''') AND name = ''' + @Ind + ''' AND is_disabled = 1) ALTER INDEX ' + @Ind + ' ON dbo.' 
+ @Tbl + ' REBUILD' + @DataComp; + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Ind, @Action = 'Rebuild', @Text = @Txt; + DELETE @Indexes + WHERE IndId = @IndId; + END + INSERT INTO @ResourceTypes + SELECT CONVERT (SMALLINT, substring(name, charindex('_', name) + 1, 6)) AS ResourceTypeId + FROM sys.objects AS O + WHERE name LIKE @Tbl + '[_]%' + AND EXISTS (SELECT * + FROM sysindexes + WHERE id = O.object_id + AND indid IN (0, 1) + AND rows > 0); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '#ResourceTypes', @Action = 'Select Into', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @ResourceTypes) + BEGIN + SET @ResourceTypeId = (SELECT TOP 1 ResourceTypeId + FROM @ResourceTypes); + SET @TblInt = @Tbl + '_' + CONVERT (VARCHAR, @ResourceTypeId); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt; + SET @Txt = 'ALTER TABLE dbo.' + @TblInt + ' SWITCH TO dbo.' + @Tbl + ' PARTITION $partition.PartitionFunction_ResourceTypeId(' + CONVERT (VARCHAR, @ResourceTypeId) + ')'; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Tbl, @Action = 'Switch in start', @Text = @Txt; + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Tbl, @Action = 'Switch in', @Text = @Txt; + IF EXISTS (SELECT * + FROM sysindexes + WHERE id = object_id(@TblInt) + AND rows > 0) + BEGIN + SET @Txt = @TblInt + ' is not empty after switch'; + RAISERROR (@Txt, 18, 127); + END + EXECUTE ('DROP TABLE dbo.' 
+ @TblInt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Drop'; + DELETE @ResourceTypes + WHERE ResourceTypeId = @ResourceTypeId; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.SwitchPartitionsInAllTables +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'SwitchPartitionsInAllTables', @Mode AS VARCHAR (200) = 'PS=PartitionScheme_ResourceTypeId', @st AS DATETIME = getUTCdate(), @Tbl AS VARCHAR (100); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + DECLARE @Tables TABLE ( + name VARCHAR (100) PRIMARY KEY, + supported BIT ); + INSERT INTO @Tables + EXECUTE dbo.GetPartitionedTables @IncludeNotDisabled = 1, @IncludeNotSupported = 0; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Tables', @Action = 'Insert', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @Tables) + BEGIN + SET @Tbl = (SELECT TOP 1 name + FROM @Tables + ORDER BY name); + EXECUTE dbo.SwitchPartitionsIn @Tbl = @Tbl; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = 'SwitchPartitionsIn', @Action = 'Execute', @Text = @Tbl; + DELETE @Tables + WHERE name = @Tbl; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.SwitchPartitionsOut +@Tbl VARCHAR (100), @RebuildClustered BIT +WITH EXECUTE AS 'dbo' +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'SwitchPartitionsOut', @Mode AS VARCHAR (200) = 'Tbl=' + isnull(@Tbl, 'NULL') + ' ND=' + isnull(CONVERT (VARCHAR, @RebuildClustered), 
'NULL'), @st AS DATETIME = getUTCdate(), @ResourceTypeId AS SMALLINT, @Rows AS BIGINT, @Txt AS VARCHAR (MAX), @TblInt AS VARCHAR (100), @IndId AS INT, @Ind AS VARCHAR (200), @Name AS VARCHAR (100), @checkName AS VARCHAR (200), @definition AS VARCHAR (200); +DECLARE @Indexes TABLE ( + IndId INT PRIMARY KEY, + name VARCHAR (200), + IsDisabled BIT ); +DECLARE @IndexesRT TABLE ( + IndId INT PRIMARY KEY, + name VARCHAR (200), + IsDisabled BIT ); +DECLARE @ResourceTypes TABLE ( + ResourceTypeId SMALLINT PRIMARY KEY, + partition_number_roundtrip INT , + partition_number INT , + row_count BIGINT ); +DECLARE @Names TABLE ( + name VARCHAR (100) PRIMARY KEY); +DECLARE @CheckConstraints TABLE ( + CheckName VARCHAR (200), + CheckDefinition VARCHAR (200)); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + IF @Tbl IS NULL + RAISERROR ('@Tbl IS NULL', 18, 127); + IF @RebuildClustered IS NULL + RAISERROR ('@RebuildClustered IS NULL', 18, 127); + INSERT INTO @Indexes + SELECT index_id, + name, + is_disabled + FROM sys.indexes + WHERE object_id = object_id(@Tbl) + AND (is_disabled = 0 + OR @RebuildClustered = 1); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Indexes', @Action = 'Insert', @Rows = @@rowcount; + INSERT INTO @ResourceTypes + SELECT partition_number - 1 AS ResourceTypeId, + $PARTITION.PartitionFunction_ResourceTypeId (partition_number - 1) AS partition_number_roundtrip, + partition_number, + row_count + FROM sys.dm_db_partition_stats + WHERE object_id = object_id(@Tbl) + AND index_id = 1 + AND row_count > 0; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@ResourceTypes', @Action = 'Insert', @Rows = @@rowcount, @Text = 'For partition switch'; + IF EXISTS (SELECT * + FROM @ResourceTypes + WHERE partition_number_roundtrip <> partition_number) + RAISERROR ('Partition sanity check failed', 18, 127); + WHILE EXISTS (SELECT * + FROM @ResourceTypes) + BEGIN + SELECT TOP 1 
@ResourceTypeId = ResourceTypeId, + @Rows = row_count + FROM @ResourceTypes + ORDER BY ResourceTypeId; + SET @TblInt = @Tbl + '_' + CONVERT (VARCHAR, @ResourceTypeId); + SET @Txt = 'Starting @ResourceTypeId=' + CONVERT (VARCHAR, @ResourceTypeId) + ' row_count=' + CONVERT (VARCHAR, @Rows); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Text = @Txt; + IF NOT EXISTS (SELECT * + FROM sysindexes + WHERE id = object_id(@TblInt) + AND rows > 0) + BEGIN + IF object_id(@TblInt) IS NOT NULL + BEGIN + EXECUTE ('DROP TABLE dbo.' + @TblInt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Drop'; + END + EXECUTE ('SELECT * INTO dbo.' + @TblInt + ' FROM dbo.' + @Tbl + ' WHERE 1 = 2'); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Select Into', @Rows = @@rowcount; + DELETE @CheckConstraints; + INSERT INTO @CheckConstraints + SELECT name, + definition + FROM sys.check_constraints + WHERE parent_object_id = object_id(@Tbl); + WHILE EXISTS (SELECT * + FROM @CheckConstraints) + BEGIN + SELECT TOP 1 @checkName = CheckName, + @definition = CheckDefinition + FROM @CheckConstraints; + SET @Txt = 'ALTER TABLE ' + @TblInt + ' ADD CHECK ' + @definition; + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'ALTER', @Text = @Txt; + DELETE @CheckConstraints + WHERE CheckName = @checkName; + END + DELETE @Names; + INSERT INTO @Names + SELECT name + FROM sys.columns + WHERE object_id = object_id(@Tbl) + AND is_sparse = 1; + WHILE EXISTS (SELECT * + FROM @Names) + BEGIN + SET @Name = (SELECT TOP 1 name + FROM @Names + ORDER BY name); + SET @Txt = (SELECT 'ALTER TABLE dbo.' 
+ @TblInt + ' ALTER COLUMN ' + @Name + ' ' + T.name + '(' + CONVERT (VARCHAR, C.precision) + ',' + CONVERT (VARCHAR, C.scale) + ') SPARSE NULL' + FROM sys.types AS T + INNER JOIN + sys.columns AS C + ON C.system_type_id = T.system_type_id + WHERE C.object_id = object_id(@Tbl) + AND C.name = @Name); + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'ALTER', @Text = @Txt; + DELETE @Names + WHERE name = @Name; + END + END + INSERT INTO @IndexesRT + SELECT * + FROM @Indexes + WHERE IsDisabled = 0; + WHILE EXISTS (SELECT * + FROM @IndexesRT) + BEGIN + SELECT TOP 1 @IndId = IndId, + @Ind = name + FROM @IndexesRT + ORDER BY IndId; + IF NOT EXISTS (SELECT * + FROM sys.indexes + WHERE object_id = object_id(@TblInt) + AND name = @Ind) + BEGIN + EXECUTE dbo.GetIndexCommands @Tbl = @Tbl, @Ind = @Ind, @AddPartClause = 0, @IncludeClustered = 1, @Txt = @Txt OUTPUT; + SET @Txt = replace(@Txt, '[' + @Tbl + ']', @TblInt); + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @TblInt, @Action = 'Create Index', @Text = @Txt; + END + DELETE @IndexesRT + WHERE IndId = @IndId; + END + SET @Txt = 'ALTER TABLE dbo.' + @TblInt + ' ADD CHECK (ResourceTypeId >= ' + CONVERT (VARCHAR, @ResourceTypeId) + ' AND ResourceTypeId < ' + CONVERT (VARCHAR, @ResourceTypeId) + ' + 1)'; + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Tbl, @Action = 'Add check', @Text = @Txt; + SET @Txt = 'ALTER TABLE dbo.' + @Tbl + ' SWITCH PARTITION $partition.PartitionFunction_ResourceTypeId(' + CONVERT (VARCHAR, @ResourceTypeId) + ') TO dbo.' 
+ @TblInt; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Tbl, @Action = 'Switch out start', @Text = @Txt; + EXECUTE (@Txt); + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = @Tbl, @Action = 'Switch out end', @Text = @Txt; + DELETE @ResourceTypes + WHERE ResourceTypeId = @ResourceTypeId; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.SwitchPartitionsOutAllTables +@RebuildClustered BIT +AS +SET NOCOUNT ON; +DECLARE @SP AS VARCHAR (100) = 'SwitchPartitionsOutAllTables', @Mode AS VARCHAR (200) = 'PS=PartitionScheme_ResourceTypeId ND=' + isnull(CONVERT (VARCHAR, @RebuildClustered), 'NULL'), @st AS DATETIME = getUTCdate(), @Tbl AS VARCHAR (100); +BEGIN TRY + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Start'; + DECLARE @Tables TABLE ( + name VARCHAR (100) PRIMARY KEY, + supported BIT ); + INSERT INTO @Tables + EXECUTE dbo.GetPartitionedTables @IncludeNotDisabled = @RebuildClustered, @IncludeNotSupported = 0; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = '@Tables', @Action = 'Insert', @Rows = @@rowcount; + WHILE EXISTS (SELECT * + FROM @Tables) + BEGIN + SET @Tbl = (SELECT TOP 1 name + FROM @Tables + ORDER BY name); + EXECUTE dbo.SwitchPartitionsOut @Tbl = @Tbl, @RebuildClustered = @RebuildClustered; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Info', @Target = 'SwitchPartitionsOut', @Action = 'Execute', @Text = @Tbl; + DELETE @Tables + WHERE name = @Tbl; + END + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st; +END TRY +BEGIN CATCH + IF error_number() = 1750 + THROW; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; 
+END CATCH + +GO +GO +CREATE OR ALTER PROCEDURE dbo.UpdateEventAgentCheckpoint +@CheckpointId VARCHAR (64), @LastProcessedDateTime DATETIMEOFFSET (7)=NULL, @LastProcessedIdentifier VARCHAR (64)=NULL +AS +BEGIN + IF EXISTS (SELECT * + FROM dbo.EventAgentCheckpoint + WHERE CheckpointId = @CheckpointId) + UPDATE dbo.EventAgentCheckpoint + SET CheckpointId = @CheckpointId, + LastProcessedDateTime = @LastProcessedDateTime, + LastProcessedIdentifier = @LastProcessedIdentifier, + UpdatedOn = sysutcdatetime() + WHERE CheckpointId = @CheckpointId; + ELSE + INSERT INTO dbo.EventAgentCheckpoint (CheckpointId, LastProcessedDateTime, LastProcessedIdentifier, UpdatedOn) + VALUES (@CheckpointId, @LastProcessedDateTime, @LastProcessedIdentifier, sysutcdatetime()); +END + +GO +CREATE PROCEDURE dbo.UpdateReindexJob +@id VARCHAR (64), @status VARCHAR (10), @rawJobRecord VARCHAR (MAX), @jobVersion BINARY (8) +AS +SET NOCOUNT ON; +SET XACT_ABORT ON; +BEGIN TRANSACTION; +DECLARE @currentJobVersion AS BINARY (8); +SELECT @currentJobVersion = JobVersion +FROM dbo.ReindexJob WITH (UPDLOCK, HOLDLOCK) +WHERE Id = @id; +IF (@currentJobVersion IS NULL) + BEGIN + THROW 50404, 'Reindex job record not found', 1; + END +IF (@jobVersion <> @currentJobVersion) + BEGIN + THROW 50412, 'Precondition failed', 1; + END +DECLARE @heartbeatDateTime AS DATETIME2 (7) = SYSUTCDATETIME(); +UPDATE dbo.ReindexJob +SET Status = @status, + HeartbeatDateTime = @heartbeatDateTime, + RawJobRecord = @rawJobRecord +WHERE Id = @id; +SELECT @@DBTS; +COMMIT TRANSACTION; + +GO +CREATE PROCEDURE dbo.UpdateResourceSearchParams +@FailedResources INT=0 OUTPUT, @Resources dbo.ResourceList READONLY, @ResourceWriteClaims dbo.ResourceWriteClaimList READONLY, @ReferenceSearchParams dbo.ReferenceSearchParamList READONLY, @TokenSearchParams dbo.TokenSearchParamList READONLY, @TokenTexts dbo.TokenTextList READONLY, @StringSearchParams dbo.StringSearchParamList READONLY, @UriSearchParams dbo.UriSearchParamList READONLY, 
@NumberSearchParams dbo.NumberSearchParamList READONLY, @QuantitySearchParams dbo.QuantitySearchParamList READONLY, @DateTimeSearchParams dbo.DateTimeSearchParamList READONLY, @ReferenceTokenCompositeSearchParams dbo.ReferenceTokenCompositeSearchParamList READONLY, @TokenTokenCompositeSearchParams dbo.TokenTokenCompositeSearchParamList READONLY, @TokenDateTimeCompositeSearchParams dbo.TokenDateTimeCompositeSearchParamList READONLY, @TokenQuantityCompositeSearchParams dbo.TokenQuantityCompositeSearchParamList READONLY, @TokenStringCompositeSearchParams dbo.TokenStringCompositeSearchParamList READONLY, @TokenNumberNumberCompositeSearchParams dbo.TokenNumberNumberCompositeSearchParamList READONLY +AS +SET NOCOUNT ON; +DECLARE @st AS DATETIME = getUTCdate(), @SP AS VARCHAR (100) = object_name(@@procid), @Mode AS VARCHAR (200) = isnull((SELECT 'RT=[' + CONVERT (VARCHAR, min(ResourceTypeId)) + ',' + CONVERT (VARCHAR, max(ResourceTypeId)) + '] Sur=[' + CONVERT (VARCHAR, min(ResourceSurrogateId)) + ',' + CONVERT (VARCHAR, max(ResourceSurrogateId)) + '] V=' + CONVERT (VARCHAR, max(Version)) + ' Rows=' + CONVERT (VARCHAR, count(*)) + FROM @Resources), 'Input=Empty'), @Rows AS INT; +BEGIN TRY + DECLARE @Ids TABLE ( + ResourceTypeId SMALLINT NOT NULL, + ResourceSurrogateId BIGINT NOT NULL); + BEGIN TRANSACTION; + UPDATE B + SET SearchParamHash = A.SearchParamHash + OUTPUT deleted.ResourceTypeId, deleted.ResourceSurrogateId INTO @Ids + FROM @Resources AS A + INNER JOIN + dbo.Resource AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId + WHERE B.IsHistory = 0; + SET @Rows = @@rowcount; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.ResourceWriteClaim AS B + ON B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.ReferenceSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenSearchParam 
AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenText AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.StringSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.UriSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.NumberSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.QuantitySearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.DateTimeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.ReferenceTokenCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenTokenCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenDateTimeCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenQuantityCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A + INNER JOIN + dbo.TokenStringCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + DELETE B + FROM @Ids AS A 
+ INNER JOIN + dbo.TokenNumberNumberCompositeSearchParam AS B + ON B.ResourceTypeId = A.ResourceTypeId + AND B.ResourceSurrogateId = A.ResourceSurrogateId; + INSERT INTO dbo.ResourceWriteClaim (ResourceSurrogateId, ClaimTypeId, ClaimValue) + SELECT ResourceSurrogateId, + ClaimTypeId, + ClaimValue + FROM @ResourceWriteClaims; + INSERT INTO dbo.ReferenceSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri, ReferenceResourceTypeId, ReferenceResourceId, ReferenceResourceVersion) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + BaseUri, + ReferenceResourceTypeId, + ReferenceResourceId, + ReferenceResourceVersion + FROM @ReferenceSearchParams; + INSERT INTO dbo.TokenSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, Code, CodeOverflow) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId, + Code, + CodeOverflow + FROM @TokenSearchParams; + INSERT INTO dbo.TokenText (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Text + FROM @TokenTexts; + INSERT INTO dbo.StringSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Text, TextOverflow, IsMin, IsMax) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Text, + TextOverflow, + IsMin, + IsMax + FROM @StringSearchParams; + INSERT INTO dbo.UriSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, Uri) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + Uri + FROM @UriSearchParams; + INSERT INTO dbo.NumberSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SingleValue, LowValue, HighValue) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SingleValue, + LowValue, + HighValue + FROM @NumberSearchParams; + INSERT INTO dbo.QuantitySearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId, QuantityCodeId, SingleValue, LowValue, HighValue) + SELECT ResourceTypeId, + 
ResourceSurrogateId, + SearchParamId, + SystemId, + QuantityCodeId, + SingleValue, + LowValue, + HighValue + FROM @QuantitySearchParams; + INSERT INTO dbo.DateTimeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, StartDateTime, EndDateTime, IsLongerThanADay, IsMin, IsMax) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + StartDateTime, + EndDateTime, + IsLongerThanADay, + IsMin, + IsMax + FROM @DateTimeSearchParams; + INSERT INTO dbo.ReferenceTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, BaseUri1, ReferenceResourceTypeId1, ReferenceResourceId1, ReferenceResourceVersion1, SystemId2, Code2, CodeOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + BaseUri1, + ReferenceResourceTypeId1, + ReferenceResourceId1, + ReferenceResourceVersion1, + SystemId2, + Code2, + CodeOverflow2 + FROM @ReferenceTokenCompositeSearchParams; + INSERT INTO dbo.TokenTokenCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SystemId2, Code2, CodeOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SystemId2, + Code2, + CodeOverflow2 + FROM @TokenTokenCompositeSearchParams; + INSERT INTO dbo.TokenDateTimeCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, StartDateTime2, EndDateTime2, IsLongerThanADay2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + StartDateTime2, + EndDateTime2, + IsLongerThanADay2 + FROM @TokenDateTimeCompositeSearchParams; + INSERT INTO dbo.TokenQuantityCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, SystemId2, QuantityCodeId2, LowValue2, HighValue2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SingleValue2, + SystemId2, + 
QuantityCodeId2, + LowValue2, + HighValue2 + FROM @TokenQuantityCompositeSearchParams; + INSERT INTO dbo.TokenStringCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, Text2, TextOverflow2) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + Text2, + TextOverflow2 + FROM @TokenStringCompositeSearchParams; + INSERT INTO dbo.TokenNumberNumberCompositeSearchParam (ResourceTypeId, ResourceSurrogateId, SearchParamId, SystemId1, Code1, CodeOverflow1, SingleValue2, LowValue2, HighValue2, SingleValue3, LowValue3, HighValue3, HasRange) + SELECT ResourceTypeId, + ResourceSurrogateId, + SearchParamId, + SystemId1, + Code1, + CodeOverflow1, + SingleValue2, + LowValue2, + HighValue2, + SingleValue3, + LowValue3, + HighValue3, + HasRange + FROM @TokenNumberNumberCompositeSearchParams; + COMMIT TRANSACTION; + SET @FailedResources = (SELECT count(*) + FROM @Resources) - @Rows; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'End', @Start = @st, @Rows = @Rows; +END TRY +BEGIN CATCH + IF @@trancount > 0 + ROLLBACK; + EXECUTE dbo.LogEvent @Process = @SP, @Mode = @Mode, @Status = 'Error', @Start = @st; + THROW; +END CATCH + +GO +CREATE PROCEDURE dbo.UpsertSearchParams +@searchParams dbo.SearchParamTableType_2 READONLY +AS +SET NOCOUNT ON; +SET XACT_ABORT ON; +SET TRANSACTION ISOLATION LEVEL SERIALIZABLE; +BEGIN TRANSACTION; +DECLARE @lastUpdated AS DATETIMEOFFSET (7) = SYSDATETIMEOFFSET(); +DECLARE @summaryOfChanges TABLE ( + Uri VARCHAR (128) COLLATE Latin1_General_100_CS_AS NOT NULL, + Action VARCHAR (20) NOT NULL); +MERGE INTO dbo.SearchParam WITH (TABLOCKX) + AS target +USING @searchParams AS source ON target.Uri = source.Uri +WHEN MATCHED THEN UPDATE +SET Status = source.Status, + LastUpdated = @lastUpdated, + IsPartiallySupported = source.IsPartiallySupported +WHEN NOT MATCHED BY TARGET THEN INSERT (Uri, Status, LastUpdated, IsPartiallySupported) VALUES 
(source.Uri, source.Status, @lastUpdated, source.IsPartiallySupported) +OUTPUT source.Uri, $ACTION INTO @summaryOfChanges; +SELECT SearchParamId, + SearchParam.Uri +FROM dbo.SearchParam AS searchParam + INNER JOIN + @summaryOfChanges AS upsertedSearchParam + ON searchParam.Uri = upsertedSearchParam.Uri +WHERE upsertedSearchParam.Action = 'INSERT'; +COMMIT TRANSACTION; + +GO diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersion.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersion.cs index 84330af27a..d9653561c8 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersion.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersion.cs @@ -88,5 +88,6 @@ public enum SchemaVersion V76 = 76, V77 = 77, V78 = 78, + V79 = 79, } } diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersionConstants.cs b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersionConstants.cs index f69471481b..085a002545 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersionConstants.cs +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/SchemaVersionConstants.cs @@ -8,7 +8,7 @@ namespace Microsoft.Health.Fhir.SqlServer.Features.Schema public static class SchemaVersionConstants { public const int Min = (int)SchemaVersion.V73; - public const int Max = (int)SchemaVersion.V78; + public const int Max = (int)SchemaVersion.V79; public const int MinForUpgrade = (int)SchemaVersion.V73; // this is used for upgrade tests only public const int SearchParameterStatusSchemaVersion = (int)SchemaVersion.V6; public const int SupportForReferencesWithMissingTypeVersion = (int)SchemaVersion.V7; diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Scripts/TransactionCheckWithInitialiScript.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Scripts/TransactionCheckWithInitialiScript.sql index cc9eb2d8fa..03c7d6fab8 100644 --- 
a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Scripts/TransactionCheckWithInitialiScript.sql +++ b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Scripts/TransactionCheckWithInitialiScript.sql @@ -19,6 +19,6 @@ Go INSERT INTO dbo.SchemaVersion VALUES - (78, 'started') + (79, 'started') Go diff --git a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Sprocs/GetNonCompletedJobCountOfSpecificQueueType.sql b/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Sprocs/GetNonCompletedJobCountOfSpecificQueueType.sql deleted file mode 100644 index 718b5e45c9..0000000000 --- a/src/Microsoft.Health.Fhir.SqlServer/Features/Schema/Sql/Sprocs/GetNonCompletedJobCountOfSpecificQueueType.sql +++ /dev/null @@ -1,26 +0,0 @@ -/*************************************************************************** - Stored procedures for get NonCompleted Job Count Of SpecificQueueType -****************************************************************************/ --- This stored proc is used for import functionality in Health-PaaS --- STORED PROCEDURE --- GetNonCompletedJobCountOfSpecificQueueType --- --- DESCRIPTION --- Count the number of non-completed jobs of specific type. 
--- --- PARAMETERS --- @@queueType --- * The type of queue --- -CREATE OR ALTER PROCEDURE dbo.GetNonCompletedJobCountOfSpecificQueueType - @queueType tinyint - -AS -BEGIN - SET NOCOUNT ON - - SELECT COUNT(*) - FROM dbo.JobQueue - WHERE QueueType = @queueType AND (Status = 0 or Status = 1) -END -GO diff --git a/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj b/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj index 3e7a1db1fc..f18907b4a3 100644 --- a/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj +++ b/src/Microsoft.Health.Fhir.SqlServer/Microsoft.Health.Fhir.SqlServer.csproj @@ -1,7 +1,7 @@  - 78 + 79 Features\Schema\Migrations\$(LatestSchemaVersion).sql diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Microsoft.Health.Fhir.Shared.Tests.Integration.projitems b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Microsoft.Health.Fhir.Shared.Tests.Integration.projitems index 2e2ab43ff6..5dfac3e821 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Microsoft.Health.Fhir.Shared.Tests.Integration.projitems +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Microsoft.Health.Fhir.Shared.Tests.Integration.projitems @@ -30,6 +30,7 @@ + diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerColumnTypeChangeTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerColumnTypeChangeTests.cs new file mode 100644 index 0000000000..f6712d319f --- /dev/null +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlServerColumnTypeChangeTests.cs @@ -0,0 +1,282 @@ +// ------------------------------------------------------------------------------------------------- +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License (MIT). See LICENSE in the repo root for license information. 
+// ------------------------------------------------------------------------------------------------- + +using System; +using System.Collections.Generic; +using System.Data; +using System.Linq; +using Microsoft.Data.SqlClient; +using Microsoft.Data.SqlClient.Server; +using Microsoft.Health.Fhir.Tests.Common; +using Microsoft.Health.Test.Utilities; +using Xunit; + +namespace Microsoft.Health.Fhir.Tests.Integration.Persistence +{ + [Trait(Traits.OwningTeam, OwningTeam.Fhir)] + [Trait(Traits.Category, Categories.DataSourceValidation)] + public class SqlServerColumnTypeChangeTests : IClassFixture + { + private readonly SqlServerFhirStorageTestsFixture _fixture; + + public SqlServerColumnTypeChangeTests(SqlServerFhirStorageTestsFixture fixture) + { + _fixture = fixture; + } + + [Fact] + public void GivenColumnTypeChange_InsertAndSelectWork() + { + try + { + // int in the database + ExecuteSql("CREATE TABLE dbo.TestTbl (Id int NOT NULL, Col int NOT NULL)"); + ExecuteSql("CREATE TYPE dbo.TestList AS TABLE (Id int NOT NULL, Col int NOT NULL)"); + + Insert([new TestInt(1, 1), new TestInt(2, 2)]); + Insert([new TestLong(3, 3), new TestLong(4, 4)]); + Insert([new TestString(5, "5"), new TestString(6, "6")]); + var rowsInt = Select((reader) => new TestInt(reader.GetInt32(0), reader.GetInt32(1))).ToList(); + Assert.Equal(6, rowsInt.Count); + var rowsLong = Select((reader) => new TestLong(reader.GetInt32(0), reader.GetInt32(1))).ToList(); + Assert.Equal(6, rowsInt.Count); + var rowsString = Select((reader) => new TestString(reader.GetInt32(0), reader.GetInt32(1).ToString())).ToList(); + Assert.Equal(6, rowsString.Count); + + // long in the database + ExecuteSql("DROP TABLE dbo.TestTbl"); + ExecuteSql("DROP TYPE dbo.TestList"); + ExecuteSql("CREATE TABLE dbo.TestTbl (Id int NOT NULL, Col bigint NOT NULL)"); + ExecuteSql("CREATE TYPE dbo.TestList AS TABLE (Id int NOT NULL, Col bigint NOT NULL)"); + + Insert([new TestInt(1, 1), new TestInt(2, 2)]); + Insert([new TestLong(3, 3), new 
TestLong(4, 4)]); + Insert([new TestString(5, "5"), new TestString(6, "6")]); + rowsInt = Select((reader) => new TestInt(reader.GetInt32(0), (int)reader.GetInt64(1))).ToList(); + Assert.Equal(6, rowsInt.Count); + rowsLong = Select((reader) => new TestLong(reader.GetInt32(0), reader.GetInt64(1))).ToList(); + Assert.Equal(6, rowsInt.Count); + rowsString = Select((reader) => new TestString(reader.GetInt32(0), reader.GetInt64(1).ToString())).ToList(); + Assert.Equal(6, rowsString.Count); + + // string in the database + ExecuteSql("DROP TABLE dbo.TestTbl"); + ExecuteSql("DROP TYPE dbo.TestList"); + ExecuteSql("CREATE TABLE dbo.TestTbl (Id int NOT NULL, Col varchar(64) NOT NULL)"); + ExecuteSql("CREATE TYPE dbo.TestList AS TABLE (Id int NOT NULL, Col varchar(64) NOT NULL)"); + + Insert([new TestInt(1, 1), new TestInt(2, 2)]); + Insert([new TestLong(3, 3), new TestLong(4, 4)]); + Insert([new TestString(5, "5"), new TestString(6, "6")]); + rowsInt = Select((reader) => new TestInt(reader.GetInt32(0), int.Parse(reader.GetString(1)))).ToList(); + Assert.Equal(6, rowsInt.Count); + rowsLong = Select((reader) => new TestLong(reader.GetInt32(0), long.Parse(reader.GetString(1)))).ToList(); + Assert.Equal(6, rowsInt.Count); + rowsString = Select((reader) => new TestString(reader.GetInt32(0), reader.GetString(1))).ToList(); + Assert.Equal(6, rowsString.Count); + } + finally + { + ExecuteSql("IF object_id('dbo.TestTbl') IS NOT NULL DROP TABLE dbo.TestTbl"); + ExecuteSql("IF EXISTS (SELECT * FROM sys.types WHERE name = 'TestList') DROP TYPE dbo.TestList"); + } + } + + private void ExecuteSql(string sql) + { + using var conn = new SqlConnection(_fixture.TestConnectionString); + conn.Open(); + using var cmd = new SqlCommand(sql, conn); + cmd.ExecuteNonQuery(); + } + + private IEnumerable Select(Func toT) + { + using var conn = new SqlConnection(_fixture.TestConnectionString); + conn.Open(); + using var cmd = new SqlCommand("SELECT * FROM dbo.TestTbl", conn); + using var reader = 
cmd.ExecuteReader(); + while (reader.Read()) + { + yield return toT(reader); + } + } + + private void Insert(IEnumerable rows) + { + using var conn = new SqlConnection(_fixture.TestConnectionString); + conn.Open(); + using var cmd = new SqlCommand("INSERT INTO dbo.TestTbl SELECT * FROM @TestList", conn); + var param = new SqlParameter { ParameterName = "@TestList" }; + param.AddTestIntList(rows); + cmd.Parameters.Add(param); + cmd.ExecuteNonQuery(); + } + + private void Insert(IEnumerable rows) + { + using var conn = new SqlConnection(_fixture.TestConnectionString); + conn.Open(); + using var cmd = new SqlCommand("INSERT INTO dbo.TestTbl SELECT * FROM @TestList", conn); + var param = new SqlParameter { ParameterName = "@TestList" }; + param.AddTestLongList(rows); + cmd.Parameters.Add(param); + cmd.ExecuteNonQuery(); + } + + private void Insert(IEnumerable rows) + { + using var conn = new SqlConnection(_fixture.TestConnectionString); + conn.Open(); + using var cmd = new SqlCommand("INSERT INTO dbo.TestTbl SELECT * FROM @TestList", conn); + var param = new SqlParameter { ParameterName = "@TestList" }; + param.AddTestStringList(rows); + cmd.Parameters.Add(param); + cmd.ExecuteNonQuery(); + } + } + +#pragma warning disable SA1402 // File may only contain a single type + public class TestString + { + public TestString(SqlDataReader reader) + { + Id = reader.GetInt32(0); + Col = reader.GetString(1); + } + + public TestString(int id, string col) + { + Id = id; + Col = col; + } + + public int Id { get; } + + public string Col { get; } + } + + public class TestInt + { + public TestInt(SqlDataReader reader) + { + Id = reader.GetInt32(0); + Col = reader.GetInt32(1); + } + + public TestInt(int id, int col) + { + Id = id; + Col = col; + } + + public int Id { get; } + + public int Col { get; } + } + + public class TestLong + { + public TestLong(SqlDataReader reader) + { + Id = reader.GetInt32(0); + Col = reader.GetInt64(1); + } + + public TestLong(int id, long col) + { + Id = 
id; + Col = col; + } + + public int Id { get; } + + public long Col { get; } + } + + public static class SqlParamaterTestIntExtension + { + static SqlParamaterTestIntExtension() + { + MetaData = [new SqlMetaData("Id", SqlDbType.Int), new SqlMetaData("Col", SqlDbType.Int)]; + } + + private static SqlMetaData[] MetaData { get; } + + public static void AddTestIntList(this SqlParameter param, IEnumerable rows) + { + param.SqlDbType = SqlDbType.Structured; + param.TypeName = "dbo.TestList"; + param.Value = GetSqlDataRecords(rows); + } + + private static IEnumerable GetSqlDataRecords(IEnumerable rows) + { + var record = new SqlDataRecord(MetaData); + foreach (var row in rows) + { + record.SetSqlInt32(0, row.Id); + record.SetSqlInt32(1, row.Col); + yield return record; + } + } + } + + public static class SqlParamaterTestLongExtension + { + static SqlParamaterTestLongExtension() + { + MetaData = [new SqlMetaData("Id", SqlDbType.Int), new SqlMetaData("Col", SqlDbType.BigInt)]; + } + + private static SqlMetaData[] MetaData { get; } + + public static void AddTestLongList(this SqlParameter param, IEnumerable rows) + { + param.SqlDbType = SqlDbType.Structured; + param.TypeName = "dbo.TestList"; + param.Value = GetSqlDataRecords(rows); + } + + private static IEnumerable GetSqlDataRecords(IEnumerable rows) + { + var record = new SqlDataRecord(MetaData); + foreach (var row in rows) + { + record.SetSqlInt32(0, row.Id); + record.SetSqlInt64(1, row.Col); + yield return record; + } + } + } + + public static class SqlParamaterTestStringExtension + { + static SqlParamaterTestStringExtension() + { + MetaData = [new SqlMetaData("Id", SqlDbType.Int), new SqlMetaData("Col", SqlDbType.VarChar, 64)]; + } + + private static SqlMetaData[] MetaData { get; } + + public static void AddTestStringList(this SqlParameter param, IEnumerable rows) + { + param.SqlDbType = SqlDbType.Structured; + param.TypeName = "dbo.TestList"; + param.Value = GetSqlDataRecords(rows); + } + + private static IEnumerable 
GetSqlDataRecords(IEnumerable rows) + { + var record = new SqlDataRecord(MetaData); + foreach (var row in rows) + { + record.SetSqlInt32(0, row.Id); + record.SetSqlString(1, row.Col); + yield return record; + } + } + } +} From 0fe856e9c9da911a5b8aa944e822c043b5c671d9 Mon Sep 17 00:00:00 2001 From: "Nathan Lemma (Waferwire LLC)" Date: Tue, 23 Apr 2024 12:20:55 -0700 Subject: [PATCH 154/155] - remove unused reference in test integration project --- .../Persistence/SqlCustomQueryTests.cs | 1 - 1 file changed, 1 deletion(-) diff --git a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlCustomQueryTests.cs b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlCustomQueryTests.cs index de22234693..9f7631c945 100644 --- a/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlCustomQueryTests.cs +++ b/test/Microsoft.Health.Fhir.Shared.Tests.Integration/Persistence/SqlCustomQueryTests.cs @@ -16,7 +16,6 @@ using Microsoft.Health.Fhir.Tests.Common; using Microsoft.Health.Fhir.Tests.Common.FixtureParameters; using Microsoft.Health.Test.Utilities; -using Microsoft.SqlServer.Management.Sdk.Sfc; using Xunit; using Xunit.Abstractions; using Xunit.Sdk; From 2819ca0b19b9ae79de6e01301790ace3cf36eed7 Mon Sep 17 00:00:00 2001 From: "Nathan Lemma (Waferwire LLC)" Date: Thu, 25 Apr 2024 14:09:03 -0700 Subject: [PATCH 155/155] - Specify .net version for build and check version installed before building --- .github/actions/dotnet-build/action.yml | 4 ++++ .github/workflows/fhir-oss-ci-pipeline.yml | 3 +++ 2 files changed, 7 insertions(+) diff --git a/.github/actions/dotnet-build/action.yml b/.github/actions/dotnet-build/action.yml index b9127d813a..01d875b7e3 100644 --- a/.github/actions/dotnet-build/action.yml +++ b/.github/actions/dotnet-build/action.yml @@ -17,6 +17,10 @@ inputs: majorMinorPatch: description: The major.minor.patch version to use. 
required: true + dotnet-version: + description: 'The version of the .NET SDK to use' + required: true + default: '8.0.202' # Default version if not specified runs: using: composite steps: diff --git a/.github/workflows/fhir-oss-ci-pipeline.yml b/.github/workflows/fhir-oss-ci-pipeline.yml index 6c85a8fdc8..e3117043a2 100644 --- a/.github/workflows/fhir-oss-ci-pipeline.yml +++ b/.github/workflows/fhir-oss-ci-pipeline.yml @@ -79,6 +79,9 @@ jobs: with: fetch-depth: 0 + - name: Check DotNet Version + run: dotnet --version + - name: Build uses: ./.github/actions/dotnet-build with: