diff --git a/.github/workflows/cli-ci.yml b/.github/workflows/cli-ci.yml index d4c5bf24065..a6e585d1b45 100644 --- a/.github/workflows/cli-ci.yml +++ b/.github/workflows/cli-ci.yml @@ -6,7 +6,7 @@ on: - "cli/**" - ".github/workflows/cli-ci.yml" - "go.mod" - branches: [main] + branches: [main, feature/sjad] permissions: contents: read @@ -27,7 +27,7 @@ jobs: uses: golangci/golangci-lint-action@v3 with: version: v1.60.1 - args: -v --timeout 10m0s + args: --out-format=colored-line-number -v --timeout 10m0s working-directory: cli/azd cspell-lint: diff --git a/.github/workflows/cspell-misc.yml b/.github/workflows/cspell-misc.yml index ca97973a812..6686685741c 100644 --- a/.github/workflows/cspell-misc.yml +++ b/.github/workflows/cspell-misc.yml @@ -2,7 +2,7 @@ name: misc on: pull_request: - branches: [main] + branches: [main, feature/sjad] paths-ignore: # Changes here should be kept in-sync with projects listed in cspell.misc.yaml - 'eng/**' # Not required diff --git a/.github/workflows/event.yml b/.github/workflows/event.yml index 601aa66568c..6479128b97f 100644 --- a/.github/workflows/event.yml +++ b/.github/workflows/event.yml @@ -12,7 +12,7 @@ on: # entirely of github actions won't trigger this action. workflow_run: types: [completed] - workflows: ["cli-ci", "templates-ci", "vscode-ci"] + workflows: ["cli-ci"] permissions: {} diff --git a/.github/workflows/go-test-for-sjad-branch.yml b/.github/workflows/go-test-for-sjad-branch.yml new file mode 100644 index 00000000000..31b7c897341 --- /dev/null +++ b/.github/workflows/go-test-for-sjad-branch.yml @@ -0,0 +1,42 @@ +name: Go Test + +on: + pull_request: + branches: + - feature/sjad + +jobs: + test: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.23.1 + + - name: Cache Maven repository + uses: actions/cache@v4 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-maven- + + - name: Cache Go modules + uses: actions/cache@v4 + with: + path: | + ~/.cache/go-build + ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- + + - name: Run tests + run: | + cd ./cli/azd + go test $(go list ./... 
| grep -v github.com/azure/azure-dev/cli/azd/test/functional | grep -v github.com/azure/azure-dev/cli/azd/pkg/infra/provisioning/terraform) -cover -v diff --git a/.vscode/cspell.misc.yaml b/.vscode/cspell.misc.yaml index b32078de70c..f23b3bf5fa6 100644 --- a/.vscode/cspell.misc.yaml +++ b/.vscode/cspell.misc.yaml @@ -41,3 +41,5 @@ overrides: - myimage - azureai - entra + - servicebus + - eventhubs diff --git a/cli/azd/.vscode/cspell.yaml b/cli/azd/.vscode/cspell.yaml index a10ce775337..889925332db 100644 --- a/cli/azd/.vscode/cspell.yaml +++ b/cli/azd/.vscode/cspell.yaml @@ -16,6 +16,17 @@ words: - runcontext - unmarshals - usgovcloudapi + - springframework + - eventhubs + - jdbc + - datasource + - passwordless + - postgre + - mysqladmin + - sjad + - configserver + - chardata + - webflux languageSettings: - languageId: go ignoreRegExpList: @@ -38,6 +49,9 @@ overrides: - cloudapp - mediaservices - msecnd + - filename: internal/tracing/fields/fields.go + words: + - azuredeps - filename: docs/docgen.go words: - alexwolf @@ -119,6 +133,9 @@ overrides: - filename: internal/vsrpc/handler.go words: - arity + - filename: internal/appdetect/spring_boot.go + words: + - eirslett - filename: test/internal/tfoidc/main.go words: - tfoidc diff --git a/cli/azd/internal/appdetect/appdetect.go b/cli/azd/internal/appdetect/appdetect.go index e6103d3cbd7..38bbc546824 100644 --- a/cli/azd/internal/appdetect/appdetect.go +++ b/cli/azd/internal/appdetect/appdetect.go @@ -14,6 +14,7 @@ import ( "github.com/azure/azure-dev/cli/azd/pkg/exec" "github.com/azure/azure-dev/cli/azd/pkg/tools/dotnet" + "github.com/azure/azure-dev/cli/azd/pkg/tools/maven" "github.com/bmatcuk/doublestar/v4" ) @@ -59,13 +60,16 @@ const ( PyFlask Dependency = "flask" PyDjango Dependency = "django" PyFastApi Dependency = "fastapi" + + SpringFrontend Dependency = "springFrontend" ) var WebUIFrameworks = map[Dependency]struct{}{ - JsReact: {}, - JsAngular: {}, - JsJQuery: {}, - JsVite: {}, + JsReact: {}, + JsAngular: {}, + JsJQuery: {}, + JsVite: {}, + SpringFrontend: {}, } func (f Dependency) Language() Language { @@ -89,6 +93,8 @@ func (f Dependency) Display() string { return "Vite" case JsNext: return "Next.js" + case SpringFrontend: + return "Spring Frontend" } return "" @@ -110,6 +116,7 @@ const ( DbPostgres DatabaseDep = "postgres" DbMongo DatabaseDep = "mongo" DbMySql DatabaseDep = "mysql" + DbCosmos DatabaseDep = "cosmos" DbSqlServer DatabaseDep = "sqlserver" DbRedis DatabaseDep = "redis" ) @@ -122,6 +129,8 @@ func (db DatabaseDep) Display() string { return "MongoDB" case DbMySql: return "MySQL" + case DbCosmos: + return "Cosmos DB" case DbSqlServer: return "SQL Server" case DbRedis: @@ -131,6 +140,73 @@ func (db DatabaseDep) Display() string { return "" } +//type AzureDep string + +type AzureDep interface { + ResourceDisplay() string +} + +type AzureDepServiceBus struct { + Queues []string + IsJms bool +} + +func (a AzureDepServiceBus) ResourceDisplay() string { + return "Azure Service Bus" +} + +type AzureDepEventHubs struct { + EventHubsNamePropertyMap map[string]string + DependencyTypes []DependencyType + SpringBootVersion string +} + +type DependencyType string + +const ( + SpringCloudStreamEventHubs DependencyType = "spring-cloud-azure-stream-binder-eventhubs" + SpringCloudEventHubsStarter DependencyType = "spring-cloud-azure-starter-eventhubs" + SpringIntegrationEventHubs DependencyType = "spring-cloud-azure-starter-integration-eventhubs" + SpringMessagingEventHubs DependencyType = "spring-messaging-azure-eventhubs" + 
SpringCloudStreamKafka DependencyType = "spring-cloud-starter-stream-kafka" + SpringKafka DependencyType = "spring-kafka" +) + +func (a AzureDepEventHubs) UseKafka() bool { + for _, dependencyType := range a.DependencyTypes { + if dependencyType == SpringCloudStreamKafka || dependencyType == SpringKafka { + return true + } + } + return false +} + +func (a AzureDepEventHubs) ResourceDisplay() string { + return "Azure Event Hubs" +} + +type AzureDepStorageAccount struct { + ContainerNamePropertyMap map[string]string +} + +func (a AzureDepStorageAccount) ResourceDisplay() string { + return "Azure Storage Account" +} + +type Metadata struct { + ApplicationName string + ServerPort int + DatabaseNameInPropertySpringDatasourceUrl map[DatabaseDep]string + ContainsDependencySpringCloudAzureStarter bool + ContainsDependencySpringCloudAzureStarterJdbcPostgresql bool + ContainsDependencySpringCloudAzureStarterJdbcMysql bool + ContainsDependencySpringCloudEurekaServer bool + ContainsDependencySpringCloudEurekaClient bool + ContainsDependencySpringCloudConfigServer bool + ContainsDependencySpringCloudConfigClient bool + ContainsDependencyAboutEmbeddedWebServer bool +} + type Project struct { // The language associated with the project. Language Language @@ -141,9 +217,17 @@ type Project struct { // Experimental: Database dependencies inferred through heuristics while scanning dependencies in the project. DatabaseDeps []DatabaseDep + // Experimental: Azure dependencies inferred through heuristics while scanning dependencies in the project. + AzureDeps []AzureDep + + // Experimental: Metadata inferred through heuristics while scanning the project. + Metadata Metadata + // The path to the project directory. Path string + Options map[string]interface{} + // A short description of the detection rule applied. DetectionRule string @@ -179,7 +263,10 @@ type projectDetector interface { var allDetectors = []projectDetector{ // Order here determines precedence when two projects are in the same directory. // This is unlikely to occur in practice, but reordering could help to break the tie in these cases. - &javaDetector{}, + &javaDetector{ + mvnCli: maven.NewCli(exec.NewCommandRunner(nil)), + modulePoms: make(map[string]pom), + }, &dotNetAppHostDetector{ // TODO(ellismg): Remove ambient authority. 
dotnetCli: dotnet.NewCli(exec.NewCommandRunner(nil)), @@ -264,7 +351,7 @@ func detectAny(ctx context.Context, detectors []projectDetector, path string, en if project != nil { log.Printf("Found project %s at %s", project.Language, path) - // docker is an optional property of a project, and thus is different than other detectors + // docker is an optional property of a project, and thus is different from other detectors docker, err := detectDockerInDirectory(path, entries) if err != nil { return nil, fmt.Errorf("detecting docker project: %w", err) diff --git a/cli/azd/internal/appdetect/appdetect_test.go b/cli/azd/internal/appdetect/appdetect_test.go index b356a151ace..9a0d14cfa4c 100644 --- a/cli/azd/internal/appdetect/appdetect_test.go +++ b/cli/azd/internal/appdetect/appdetect_test.go @@ -43,17 +43,41 @@ func TestDetect(t *testing.T) { }, { Language: Java, - Path: "java-multimodules/application", + Path: "java-multi-levels/submodule/notsubmodule3", DetectionRule: "Inferred by presence of: pom.xml", - DatabaseDeps: []DatabaseDep{ - DbMySql, - DbPostgres, + }, + { + Language: Java, + Path: "java-multi-levels/submodule/subsubmodule1", + DetectionRule: "Inferred by presence of: pom.xml", + Options: map[string]interface{}{ + JavaProjectOptionParentPomDir: filepath.Join(dir, "java-multi-levels"), + }, + }, + { + Language: Java, + Path: "java-multi-levels/submodule/subsubmodule2", + DetectionRule: "Inferred by presence of: pom.xml", + Options: map[string]interface{}{ + JavaProjectOptionParentPomDir: filepath.Join(dir, "java-multi-levels"), }, }, { Language: Java, - Path: "java-multimodules/library", + Path: "java-multimodules/application", DetectionRule: "Inferred by presence of: pom.xml", + DatabaseDeps: []DatabaseDep{ + DbMongo, + DbMySql, + DbPostgres, + DbRedis, + }, + Options: map[string]interface{}{ + JavaProjectOptionParentPomDir: filepath.Join(dir, "java-multimodules"), + }, + Metadata: Metadata{ + ContainsDependencyAboutEmbeddedWebServer: true, + }, }, { Language: JavaScript, @@ -127,17 +151,41 @@ func TestDetect(t *testing.T) { }, { Language: Java, - Path: "java-multimodules/application", + Path: "java-multi-levels/submodule/notsubmodule3", DetectionRule: "Inferred by presence of: pom.xml", - DatabaseDeps: []DatabaseDep{ - DbMySql, - DbPostgres, + }, + { + Language: Java, + Path: "java-multi-levels/submodule/subsubmodule1", + DetectionRule: "Inferred by presence of: pom.xml", + Options: map[string]interface{}{ + JavaProjectOptionParentPomDir: filepath.Join(dir, "java-multi-levels"), + }, + }, + { + Language: Java, + Path: "java-multi-levels/submodule/subsubmodule2", + DetectionRule: "Inferred by presence of: pom.xml", + Options: map[string]interface{}{ + JavaProjectOptionParentPomDir: filepath.Join(dir, "java-multi-levels"), }, }, { Language: Java, - Path: "java-multimodules/library", + Path: "java-multimodules/application", DetectionRule: "Inferred by presence of: pom.xml", + DatabaseDeps: []DatabaseDep{ + DbMongo, + DbMySql, + DbPostgres, + DbRedis, + }, + Options: map[string]interface{}{ + JavaProjectOptionParentPomDir: filepath.Join(dir, "java-multimodules"), + }, + Metadata: Metadata{ + ContainsDependencyAboutEmbeddedWebServer: true, + }, }, }, }, @@ -160,17 +208,41 @@ func TestDetect(t *testing.T) { }, { Language: Java, - Path: "java-multimodules/application", + Path: "java-multi-levels/submodule/notsubmodule3", DetectionRule: "Inferred by presence of: pom.xml", - DatabaseDeps: []DatabaseDep{ - DbMySql, - DbPostgres, + }, + { + Language: Java, + Path: 
"java-multi-levels/submodule/subsubmodule1", + DetectionRule: "Inferred by presence of: pom.xml", + Options: map[string]interface{}{ + JavaProjectOptionParentPomDir: filepath.Join(dir, "java-multi-levels"), + }, + }, + { + Language: Java, + Path: "java-multi-levels/submodule/subsubmodule2", + DetectionRule: "Inferred by presence of: pom.xml", + Options: map[string]interface{}{ + JavaProjectOptionParentPomDir: filepath.Join(dir, "java-multi-levels"), }, }, { Language: Java, - Path: "java-multimodules/library", + Path: "java-multimodules/application", DetectionRule: "Inferred by presence of: pom.xml", + DatabaseDeps: []DatabaseDep{ + DbMongo, + DbMySql, + DbPostgres, + DbRedis, + }, + Options: map[string]interface{}{ + JavaProjectOptionParentPomDir: filepath.Join(dir, "java-multimodules"), + }, + Metadata: Metadata{ + ContainsDependencyAboutEmbeddedWebServer: true, + }, }, }, }, @@ -196,17 +268,41 @@ func TestDetect(t *testing.T) { }, { Language: Java, - Path: "java-multimodules/application", + Path: "java-multi-levels/submodule/notsubmodule3", DetectionRule: "Inferred by presence of: pom.xml", - DatabaseDeps: []DatabaseDep{ - DbMySql, - DbPostgres, + }, + { + Language: Java, + Path: "java-multi-levels/submodule/subsubmodule1", + DetectionRule: "Inferred by presence of: pom.xml", + Options: map[string]interface{}{ + JavaProjectOptionParentPomDir: filepath.Join(dir, "java-multi-levels"), + }, + }, + { + Language: Java, + Path: "java-multi-levels/submodule/subsubmodule2", + DetectionRule: "Inferred by presence of: pom.xml", + Options: map[string]interface{}{ + JavaProjectOptionParentPomDir: filepath.Join(dir, "java-multi-levels"), }, }, { Language: Java, - Path: "java-multimodules/library", + Path: "java-multimodules/application", DetectionRule: "Inferred by presence of: pom.xml", + DatabaseDeps: []DatabaseDep{ + DbMongo, + DbMySql, + DbPostgres, + DbRedis, + }, + Options: map[string]interface{}{ + JavaProjectOptionParentPomDir: filepath.Join(dir, "java-multimodules"), + }, + Metadata: Metadata{ + ContainsDependencyAboutEmbeddedWebServer: true, + }, }, { Language: Python, diff --git a/cli/azd/internal/appdetect/java.go b/cli/azd/internal/appdetect/java.go index fe6fec3ea65..f0361672812 100644 --- a/cli/azd/internal/appdetect/java.go +++ b/cli/azd/internal/appdetect/java.go @@ -2,142 +2,107 @@ package appdetect import ( "context" - "encoding/xml" - "fmt" "io/fs" - "maps" - "os" + "log" "path/filepath" - "slices" "strings" + + "github.com/azure/azure-dev/cli/azd/internal/tracing" + "github.com/azure/azure-dev/cli/azd/internal/tracing/fields" + "github.com/azure/azure-dev/cli/azd/pkg/tools/maven" ) type javaDetector struct { - rootProjects []mavenProject + mvnCli *maven.Cli + rootPoms []pom + modulePoms map[string]pom } +// JavaProjectOptionParentPomDir The parent module path of the maven multi-module project +const JavaProjectOptionParentPomDir = "parentPath" + func (jd *javaDetector) Language() Language { return Java } func (jd *javaDetector) DetectProject(ctx context.Context, path string, entries []fs.DirEntry) (*Project, error) { for _, entry := range entries { - if strings.ToLower(entry.Name()) == "pom.xml" { - pomFile := filepath.Join(path, entry.Name()) - project, err := readMavenProject(pomFile) + if strings.ToLower(entry.Name()) == "pom.xml" { // todo: support file names like backend-pom.xml + tracing.SetUsageAttributes(fields.AppInitJavaDetect.String("start")) + pomPath := filepath.Join(path, entry.Name()) + mavenProject, err := createMavenProject(ctx, jd.mvnCli, pomPath) if err != nil 
{
-				return nil, fmt.Errorf("error reading pom.xml: %w", err)
+				log.Printf("Please edit azure.yaml manually to satisfy your requirements. azd cannot help you "+
+					"with that by detecting your java project, because an error happened when reading pom.xml: %s. ", err)
+				return nil, nil
 			}
 
-			if len(project.Modules) > 0 {
-				// This is a multi-module project, we will capture the analysis, but return nil
-				// to continue recursing
-				jd.rootProjects = append(jd.rootProjects, *project)
+			if len(mavenProject.pom.Modules) > 0 {
+				// This is a multi-module project; we will capture the analysis, but return nil to continue recursing
+				jd.captureRootAndModules(mavenProject, path)
 				return nil, nil
 			}
 
-			var currentRoot *mavenProject
-			for _, rootProject := range jd.rootProjects {
-				// we can say that the project is in the root project if the path is under the project
-				if inRoot := strings.HasPrefix(pomFile, rootProject.path); inRoot {
-					currentRoot = &rootProject
+			if !isSpringBootRunnableProject(mavenProject) {
+				return nil, nil
+			}
+
+			var parentPom *pom
+			for _, parentPomItem := range jd.rootPoms {
+				// we can say that the project is in the root project if
+				// 1) the project path is under the root project
+				// 2) the project is a module of the root project
+				parentPomFilePath := parentPomItem.pomFilePath
+				underRootPath := strings.HasPrefix(pomPath, filepath.Dir(parentPomFilePath)+string(filepath.Separator))
+				rootPomItem, exist := jd.modulePoms[mavenProject.pom.pomFilePath]
+				if underRootPath && exist && rootPomItem.pomFilePath == parentPomFilePath {
+					parentPom = &parentPomItem
+					break
 				}
 			}
-			_ = currentRoot // use currentRoot here in the analysis
 
-			result, err := detectDependencies(project, &Project{
+			project := Project{
 				Language:      Java,
 				Path:          path,
 				DetectionRule: "Inferred by presence of: pom.xml",
-			})
-			if err != nil {
-				return nil, fmt.Errorf("detecting dependencies: %w", err)
+			}
+			detectAzureDependenciesByAnalyzingSpringBootProject(mavenProject, &project)
+			if parentPom != nil {
+				project.Options = map[string]interface{}{
+					JavaProjectOptionParentPomDir: filepath.Dir(parentPom.pomFilePath),
+				}
 			}
 
-			return result, nil
+			tracing.SetUsageAttributes(fields.AppInitJavaDetect.String("finish"))
+			return &project, nil
 		}
 	}
-
 	return nil, nil
 }
 
-// mavenProject represents the top-level structure of a Maven POM file.
-type mavenProject struct {
-	XmlName              xml.Name             `xml:"project"`
-	Parent               parent               `xml:"parent"`
-	Modules              []string             `xml:"modules>module"` // Capture the modules
-	Dependencies         []dependency         `xml:"dependencies>dependency"`
-	DependencyManagement dependencyManagement `xml:"dependencyManagement"`
-	Build                build                `xml:"build"`
-	path                 string
-}
-
-// Parent represents the parent POM if this project is a module.
-type parent struct {
-	GroupId    string `xml:"groupId"`
-	ArtifactId string `xml:"artifactId"`
-	Version    string `xml:"version"`
-}
-
-// Dependency represents a single Maven dependency.
-type dependency struct {
-	GroupId    string `xml:"groupId"`
-	ArtifactId string `xml:"artifactId"`
-	Version    string `xml:"version"`
-	Scope      string `xml:"scope,omitempty"`
-}
-
-// DependencyManagement includes a list of dependencies that are managed.
-type dependencyManagement struct {
-	Dependencies []dependency `xml:"dependencies>dependency"`
-}
-
-// Build represents the build configuration which can contain plugins.
-type build struct {
-	Plugins []plugin `xml:"plugins>plugin"`
-}
-
-// Plugin represents a build plugin.
-type plugin struct { - GroupId string `xml:"groupId"` - ArtifactId string `xml:"artifactId"` - Version string `xml:"version"` -} - -func readMavenProject(filePath string) (*mavenProject, error) { - bytes, err := os.ReadFile(filePath) - if err != nil { - return nil, err - } - - var project mavenProject - if err := xml.Unmarshal(bytes, &project); err != nil { - return nil, fmt.Errorf("parsing xml: %w", err) +// captureRootAndModules records the root and modules information for parent detection later +func (jd *javaDetector) captureRootAndModules(mavenProject mavenProject, path string) { + if _, ok := jd.modulePoms[mavenProject.pom.pomFilePath]; !ok { + // add into rootPoms if it's new root + jd.rootPoms = append(jd.rootPoms, mavenProject.pom) } - - project.path = filepath.Dir(filePath) - - return &project, nil -} - -func detectDependencies(mavenProject *mavenProject, project *Project) (*Project, error) { - databaseDepMap := map[DatabaseDep]struct{}{} - for _, dep := range mavenProject.Dependencies { - if dep.GroupId == "com.mysql" && dep.ArtifactId == "mysql-connector-j" { - databaseDepMap[DbMySql] = struct{}{} + for _, module := range mavenProject.pom.Modules { + // for module: submodule, module path is the ./submodule/pom.xml + // for module: backend-pom.xml, module path is the /backend-pom.xml + var modulePath string + if strings.HasSuffix(module, ".xml") { + modulePath = filepath.Join(path, module) + } else { + modulePath = filepath.Join(path, module, "pom.xml") } - - if dep.GroupId == "org.postgresql" && dep.ArtifactId == "postgresql" { - databaseDepMap[DbPostgres] = struct{}{} + // modulePath points to the actual root pom, not current parent pom + jd.modulePoms[modulePath] = mavenProject.pom + for { + if result, ok := jd.modulePoms[jd.modulePoms[modulePath].pomFilePath]; ok { + jd.modulePoms[modulePath] = result + } else { + break + } } } - - if len(databaseDepMap) > 0 { - project.DatabaseDeps = slices.SortedFunc(maps.Keys(databaseDepMap), - func(a, b DatabaseDep) int { - return strings.Compare(string(a), string(b)) - }) - } - - return project, nil } diff --git a/cli/azd/internal/appdetect/maven_project.go b/cli/azd/internal/appdetect/maven_project.go new file mode 100644 index 00000000000..2f2ecaa7351 --- /dev/null +++ b/cli/azd/internal/appdetect/maven_project.go @@ -0,0 +1,21 @@ +package appdetect + +import ( + "context" + + "github.com/azure/azure-dev/cli/azd/pkg/tools/maven" +) + +type mavenProject struct { + pom pom +} + +func createMavenProject(ctx context.Context, mvnCli *maven.Cli, pomFilePath string) (mavenProject, error) { + pom, err := createEffectivePomOrSimulatedEffectivePom(ctx, mvnCli, pomFilePath) + if err != nil { + return mavenProject{}, err + } + return mavenProject{ + pom: pom, + }, nil +} diff --git a/cli/azd/internal/appdetect/pom.go b/cli/azd/internal/appdetect/pom.go new file mode 100644 index 00000000000..8745c7e50b6 --- /dev/null +++ b/cli/azd/internal/appdetect/pom.go @@ -0,0 +1,637 @@ +package appdetect + +import ( + "context" + "encoding/xml" + "fmt" + "log" + "log/slog" + "os" + "path/filepath" + "strings" + + "github.com/azure/azure-dev/cli/azd/internal" + "github.com/azure/azure-dev/cli/azd/pkg/tools/maven" +) + +// pom represents the top-level structure of a Maven POM file. 
+type pom struct {
+	XmlName                 xml.Name             `xml:"project"`
+	Parent                  parent               `xml:"parent"`
+	GroupId                 string               `xml:"groupId"`
+	ArtifactId              string               `xml:"artifactId"`
+	Version                 string               `xml:"version"`
+	Properties              Properties           `xml:"properties"`
+	Modules                 []string             `xml:"modules>module"`
+	Dependencies            []dependency         `xml:"dependencies>dependency"`
+	DependencyManagement    dependencyManagement `xml:"dependencyManagement"`
+	Profiles                []profile            `xml:"profiles>profile"`
+	Build                   build                `xml:"build"`
+	pomFilePath             string
+	propertyMap             map[string]string
+	dependencyManagementMap map[string]string
+}
+
+// Parent represents the parent POM if this project is a module.
+type parent struct {
+	GroupId      string `xml:"groupId"`
+	ArtifactId   string `xml:"artifactId"`
+	Version      string `xml:"version"`
+	RelativePath string `xml:"relativePath"`
+}
+
+type Properties struct {
+	Entries []Property `xml:",any"` // Capture all elements inside <properties>
+}
+
+type Property struct {
+	XMLName xml.Name
+	Value   string `xml:",chardata"`
+}
+
+// Dependency represents a single Maven dependency.
+type dependency struct {
+	GroupId    string `xml:"groupId"`
+	ArtifactId string `xml:"artifactId"`
+	Version    string `xml:"version"`
+	Scope      string `xml:"scope,omitempty"`
+}
+
+type profile struct {
+	Id                      string               `xml:"id"`
+	ActiveByDefault         string               `xml:"activation>activeByDefault"`
+	Properties              Properties           `xml:"properties"`
+	Modules                 []string             `xml:"modules>module"` // Capture the modules
+	Dependencies            []dependency         `xml:"dependencies>dependency"`
+	DependencyManagement    dependencyManagement `xml:"dependencyManagement"`
+	Build                   build                `xml:"build"`
+	propertyMap             map[string]string
+	dependencyManagementMap map[string]string
+}
+
+// DependencyManagement includes a list of dependencies that are managed.
+type dependencyManagement struct {
+	Dependencies []dependency `xml:"dependencies>dependency"`
+}
+
+// Build represents the build configuration which can contain plugins.
+type build struct {
+	Plugins []plugin `xml:"plugins>plugin"`
+}
+
+// Plugin represents a build plugin.
+type plugin struct {
+	GroupId    string `xml:"groupId"`
+	ArtifactId string `xml:"artifactId"`
+	Version    string `xml:"version"`
+}
+
+const (
+	DependencyScopeCompile string = "compile"
+	DependencyScopeTest    string = "test"
+)
+
+func createEffectivePomOrSimulatedEffectivePom(ctx context.Context, mvnCli *maven.Cli, pomPath string) (pom, error) {
+	effectivePom, err := createEffectivePom(ctx, mvnCli, pomPath)
+	if err == nil {
+		effectivePom.pomFilePath = pomPath
+		return effectivePom, nil
+	}
+	return createSimulatedEffectivePom(pomPath)
+}
+
+// The simulated effective pom is not strictly equal to the real effective pom;
+// it just tries its best to keep these items the same as in the real effective pom:
+// 1. pom.Dependencies. Only care about the groupId/artifactId/version.
+// 2. pom.Build.Plugins.
+// 2.1. Only care about the groupId/artifactId/version.
+// 2.2. Does not include the default maven plugins (names matching the pattern "maven-xxx-plugin").
+func createSimulatedEffectivePom(pomFilePath string) (pom, error) { + pom, err := unmarshalPomFromFilePath(pomFilePath) + if err != nil { + return pom, err + } + convertToSimulatedEffectivePom(&pom) + return pom, nil +} + +func convertToSimulatedEffectivePom(pom *pom) { + setDefaultScopeForDependenciesInAllPlaces(pom) + + createPropertyMapAccordingToProjectProperty(pom) + addCommonPropertiesLikeProjectGroupIdAndProjectVersionToPropertyMap(pom) + // replacePropertyPlaceHolderInPropertyMap should run before other replacePropertyPlaceHolderInXxx + replacePropertyPlaceHolderInPropertyMap(pom) + // replacePropertyPlaceHolderInGroupId should run before createDependencyManagementMap + replacePropertyPlaceHolderInGroupId(pom) + // createDependencyManagementMap run before replacePropertyPlaceHolderInVersion + createDependencyManagementMap(pom) + + // active profile has higher priority than parent and imported bom in dependency management + absorbInformationFromActiveProfile(pom) + // replacePropertyPlaceHolderInVersion should run after absorbInformationFromActiveProfile + replacePropertyPlaceHolderInVersion(pom) + absorbInformationFromParentAndImportedDependenciesInDependencyManagement(pom) + // updateDependencyVersionAccordingToDependencyManagement should run after absorbInformationFromActiveProfile + updateDependencyVersionAccordingToDependencyManagement(pom) +} + +func absorbInformationFromActiveProfile(pom *pom) { + for i := range pom.Profiles { + if pom.Profiles[i].ActiveByDefault != "true" { + continue + } + absorbPropertyMap(pom, pom.Profiles[i].propertyMap, true) + absorbDependencyManagement(pom, pom.Profiles[i].dependencyManagementMap, true) + absorbDependencies(pom, pom.Profiles[i].Dependencies) + absorbBuildPlugins(pom, pom.Profiles[i].Build.Plugins) + } +} + +func absorbInformationFromParentAndImportedDependenciesInDependencyManagement(pom *pom) { + absorbInformationFromParent(pom) + absorbImportedBomInDependencyManagement(pom) +} + +func absorbInformationFromParent(pom *pom) { + if !parentExists(*pom) { + slog.DebugContext(context.TODO(), "Skip analyze parent pom because parent not set.", + "pomFilePath", pom.pomFilePath) + return + } + if absorbInformationFromParentInLocalFileSystem(pom) { + return + } + absorbInformationFromParentInRemoteMavenRepository(pom) +} + +func absorbInformationFromParentInLocalFileSystem(pom *pom) bool { + parentPomFilePath := getParentPomFilePath(*pom) + if !fileExists(parentPomFilePath) { + slog.DebugContext(context.TODO(), "Skip analyze parent pom because parent pom file not set.", + "pomFilePath", pom.pomFilePath) + return false + } + parentEffectivePom, err := createSimulatedEffectivePom(parentPomFilePath) + if err != nil { + slog.DebugContext(context.TODO(), "Skip analyze parent pom because analyze parent pom failed.", + "pomFilePath", pom.pomFilePath) + return false + } + if pom.Parent.GroupId != parentEffectivePom.GroupId || + pom.Parent.ArtifactId != parentEffectivePom.ArtifactId || + pom.Parent.Version != parentEffectivePom.Version { + slog.DebugContext(context.TODO(), "Skip analyze parent pom because groupId/artifactId/version not the same.", + "pomFilePath", pom.pomFilePath) + return false + } + absorbInformationFromParentPom(pom, parentEffectivePom) + return true +} + +func parentExists(pom pom) bool { + return pom.Parent.GroupId != "" && pom.Parent.ArtifactId != "" +} + +func getParentPomFilePath(pom pom) string { + relativePath := pom.Parent.RelativePath + if relativePath == "" { + relativePath = "../pom.xml" + } + parentPomFilePath := 
filepath.Join(filepath.Dir(makePathFitCurrentOs(pom.pomFilePath)), + makePathFitCurrentOs(relativePath)) + parentPomFilePath = filepath.Clean(parentPomFilePath) + return parentPomFilePath +} + +func makePathFitCurrentOs(filePath string) string { + if os.PathSeparator == '\\' { + return strings.ReplaceAll(filePath, "/", "\\") + } else { + return strings.ReplaceAll(filePath, "\\", "/") + } +} + +func absorbInformationFromParentInRemoteMavenRepository(pom *pom) { + p := pom.Parent + parent, err := getSimulatedEffectivePomFromMavenRepository(p.GroupId, p.ArtifactId, p.Version) + if err != nil { + slog.InfoContext(context.TODO(), "Skip absorb parent from remote maven repository.", + "ArtifactId", pom.ArtifactId, "err", err) + } + absorbInformationFromParentPom(pom, parent) +} + +func absorbInformationFromParentPom(pom *pom, parent pom) { + absorbPropertyMap(pom, parent.propertyMap, false) + absorbDependencyManagement(pom, parent.dependencyManagementMap, false) + absorbDependencies(pom, parent.Dependencies) + absorbBuildPlugins(pom, parent.Build.Plugins) +} + +func absorbDependencies(pom *pom, dependencies []dependency) { + for _, dep := range dependencies { + if !containsDependency(pom.Dependencies, dep) { + pom.Dependencies = append(pom.Dependencies, dep) + } + } +} + +func containsDependency(deps []dependency, targetDep dependency) bool { + for _, dep := range deps { + if dep.GroupId == targetDep.GroupId && dep.ArtifactId == targetDep.ArtifactId { + return true + } + } + return false +} + +func absorbBuildPlugins(pom *pom, plugins []plugin) { + for _, p := range plugins { + if !containsBuildPlugin(pom.Build.Plugins, p) { + pom.Build.Plugins = append(pom.Build.Plugins, p) + } + } +} + +func containsBuildPlugin(plugins []plugin, targetPlugin plugin) bool { + for _, p := range plugins { + if p.GroupId == targetPlugin.GroupId && p.ArtifactId == targetPlugin.ArtifactId { + return true + } + } + return false +} + +func absorbImportedBomInDependencyManagement(pom *pom) { + for _, dep := range pom.DependencyManagement.Dependencies { + if dep.Scope != "import" { + continue + } + toBeAbsorbedPom, err := getSimulatedEffectivePomFromMavenRepository( + dep.GroupId, dep.ArtifactId, dep.Version) + if err != nil { + slog.InfoContext(context.TODO(), "Skip absorb imported bom from remote maven repository.", + "ArtifactId", pom.ArtifactId, "err", err) + } + absorbDependencyManagement(pom, toBeAbsorbedPom.dependencyManagementMap, false) + } +} + +func absorbPropertyMap(pom *pom, propertyMap map[string]string, override bool) { + for key, value := range propertyMap { + updatePropertyMap(pom.propertyMap, key, value, override) + } + replacePropertyPlaceHolderInPropertyMap(pom) + replacePropertyPlaceHolderInGroupId(pom) + replacePropertyPlaceHolderInVersion(pom) +} + +func absorbDependencyManagement(pom *pom, dependencyManagementMap map[string]string, override bool) { + for key, value := range dependencyManagementMap { + updateDependencyManagement(pom, key, value, override) + } +} + +func getSimulatedEffectivePomFromMavenRepository(groupId string, artifactId string, version string) (pom, error) { + result, err := getSimulatedEffectivePomFromLocalMavenRepository(groupId, artifactId, version) + if err == nil { + return result, nil + } + return getSimulatedEffectivePomFromRemoteMavenRepository(groupId, artifactId, version) +} + +func getSimulatedEffectivePomFromLocalMavenRepository(groupId string, artifactId string, version string) (pom, error) { + pomPath, err := getPathInLocalMavenRepository(groupId, artifactId, 
version) + if err != nil { + return pom{}, err + } + return createSimulatedEffectivePom(pomPath) +} + +func getSimulatedEffectivePomFromRemoteMavenRepository(groupId string, artifactId string, version string) (pom, error) { + requestUrl := getRemoteMavenRepositoryUrl(groupId, artifactId, version) + bytes, err := internal.Download(requestUrl) + if err != nil { + return pom{}, err + } + savePomFileToLocalMavenRepository(groupId, artifactId, version, bytes) + return createSimulatedEffectivePomByPomFileBytes(bytes) +} + +func savePomFileToLocalMavenRepository(groupId string, artifactId string, version string, bytes []byte) { + pomPath, err := getPathInLocalMavenRepository(groupId, artifactId, version) + if err != nil { + slog.DebugContext(context.TODO(), "Failed to get pomPath.", + "groupId", groupId, "artifactId", artifactId, "version", version, "err", err) + return + } + dir := filepath.Dir(pomPath) + if err := os.MkdirAll(dir, 0755); err != nil { + slog.DebugContext(context.TODO(), "Failed to create pomPath.", + "groupId", groupId, "artifactId", artifactId, "version", version, "err", err) + return + } + err = os.WriteFile(pomPath, bytes, 0600) + if err != nil { + slog.DebugContext(context.TODO(), "Failed to write file.", "pomPath", pomPath, "err", err) + } +} + +func createSimulatedEffectivePomByPomFileBytes(bytes []byte) (pom, error) { + var result pom + if err := xml.Unmarshal(bytes, &result); err != nil { + return pom{}, fmt.Errorf("parsing xml: %w", err) + } + convertToSimulatedEffectivePom(&result) + for _, value := range result.dependencyManagementMap { + if isVariable(value) { + log.Printf("Unresolved property: value = %s\n", value) + } + } + return result, nil +} + +func getPathInLocalMavenRepository(groupId string, artifactId string, version string) (string, error) { + homeDir, err := os.UserHomeDir() + if err != nil { + return "", err + } + relativePath := makePathFitCurrentOs(relativePathInMavenRepository(groupId, artifactId, version)) + return filepath.Join(homeDir, ".m2", "repository", relativePath), nil +} + +func getRemoteMavenRepositoryUrl(groupId string, artifactId string, version string) string { + return fmt.Sprintf("https://repo.maven.apache.org/maven2/%s", + relativePathInMavenRepository(groupId, artifactId, version)) +} + +func relativePathInMavenRepository(groupId string, artifactId string, version string) string { + return fmt.Sprintf("%s/%s/%s/%s-%s.pom", + strings.ReplaceAll(groupId, ".", "/"), artifactId, version, artifactId, version) +} + +func unmarshalPomFromFilePath(pomFilePath string) (pom, error) { + bytes, err := os.ReadFile(pomFilePath) + if err != nil { + return pom{}, err + } + result, err := unmarshalPomFromBytes(bytes) + if err != nil { + return pom{}, err + } + result.pomFilePath = pomFilePath + return result, nil +} + +func setDefaultScopeForDependenciesInAllPlaces(pom *pom) { + setDefaultScopeForDependencies(pom.Dependencies) + setDefaultScopeForDependencies(pom.DependencyManagement.Dependencies) + for i := range pom.Profiles { + setDefaultScopeForDependencies(pom.Profiles[i].Dependencies) + setDefaultScopeForDependencies(pom.Profiles[i].DependencyManagement.Dependencies) + } +} + +func setDefaultScopeForDependencies(dependencies []dependency) { + for i := range dependencies { + if dependencies[i].Scope == "" { + dependencies[i].Scope = DependencyScopeCompile + } + } +} + +func unmarshalPomFromString(pomString string) (pom, error) { + return unmarshalPomFromBytes([]byte(pomString)) +} + +func unmarshalPomFromBytes(pomBytes []byte) (pom, error) { + 
var unmarshalledPom pom + if err := xml.Unmarshal(pomBytes, &unmarshalledPom); err != nil { + return pom{}, fmt.Errorf("parsing xml: %w", err) + } + return unmarshalledPom, nil +} + +func addCommonPropertiesLikeProjectGroupIdAndProjectVersionToPropertyMap(pom *pom) { + updatePropertyMap(pom.propertyMap, "project.groupId", pom.GroupId, false) + pomVersion := pom.Version + if pomVersion == "" { + pomVersion = pom.Parent.Version + } + updatePropertyMap(pom.propertyMap, "project.version", pomVersion, false) +} + +func createPropertyMapAccordingToProjectProperty(pom *pom) { + pom.propertyMap = make(map[string]string) // propertyMap only create once + for _, entry := range pom.Properties.Entries { + updatePropertyMap(pom.propertyMap, entry.XMLName.Local, entry.Value, false) + } + for i := range pom.Profiles { + pom.Profiles[i].propertyMap = make(map[string]string) + for _, entry := range pom.Profiles[i].Properties.Entries { + updatePropertyMap(pom.Profiles[i].propertyMap, entry.XMLName.Local, entry.Value, false) + } + } +} + +func updatePropertyMap(propertyMap map[string]string, key string, value string, override bool) { + if _, ok := propertyMap[key]; !override && ok { + return + } + propertyMap[key] = value +} + +func replacePropertyPlaceHolderInPropertyMap(pom *pom) { + for key, value := range pom.propertyMap { + if isVariable(value) { + variableName := getVariableName(value) + if variableValue, ok := pom.propertyMap[variableName]; ok { + pom.propertyMap[key] = variableValue + } + } + } +} + +func replacePropertyPlaceHolderInGroupId(pom *pom) { + replacePropertyPlaceHolderInDependenciesGroupId(pom.DependencyManagement.Dependencies, pom.propertyMap) + replacePropertyPlaceHolderInDependenciesGroupId(pom.Dependencies, pom.propertyMap) + replacePropertyPlaceHolderInPluginsGroupId(pom.Build.Plugins, pom.propertyMap) + for i := range pom.Profiles { + replacePropertyPlaceHolderInDependenciesGroupId(pom.Profiles[i].DependencyManagement.Dependencies, + pom.propertyMap) + replacePropertyPlaceHolderInDependenciesGroupId(pom.Profiles[i].Dependencies, pom.propertyMap) + replacePropertyPlaceHolderInPluginsGroupId(pom.Profiles[i].Build.Plugins, pom.propertyMap) + } +} + +func replacePropertyPlaceHolderInDependenciesGroupId(dependencies []dependency, propertyMap map[string]string) { + for i, dep := range dependencies { + if isVariable(dep.GroupId) { + variableName := getVariableName(dep.GroupId) + if variableValue, ok := propertyMap[variableName]; ok { + dependencies[i].GroupId = variableValue + } + } + } +} + +func replacePropertyPlaceHolderInPluginsGroupId(plugins []plugin, propertyMap map[string]string) { + for i, dep := range plugins { + if isVariable(dep.GroupId) { + variableName := getVariableName(dep.GroupId) + if variableValue, ok := propertyMap[variableName]; ok { + plugins[i].GroupId = variableValue + } + } + } +} + +func replacePropertyPlaceHolderInVersion(pom *pom) { + replacePropertyPlaceHolderInDependencyManagementVersion(pom.dependencyManagementMap, + pom.DependencyManagement.Dependencies, pom.propertyMap) + replacePropertyPlaceHolderInDependenciesVersion(pom.Dependencies, pom.propertyMap) + replacePropertyPlaceHolderInBuildPluginsVersion(pom.Build.Plugins, pom.propertyMap) + for i := range pom.Profiles { + replacePropertyPlaceHolderInDependencyManagementVersion(pom.Profiles[i].dependencyManagementMap, + pom.Profiles[i].DependencyManagement.Dependencies, pom.propertyMap) + replacePropertyPlaceHolderInDependenciesVersion(pom.Profiles[i].Dependencies, pom.propertyMap) + 
replacePropertyPlaceHolderInBuildPluginsVersion(pom.Profiles[i].Build.Plugins, pom.propertyMap) + } +} + +func replacePropertyPlaceHolderInDependencyManagementVersion(dependencyManagementMap map[string]string, + dependencies []dependency, propertyMap map[string]string) { + for key, value := range dependencyManagementMap { + if isVariable(value) { + variableName := getVariableName(value) + if variableValue, ok := propertyMap[variableName]; ok { + updateDependencyVersionInDependencyManagement(dependencyManagementMap, + dependencies, key, variableValue) + } + } + } +} + +func replacePropertyPlaceHolderInDependenciesVersion(dependencies []dependency, propertyMap map[string]string) { + for i, dep := range dependencies { + if isVariable(dep.Version) { + variableName := getVariableName(dep.Version) + if variableValue, ok := propertyMap[variableName]; ok { + dependencies[i].Version = variableValue + } + } + } +} + +func replacePropertyPlaceHolderInBuildPluginsVersion(plugins []plugin, propertyMap map[string]string) { + for i, dep := range plugins { + if isVariable(dep.Version) { + variableName := getVariableName(dep.Version) + if variableValue, ok := propertyMap[variableName]; ok { + plugins[i].Version = variableValue + } + } + } +} + +const variablePrefix = "${" +const variableSuffix = "}" + +func isVariable(value string) bool { + return strings.HasPrefix(value, variablePrefix) && strings.HasSuffix(value, variableSuffix) +} + +func getVariableName(value string) string { + return strings.TrimSuffix(strings.TrimPrefix(value, variablePrefix), variableSuffix) +} + +func toDependencyManagementMapKey(dependency dependency) string { + return fmt.Sprintf("%s:%s:%s", dependency.GroupId, dependency.ArtifactId, dependency.Scope) +} + +func createDependencyFromDependencyManagementMapKeyAndVersion(key string, version string) dependency { + parts := strings.Split(key, ":") + if len(parts) != 3 { + return dependency{} + } + return dependency{parts[0], parts[1], version, parts[2]} +} + +func createDependencyManagementMap(pom *pom) { + pom.dependencyManagementMap = make(map[string]string) // dependencyManagementMap only create once + for _, dep := range pom.DependencyManagement.Dependencies { + pom.dependencyManagementMap[toDependencyManagementMapKey(dep)] = dep.Version + } + for i := range pom.Profiles { + pom.Profiles[i].dependencyManagementMap = make(map[string]string) + for _, dep := range pom.Profiles[i].DependencyManagement.Dependencies { + pom.Profiles[i].dependencyManagementMap[toDependencyManagementMapKey(dep)] = dep.Version + } + } +} + +func updateDependencyManagement(pom *pom, key string, value string, override bool) { + if value == "" { + log.Printf("error: add dependency management without version") + return + } + if _, alreadyExist := pom.dependencyManagementMap[key]; !override && alreadyExist { + return + } + // always make sure DependencyManagement and dependencyManagementMap synced + pom.dependencyManagementMap[key] = value + pom.DependencyManagement.Dependencies = append(pom.DependencyManagement.Dependencies, + createDependencyFromDependencyManagementMapKeyAndVersion(key, value)) +} + +// always make sure DependencyManagement and dependencyManagementMap synced +func updateDependencyVersionInDependencyManagement(dependencyManagementMap map[string]string, + dependencies []dependency, key string, value string) { + dependencyManagementMap[key] = value + for i, dep := range dependencies { + currentKey := toDependencyManagementMapKey(dep) + if currentKey == key { + dependencies[i].Version = value + } 
+ } +} + +func updateDependencyVersionAccordingToDependencyManagement(pom *pom) { + for i, dep := range pom.Dependencies { + if strings.TrimSpace(dep.Version) != "" { + continue + } + key := toDependencyManagementMapKey(dep) + if managedVersion, ok := pom.dependencyManagementMap[key]; ok { + pom.Dependencies[i].Version = managedVersion + } else if dep.Scope == DependencyScopeTest { + dep.Scope = DependencyScopeCompile + key = toDependencyManagementMapKey(dep) + if managedVersion, ok = pom.dependencyManagementMap[key]; ok { + pom.Dependencies[i].Version = managedVersion + } + } + } +} + +func createEffectivePom(ctx context.Context, mvnCli *maven.Cli, pomPath string) (pom, error) { + effectivePom, err := mvnCli.EffectivePom(ctx, pomPath) + if err != nil { + return pom{}, err + } + var resultPom pom + err = xml.Unmarshal([]byte(effectivePom), &resultPom) + return resultPom, err +} + +func fileExists(path string) bool { + if path == "" { + return false + } + if _, err := os.Stat(path); err == nil { + return true + } else { + return false + } +} diff --git a/cli/azd/internal/appdetect/pom_test.go b/cli/azd/internal/appdetect/pom_test.go new file mode 100644 index 00000000000..ebcafd2a9e9 --- /dev/null +++ b/cli/azd/internal/appdetect/pom_test.go @@ -0,0 +1,2246 @@ +package appdetect + +import ( + "context" + "log/slog" + "os" + os_exec "os/exec" + "path/filepath" + "reflect" + "strings" + "testing" + + "github.com/azure/azure-dev/cli/azd/pkg/exec" + "github.com/azure/azure-dev/cli/azd/pkg/tools/maven" +) + +func TestCreateEffectivePom(t *testing.T) { + path, err := os_exec.LookPath("java") + if err != nil { + t.Skip("Skip TestCreateEffectivePom because java command doesn't exist.") + } else { + slog.Info("Java command found.", "path", path) + } + tests := []struct { + name string + testPoms []testPom + expected []dependency + }{ + { + name: "Test with two dependencies", + testPoms: []testPom{ + { + pomFilePath: "pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project + 1.0.0 + + + org.springframework + spring-core + 5.3.8 + compile + + + junit + junit + 4.13.2 + test + + + + `, + }, + }, + expected: []dependency{ + { + GroupId: "org.springframework", + ArtifactId: "spring-core", + Version: "5.3.8", + Scope: "compile", + }, + { + GroupId: "junit", + ArtifactId: "junit", + Version: "4.13.2", + Scope: "test", + }, + }, + }, + { + name: "Test with no dependencies", + testPoms: []testPom{ + { + pomFilePath: "pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project + 1.0.0 + + + + `, + }, + }, + expected: []dependency{}, + }, + { + name: "Test with one dependency which version is decided by dependencyManagement", + testPoms: []testPom{ + { + pomFilePath: "pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project + 1.0.0 + + + org.slf4j + slf4j-api + + + + + + org.springframework.boot + spring-boot-dependencies + 3.0.0 + pom + import + + + + + `, + }, + }, + expected: []dependency{ + { + GroupId: "org.slf4j", + ArtifactId: "slf4j-api", + Version: "2.0.4", + Scope: "compile", + }, + }, + }, + { + name: "Test with one dependency which version is decided by parent", + testPoms: []testPom{ + { + pomFilePath: "pom.xml", + pomContentString: ` + + + org.springframework.boot + spring-boot-starter-parent + 3.0.0 + + + 4.0.0 + com.example + example-project + 1.0.0 + + + org.slf4j + slf4j-api + + + + `, + }, + }, + expected: []dependency{ + { + GroupId: "org.slf4j", + ArtifactId: "slf4j-api", + Version: "2.0.4", + Scope: "compile", + }, + }, + }, + } + + for _, 
tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + workingDir, err := prepareTestPomFiles(tt.testPoms) + if err != nil { + t.Fatalf("%v", err) + } + for _, testPom := range tt.testPoms { + pomFilePath := filepath.Join(workingDir, testPom.pomFilePath) + + effectivePom, err := createEffectivePom(context.TODO(), maven.NewCli(exec.NewCommandRunner(nil)), + pomFilePath) + if err != nil { + t.Fatalf("createEffectivePom failed: %v", err) + } + + if len(effectivePom.Dependencies) != len(tt.expected) { + t.Fatalf("Expected: %d\nActual: %d", len(tt.expected), len(effectivePom.Dependencies)) + } + + for i, dep := range effectivePom.Dependencies { + if dep != tt.expected[i] { + t.Errorf("\nExpected: %s\nActual: %s", tt.expected[i], dep) + } + } + } + }) + } +} + +func TestCreatePropertyMapAccordingToProjectProperty(t *testing.T) { + tests := []struct { + name string + pomString string + expected map[string]string + }{ + { + name: "Test createPropertyMapAccordingToProjectProperty", + pomString: ` + + 4.0.0 + com.example + example-project + 1.0.0 + + 3.3.5 + 2023.0.3 + 5.18.0 + + + `, + expected: map[string]string{ + "version.spring.boot": "3.3.5", + "version.spring.cloud": "2023.0.3", + "version.spring.cloud.azure": "5.18.0", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + pom, err := unmarshalPomFromString(tt.pomString) + if err != nil { + t.Fatalf("Failed to unmarshal string: %v", err) + } + createPropertyMapAccordingToProjectProperty(&pom) + if !reflect.DeepEqual(pom.propertyMap, tt.expected) { + t.Fatalf("\nExpected: %s\nActual: %s", tt.expected, pom.propertyMap) + } + }) + } +} + +func TestReplacePropertyPlaceHolder(t *testing.T) { + var tests = []struct { + name string + inputPom pom + expected pom + }{ + { + name: "Test replacePropertyPlaceHolder", + inputPom: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactId", + Version: "1.0.0", + DependencyManagement: dependencyManagement{ + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "${version.spring.boot}", + Scope: DependencyScopeCompile, + }, + }, + }, + Dependencies: []dependency{ + { + GroupId: "groupIdTwo", + ArtifactId: "artifactIdTwo", + Version: "${version.spring.cloud}", + Scope: DependencyScopeCompile, + }, + { + GroupId: "${project.groupId}", + ArtifactId: "artifactIdThree", + Version: "${project.version}", + Scope: DependencyScopeCompile, + }, + }, + Build: build{ + Plugins: []plugin{ + { + GroupId: "groupIdFour", + ArtifactId: "artifactIdFour", + Version: "${version.spring.cloud.azure}", + }, + }, + }, + propertyMap: map[string]string{ + "version.spring.boot": "3.3.5", + "version.spring.cloud": "2023.0.3", + "version.spring.cloud.azure": "5.18.0", + "another.property": "${version.spring.cloud.azure}", + }, + dependencyManagementMap: map[string]string{ + "groupIdOne:artifactIdOne:compile": "${version.spring.boot}", + }, + }, + expected: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactId", + Version: "1.0.0", + DependencyManagement: dependencyManagement{ + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "3.3.5", + Scope: DependencyScopeCompile, + }, + }, + }, + Dependencies: []dependency{ + { + GroupId: "groupIdTwo", + ArtifactId: "artifactIdTwo", + Version: "2023.0.3", + Scope: DependencyScopeCompile, + }, + { + GroupId: "sampleGroupId", + ArtifactId: "artifactIdThree", + Version: "1.0.0", + Scope: DependencyScopeCompile, + }, + }, + Build: build{ + 
Plugins: []plugin{ + { + GroupId: "groupIdFour", + ArtifactId: "artifactIdFour", + Version: "5.18.0", + }, + }, + }, + propertyMap: map[string]string{ + "version.spring.boot": "3.3.5", + "version.spring.cloud": "2023.0.3", + "version.spring.cloud.azure": "5.18.0", + "another.property": "5.18.0", + "project.groupId": "sampleGroupId", + "project.version": "1.0.0", + }, + dependencyManagementMap: map[string]string{ + "groupIdOne:artifactIdOne:compile": "3.3.5", + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + addCommonPropertiesLikeProjectGroupIdAndProjectVersionToPropertyMap(&tt.inputPom) + replacePropertyPlaceHolderInPropertyMap(&tt.inputPom) + replacePropertyPlaceHolderInGroupId(&tt.inputPom) + createDependencyManagementMap(&tt.inputPom) + replacePropertyPlaceHolderInVersion(&tt.inputPom) + if !reflect.DeepEqual(tt.inputPom, tt.expected) { + t.Fatalf("\nExpected: %s\nActual: %s", tt.expected, tt.inputPom) + } + }) + } +} + +func TestCreateDependencyManagementMap(t *testing.T) { + var tests = []struct { + name string + inputPom pom + expected pom + }{ + { + name: "Test createDependencyManagementMap", + inputPom: pom{ + DependencyManagement: dependencyManagement{ + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "1.0.0", + Scope: DependencyScopeCompile, + }, + }, + }, + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Scope: DependencyScopeCompile, + }, + }, + }, + expected: pom{ + DependencyManagement: dependencyManagement{ + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "1.0.0", + Scope: DependencyScopeCompile, + }, + }, + }, + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Scope: DependencyScopeCompile, + }, + }, + dependencyManagementMap: map[string]string{ + "groupIdOne:artifactIdOne:compile": "1.0.0", + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + createDependencyManagementMap(&tt.inputPom) + if !reflect.DeepEqual(tt.inputPom, tt.expected) { + t.Fatalf("\nExpected: %s\nActual: %s", tt.expected, tt.inputPom) + } + }) + } +} + +func TestUpdateDependencyVersionAccordingToDependencyManagement(t *testing.T) { + var tests = []struct { + name string + inputPom pom + expected pom + }{ + { + name: "Test updateDependencyVersionAccordingToDependencyManagement", + inputPom: pom{ + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Scope: DependencyScopeCompile, + }, + }, + dependencyManagementMap: map[string]string{ + "groupIdOne:artifactIdOne:compile": "1.0.0", + }, + }, + expected: pom{ + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "1.0.0", + Scope: DependencyScopeCompile, + }, + }, + dependencyManagementMap: map[string]string{ + "groupIdOne:artifactIdOne:compile": "1.0.0", + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + updateDependencyVersionAccordingToDependencyManagement(&tt.inputPom) + if !reflect.DeepEqual(tt.inputPom, tt.expected) { + t.Fatalf("\nExpected: %s\nActual: %s", tt.expected, tt.inputPom) + } + }) + } +} + +func TestGetRemoteMavenRepositoryUrl(t *testing.T) { + var tests = []struct { + name string + groupId string + artifactId string + version string + expected string + }{ + { + name: "spring-boot-starter-parent", + groupId: "org.springframework.boot", + artifactId: 
"spring-boot-starter-parent", + version: "3.4.0", + expected: "https://repo.maven.apache.org/maven2/org/springframework/boot/spring-boot-starter-parent/3.4.0/" + + "spring-boot-starter-parent-3.4.0.pom", + }, + { + name: "spring-boot-dependencies", + groupId: "org.springframework.boot", + artifactId: "spring-boot-dependencies", + version: "3.4.0", + expected: "https://repo.maven.apache.org/maven2/org/springframework/boot/spring-boot-dependencies/3.4.0/" + + "spring-boot-dependencies-3.4.0.pom", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := getRemoteMavenRepositoryUrl(tt.groupId, tt.artifactId, tt.version) + if !reflect.DeepEqual(actual, tt.expected) { + t.Fatalf("\nExpected: %s\nActual: %s", tt.expected, actual) + } + }) + } +} + +func TestGetSimulatedEffectivePomFromRemoteMavenRepository(t *testing.T) { + var tests = []struct { + name string + groupId string + artifactId string + version string + expected int + }{ + { + name: "spring-boot-starter-parent", + groupId: "org.springframework.boot", + artifactId: "spring-boot-starter-parent", + version: "3.4.0", + expected: 1496, + }, + { + name: "spring-boot-dependencies", + groupId: "org.springframework.boot", + artifactId: "spring-boot-dependencies", + version: "3.4.0", + expected: 1496, + }, + { + name: "kotlin-bom", + groupId: "org.jetbrains.kotlin", + artifactId: "kotlin-bom", + version: "1.9.25", + expected: 23, + }, + { + name: "infinispan-bom", + groupId: "org.infinispan", + artifactId: "infinispan-bom", + version: "15.0.11.Final", + expected: 65, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + pom, err := getSimulatedEffectivePomFromRemoteMavenRepository(tt.groupId, tt.artifactId, tt.version) + if err != nil { + t.Fatalf("Failed to create temp directory: %v", err) + } + for _, value := range pom.dependencyManagementMap { + if isVariable(value) { + t.Fatalf("Unresolved property: value = %s", value) + } + } + actual := len(pom.dependencyManagementMap) + if !reflect.DeepEqual(actual, tt.expected) { + t.Fatalf("\nExpected: %d\nActual: %d", tt.expected, actual) + } + }) + } +} + +func TestMakePathFitCurrentOs(t *testing.T) { + var tests = []struct { + name string + input string + }{ + { + name: "linux", + input: "/home/user/example/file.txt", + }, + { + name: "windows", + input: "C:\\Users\\example\\Work", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := makePathFitCurrentOs(tt.input) + strings.Contains(actual, string(os.PathSeparator)) + }) + } +} + +func TestGetParentPomFilePath(t *testing.T) { + var tests = []struct { + name string + input pom + expected string + }{ + { + name: "relativePath not set", + input: pom{ + pomFilePath: "/home/user/example-user/" + + "example-project-grandparent/example-project-parent/example-project-module-one/pom.xml", + }, + expected: makePathFitCurrentOs("/home/user/example-user/" + + "example-project-grandparent/example-project-parent/pom.xml"), + }, + { + name: "relativePath set to grandparent folder", + input: pom{ + pomFilePath: "/home/user/example-user/" + + "example-project-grandparent/example-project-parent/example-project-module-one/pom.xml", + Parent: parent{ + RelativePath: "../../pom.xml", + }, + }, + expected: makePathFitCurrentOs("/home/user/example-user/example-project-grandparent/pom.xml"), + }, + { + name: "relativePath set to another file name", + input: pom{ + pomFilePath: "/home/user/example-user/" + + 
"example-project-grandparent/example-project-parent/example-project-module-one/pom.xml", + Parent: parent{ + RelativePath: "../another-pom.xml", + }, + }, + expected: makePathFitCurrentOs("/home/user/example-user/" + + "example-project-grandparent/example-project-parent/another-pom.xml"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := getParentPomFilePath(tt.input) + if !reflect.DeepEqual(actual, tt.expected) { + t.Fatalf("\nExpected: %s\nActual: %s", tt.expected, actual) + } + }) + } +} + +func TestAbsorbPropertyMap(t *testing.T) { + var tests = []struct { + name string + input pom + toBeAbsorbedPom pom + expected pom + }{ + { + name: "relativePath not set", + input: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactId", + Version: "1.0.0", + DependencyManagement: dependencyManagement{ + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "${version.spring.boot}", + Scope: DependencyScopeCompile, + }, + }, + }, + Dependencies: []dependency{ + { + GroupId: "groupIdTwo", + ArtifactId: "artifactIdTwo", + Version: "${version.spring.cloud}", + Scope: DependencyScopeCompile, + }, + { + GroupId: "groupIdThree", + ArtifactId: "artifactIdThree", + Version: "${another.property}", + Scope: DependencyScopeCompile, + }, + }, + Build: build{ + Plugins: []plugin{ + { + GroupId: "groupIdFour", + ArtifactId: "artifactIdFour", + Version: "${version.spring.cloud.azure}", + }, + }, + }, + propertyMap: map[string]string{ + "another.property": "${version.spring.cloud.azure}", + }, + dependencyManagementMap: map[string]string{ + "groupIdOne:artifactIdOne:compile": "${version.spring.boot}", + }, + }, + toBeAbsorbedPom: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactIdToBeAbsorbed", + Version: "1.0.0", + propertyMap: map[string]string{ + "version.spring.boot": "3.3.5", + "version.spring.cloud": "2023.0.3", + "version.spring.cloud.azure": "5.18.0", + }, + }, + expected: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactId", + Version: "1.0.0", + DependencyManagement: dependencyManagement{ + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "3.3.5", + Scope: DependencyScopeCompile, + }, + }, + }, + Dependencies: []dependency{ + { + GroupId: "groupIdTwo", + ArtifactId: "artifactIdTwo", + Version: "2023.0.3", + Scope: DependencyScopeCompile, + }, + { + GroupId: "groupIdThree", + ArtifactId: "artifactIdThree", + Version: "5.18.0", + Scope: DependencyScopeCompile, + }, + }, + Build: build{ + Plugins: []plugin{ + { + GroupId: "groupIdFour", + ArtifactId: "artifactIdFour", + Version: "5.18.0", + }, + }, + }, + propertyMap: map[string]string{ + "version.spring.boot": "3.3.5", + "version.spring.cloud": "2023.0.3", + "version.spring.cloud.azure": "5.18.0", + "another.property": "5.18.0", + }, + dependencyManagementMap: map[string]string{ + "groupIdOne:artifactIdOne:compile": "3.3.5", + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + absorbPropertyMap(&tt.input, tt.toBeAbsorbedPom.propertyMap, false) + if !reflect.DeepEqual(tt.input, tt.expected) { + t.Fatalf("\nExpected: %s\nActual: %s", tt.expected, tt.input) + } + }) + } +} + +func TestAbsorbDependencyManagement(t *testing.T) { + var tests = []struct { + name string + input pom + toBeAbsorbedPom pom + expected pom + }{ + { + name: "test absorbDependencyManagement", + input: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactId", + Version: "1.0.0", + 
Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Scope: "compile", + }, + }, + dependencyManagementMap: map[string]string{}, + }, + toBeAbsorbedPom: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactIdToBeAbsorbed", + Version: "1.0.0", + DependencyManagement: dependencyManagement{ + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "1.0.0", + Scope: "compile", + }, + }, + }, + dependencyManagementMap: map[string]string{ + "groupIdOne:artifactIdOne:compile": "1.0.0", + }, + }, + expected: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactId", + Version: "1.0.0", + DependencyManagement: dependencyManagement{ + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "1.0.0", + Scope: "compile", + }, + }, + }, + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Scope: "compile", + }, + }, + dependencyManagementMap: map[string]string{ + "groupIdOne:artifactIdOne:compile": "1.0.0", + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + absorbDependencyManagement(&tt.input, tt.toBeAbsorbedPom.dependencyManagementMap, false) + if !reflect.DeepEqual(tt.input, tt.expected) { + t.Fatalf("\nExpected: %s\nActual: %s", tt.expected, tt.input) + } + }) + } +} + +func TestAbsorbDependency(t *testing.T) { + var tests = []struct { + name string + input pom + toBeAbsorbedPom pom + expected pom + }{ + { + name: "absorb 2 dependencies", + input: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactId", + Version: "1.0.0", + Dependencies: []dependency{}, + }, + toBeAbsorbedPom: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactIdToBeAbsorbed", + Version: "1.0.0", + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "1.0.0", + Scope: "compile", + }, + { + GroupId: "groupIdTwo", + ArtifactId: "artifactIdTwo", + Version: "1.0.0", + Scope: "test", + }, + }, + }, + expected: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactId", + Version: "1.0.0", + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "1.0.0", + Scope: "compile", + }, + { + GroupId: "groupIdTwo", + ArtifactId: "artifactIdTwo", + Version: "1.0.0", + Scope: "test", + }, + }, + }, + }, + { + name: "absorb 1 dependency and skip 1 dependency", + input: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactId", + Version: "1.0.0", + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "2.0.0", + Scope: "compile", + }, + }, + }, + toBeAbsorbedPom: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactIdToBeAbsorbed", + Version: "1.0.0", + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "1.0.0", + Scope: "compile", + }, + { + GroupId: "groupIdTwo", + ArtifactId: "artifactIdTwo", + Version: "1.0.0", + Scope: "test", + }, + }, + }, + expected: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactId", + Version: "1.0.0", + Dependencies: []dependency{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "2.0.0", // keep original value + Scope: "compile", + }, + { + GroupId: "groupIdTwo", + ArtifactId: "artifactIdTwo", + Version: "1.0.0", + Scope: "test", + }, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + 
absorbDependencies(&tt.input, tt.toBeAbsorbedPom.Dependencies) + if !reflect.DeepEqual(tt.input, tt.expected) { + t.Fatalf("\nExpected: %s\nActual: %s", tt.expected, tt.input) + } + }) + } +} + +func TestAbsorbBuildPlugin(t *testing.T) { + var tests = []struct { + name string + input pom + toBeAbsorbedPom pom + expected pom + }{ + { + name: "absorb 2 plugins", + input: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactId", + Version: "1.0.0", + }, + toBeAbsorbedPom: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactIdToBeAbsorbed", + Version: "1.0.0", + Build: build{ + Plugins: []plugin{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "1.0.0", + }, + { + GroupId: "groupIdTwo", + ArtifactId: "artifactIdTwo", + Version: "1.0.0", + }, + }, + }, + }, + expected: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactId", + Version: "1.0.0", + Build: build{ + Plugins: []plugin{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "1.0.0", + }, + { + GroupId: "groupIdTwo", + ArtifactId: "artifactIdTwo", + Version: "1.0.0", + }, + }, + }, + }, + }, + { + name: "absorb 1 plugin and skip 1 plugin", + input: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactId", + Version: "1.0.0", + Build: build{ + Plugins: []plugin{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "2.0.0", + }, + }, + }, + }, + toBeAbsorbedPom: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactIdToBeAbsorbed", + Version: "1.0.0", + Build: build{ + Plugins: []plugin{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "1.0.0", + }, + { + GroupId: "groupIdTwo", + ArtifactId: "artifactIdTwo", + Version: "1.0.0", + }, + }, + }, + }, + expected: pom{ + GroupId: "sampleGroupId", + ArtifactId: "sampleArtifactId", + Version: "1.0.0", + Build: build{ + Plugins: []plugin{ + { + GroupId: "groupIdOne", + ArtifactId: "artifactIdOne", + Version: "2.0.0", // keep original value + }, + { + GroupId: "groupIdTwo", + ArtifactId: "artifactIdTwo", + Version: "1.0.0", + }, + }, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + absorbBuildPlugins(&tt.input, tt.toBeAbsorbedPom.Build.Plugins) + if !reflect.DeepEqual(tt.input, tt.expected) { + t.Fatalf("\nExpected: %s\nActual: %s", tt.expected, tt.input) + } + }) + } +} + +func TestCreateSimulatedEffectivePom(t *testing.T) { + if os.Getenv("GITHUB_ACTIONS") == "true" { + t.Skip("Skip TestCreateSimulatedEffectivePom in GitHub Actions because it will time out.") + } + var tests = []struct { + name string + testPoms []testPom + }{ + { + name: "No parent", + testPoms: []testPom{ + { + pomFilePath: "pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project + 1.0.0 + + + org.springframework + spring-core + 5.3.8 + compile + + + junit + junit + 4.13.2 + test + + + + `, + }, + }, + }, + { + name: "Self-defined parent", + testPoms: []testPom{ + { + pomFilePath: "./pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-parent + 1.0.0 + pom + + + + org.springframework + spring-core + 5.3.8 + compile + + + junit + junit + 4.13.2 + test + + + + + `, + }, + { + pomFilePath: "./module-one/pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-module-one + 1.0.0 + + com.example + example-project-parent + 1.0.0 + ../pom.xml + + + + org.springframework + spring-core + compile + + + junit + junit + test + + + + `, + }, + }, + }, + { + name: "S-defined parent in grandparent folder", + testPoms: 
[]testPom{ + { + pomFilePath: "./pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-parent + 1.0.0 + pom + + + + org.springframework + spring-core + 5.3.8 + compile + + + junit + junit + 4.13.2 + test + + + + + `, + }, + { + pomFilePath: "./modules/module-one/pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-module-one + 1.0.0 + + com.example + example-project-parent + 1.0.0 + ../../pom.xml + + + + org.springframework + spring-core + compile + + + junit + junit + test + + + + `, + }, + }, + }, + { + name: "Set spring-boot-starter-parent as parent", + testPoms: []testPom{ + { + pomFilePath: "./pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-grandparent + 1.0.0 + pom + + org.springframework.boot + spring-boot-starter-parent + 3.0.0 + + + + + org.springframework + spring-core + compile + + + junit + junit + test + + + + `, + }, + }, + }, + { + name: "Set spring-boot-starter-parent as grandparent's parent", + testPoms: []testPom{ + { + pomFilePath: "./pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-grandparent + 1.0.0 + pom + + org.springframework.boot + spring-boot-starter-parent + 3.0.0 + + + + `, + }, + { + pomFilePath: "./modules/pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-parent + 1.0.0 + pom + + com.example + example-project-grandparent + 1.0.0 + ../pom.xml + + + `, + }, + { + pomFilePath: "./modules/module-one/pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-module-one + 1.0.0 + + com.example + example-project-parent + 1.0.0 + ../pom.xml + + + + org.springframework + spring-core + compile + + + junit + junit + test + + + + `, + }, + }, + }, + { + name: "Import spring-boot-dependencies in grandparent", + testPoms: []testPom{ + { + pomFilePath: "./pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-grandparent + 1.0.0 + pom + + + + org.springframework.boot + spring-boot-dependencies + 3.0.0 + pom + import + + + + + `, + }, + { + pomFilePath: "./modules/pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-parent + 1.0.0 + pom + + com.example + example-project-grandparent + 1.0.0 + ../pom.xml + + + `, + }, + { + pomFilePath: "./modules/module-one/pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-module-one + 1.0.0 + + com.example + example-project-parent + 1.0.0 + ../pom.xml + + + + org.springframework + spring-core + compile + + + junit + junit + test + + + + `, + }, + }, + }, + { + name: "Override version in dependencies", + testPoms: []testPom{ + { + pomFilePath: "./pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-grandparent + 1.0.0 + pom + + + + org.springframework.boot + spring-boot-dependencies + 3.0.0 + pom + import + + + + + `, + }, + { + pomFilePath: "./modules/pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-parent + 1.0.0 + pom + + com.example + example-project-grandparent + 1.0.0 + ../pom.xml + + + `, + }, + { + pomFilePath: "./modules/module-one/pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-module-one + 1.0.0 + + com.example + example-project-parent + 1.0.0 + ../pom.xml + + + + org.springframework + spring-core + compile + + + junit + junit + 4.13.0 + test + + + + `, + }, + }, + }, + { + name: "Override version in dependencyManagement", + testPoms: []testPom{ + { + pomFilePath: "./pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-grandparent + 1.0.0 + pom + + 
+ + org.springframework.boot + spring-boot-dependencies + 3.0.0 + pom + import + + + + + `, + }, + { + pomFilePath: "./modules/pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-parent + 1.0.0 + pom + + com.example + example-project-grandparent + 1.0.0 + ../pom.xml + + + `, + }, + { + pomFilePath: "./modules/module-one/pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-module-one + 1.0.0 + + com.example + example-project-parent + 1.0.0 + ../pom.xml + + + + + junit + junit + 4.13.0 + test + + + + + + org.springframework + spring-core + compile + + + junit + junit + test + + + + `, + }, + }, + }, + { + name: "Version different in dependencyManagement of grandparent & parent & leaf pom", + testPoms: []testPom{ + { + pomFilePath: "./pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-grandparent + 1.0.0 + pom + + + + org.springframework.boot + spring-boot-dependencies + 3.0.0 + pom + import + + + + + `, + }, + { + pomFilePath: "./modules/pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-parent + 1.0.0 + pom + + com.example + example-project-grandparent + 1.0.0 + ../pom.xml + + + + + junit + junit + 4.13.1 + test + + + + + `, + }, + { + pomFilePath: "./modules/module-one/pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-module-one + 1.0.0 + + com.example + example-project-parent + 1.0.0 + ../pom.xml + + + + + junit + junit + 4.13.0 + test + + + + + + org.springframework + spring-core + compile + + + junit + junit + test + + + + `, + }, + }, + }, + { + name: "Scope not set in leaf pom", + testPoms: []testPom{ + { + pomFilePath: "./pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-grandparent + 1.0.0 + pom + + + + org.springframework.boot + spring-boot-dependencies + 3.0.0 + pom + import + + + + + `, + }, + { + pomFilePath: "./modules/pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-parent + 1.0.0 + pom + + com.example + example-project-grandparent + 1.0.0 + ../pom.xml + + + `, + }, + { + pomFilePath: "./modules/module-one/pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-module-one + 1.0.0 + + com.example + example-project-parent + 1.0.0 + ../pom.xml + + + + org.springframework + spring-core + + + junit + junit + test + + + + `, + }, + }, + }, + { + name: "Set spring-boot-maven-plugin in grandparent", + testPoms: []testPom{ + { + pomFilePath: "./pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-grandparent + 1.0.0 + pom + + 3.3.5 + + + + + org.springframework.boot + spring-boot-dependencies + ${version.spring.boot} + pom + import + + + + + + + org.springframework.boot + spring-boot-maven-plugin + ${version.spring.boot} + + + + repackage + + + + + + + + `, + }, + { + pomFilePath: "./modules/pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-parent + 1.0.0 + pom + + com.example + example-project-grandparent + 1.0.0 + ../pom.xml + + + `, + }, + { + pomFilePath: "./modules/module-one/pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project-module-one + 1.0.0 + + com.example + example-project-parent + 1.0.0 + ../pom.xml + + + + org.springframework + spring-core + + + junit + junit + test + + + + `, + }, + }, + }, + { + name: "Set profiles and set activeByDefault = true", + testPoms: []testPom{ + { + pomFilePath: "./pom.xml", + pomContentString: ` + + 4.0.0 + + + org.springframework.boot + spring-boot-starter-parent + 3.2.3 + + + com.example + 
example-project + 1.0.0 + + + 2023.0.0 + + + + + + org.springframework.cloud + spring-cloud-dependencies + ${spring-cloud.version} + pom + import + + + + + + + default + + true + + + + org.springframework.cloud + spring-cloud-starter-netflix-eureka-client + + + + + + `, + }, + }, + }, + { + name: "Set profiles and set activeByDefault = false", + testPoms: []testPom{ + { + pomFilePath: "./pom.xml", + pomContentString: ` + + 4.0.0 + + + org.springframework.boot + spring-boot-starter-parent + 3.2.3 + + + com.example + example-project + 1.0.0 + + + 2023.0.0 + + + + + + org.springframework.cloud + spring-cloud-dependencies + ${spring-cloud.version} + pom + import + + + + + + + default + + false + + + + org.springframework.cloud + spring-cloud-starter-netflix-eureka-client + + + + + + `, + }, + }, + }, + { + name: "Override properties in profile", + testPoms: []testPom{ + { + pomFilePath: "./pom.xml", + pomContentString: ` + + 4.0.0 + + + org.springframework.boot + spring-boot-starter-parent + 3.2.3 + + + com.example + example-project + 1.0.0 + + + 2023.0.0 + + + + + + org.springframework.cloud + spring-cloud-dependencies + ${spring-cloud.version} + pom + import + + + + + + + default + + true + + + 2023.0.4 + + + + org.springframework.cloud + spring-cloud-starter-netflix-eureka-client + + + + + + `, + }, + }, + }, + { + name: "Add build section in profile", + testPoms: []testPom{ + { + pomFilePath: "./pom.xml", + pomContentString: ` + + 4.0.0 + + + org.springframework.boot + spring-boot-starter-parent + 3.3.5 + + + com.example + example-project + 1.0.0 + + + + default + + true + + + + + org.springframework.boot + spring-boot-maven-plugin + 3.3.5 + + + + repackage + + + + + + + + + + `, + }, + }, + }, + { + name: "Add dependencyManagement section in profile", + testPoms: []testPom{ + { + pomFilePath: "./pom.xml", + pomContentString: ` + + 4.0.0 + + com.example + example-project + 1.0.0 + + + + + org.springframework.boot + spring-boot-dependencies + 3.0.0 + pom + import + + + + + + org.springframework + spring-core + compile + + + junit + junit + test + + + + + + default + + true + + + + + org.springframework + spring-core + 5.3.8 + compile + + + junit + junit + 4.13.2 + test + + + + + + + `, + }, + }, + }, + { + name: "Add dependencyManagement and dependencies section in profile", + testPoms: []testPom{ + { + pomFilePath: "./pom.xml", + pomContentString: ` + + 4.0.0 + + com.example + example-project + 1.0.0 + + + + + org.springframework.boot + spring-boot-dependencies + 3.0.0 + pom + import + + + + + + + default + + true + + + + + org.springframework + spring-core + 5.3.8 + compile + + + junit + junit + 4.13.2 + test + + + + + + org.springframework + spring-core + compile + + + junit + junit + test + + + + + + `, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + workingDir, err := prepareTestPomFiles(tt.testPoms) + if err != nil { + t.Fatalf("%v", err) + } + for _, testPom := range tt.testPoms { + pomFilePath := filepath.Join(workingDir, testPom.pomFilePath) + effectivePom, err := createEffectivePom(context.TODO(), maven.NewCli(exec.NewCommandRunner(nil)), + pomFilePath) + if err != nil { + t.Fatalf("%v", err) + } + simulatedEffectivePom, err := createSimulatedEffectivePom(pomFilePath) + if err != nil { + t.Fatalf("%v", err) + } + if !reflect.DeepEqual(effectivePom.Dependencies, simulatedEffectivePom.Dependencies) { + t.Fatalf("\neffectivePom.Dependencies: %s\nsimulatedEffectivePom.Dependencies: %s", + effectivePom.Dependencies, 
simulatedEffectivePom.Dependencies) + } + removeDefaultMavenPluginsInEffectivePom(&effectivePom) + if !reflect.DeepEqual(effectivePom.Build.Plugins, simulatedEffectivePom.Build.Plugins) { + t.Fatalf("\neffectivePom.Build.Plugins: %s\nsimulatedEffectivePom.Build.Plugins: %s", + effectivePom.Build.Plugins, simulatedEffectivePom.Build.Plugins) + } + } + }) + } +} + +func removeDefaultMavenPluginsInEffectivePom(effectivePom *pom) { + var newPlugins []plugin + for _, plugin := range effectivePom.Build.Plugins { + if strings.HasPrefix(plugin.ArtifactId, "maven-") && + strings.HasSuffix(plugin.ArtifactId, "-plugin") { + continue + } + newPlugins = append(newPlugins, plugin) + } + effectivePom.Build.Plugins = newPlugins +} + +type testPom struct { + pomFilePath string + pomContentString string +} + +func prepareTestPomFiles(testPoms []testPom) (string, error) { + tempDir, err := os.MkdirTemp("", "prepareTestPomFiles") + if err != nil { + return "", err + } + for _, testPom := range testPoms { + pomPath := filepath.Join(tempDir, testPom.pomFilePath) + err := os.MkdirAll(filepath.Dir(pomPath), 0755) + if err != nil { + return "", err + } + err = os.WriteFile(pomPath, []byte(testPom.pomContentString), 0600) + if err != nil { + return "", err + } + } + return tempDir, nil +} diff --git a/cli/azd/internal/appdetect/spring_boot.go b/cli/azd/internal/appdetect/spring_boot.go new file mode 100644 index 00000000000..58250650881 --- /dev/null +++ b/cli/azd/internal/appdetect/spring_boot.go @@ -0,0 +1,719 @@ +package appdetect + +import ( + "fmt" + "log" + "maps" + "regexp" + "slices" + "strconv" + "strings" +) + +type SpringBootProject struct { + applicationProperties map[string]string + pom pom +} + +const UnknownSpringBootVersion string = "unknownSpringBootVersion" + +type DatabaseDependencyRule struct { + databaseDep DatabaseDep + mavenDependencies []MavenDependency +} + +type MavenDependency struct { + groupId string + artifactId string +} + +var databaseDependencyRules = []DatabaseDependencyRule{ + { + databaseDep: DbPostgres, + mavenDependencies: []MavenDependency{ + { + groupId: "org.postgresql", + artifactId: "postgresql", + }, + { + groupId: "com.azure.spring", + artifactId: "spring-cloud-azure-starter-jdbc-postgresql", + }, + }, + }, + { + databaseDep: DbMySql, + mavenDependencies: []MavenDependency{ + { + groupId: "com.mysql", + artifactId: "mysql-connector-j", + }, + { + groupId: "com.azure.spring", + artifactId: "spring-cloud-azure-starter-jdbc-mysql", + }, + }, + }, + { + databaseDep: DbRedis, + mavenDependencies: []MavenDependency{ + { + groupId: "org.springframework.boot", + artifactId: "spring-boot-starter-data-redis", + }, + { + groupId: "org.springframework.boot", + artifactId: "spring-boot-starter-data-redis-reactive", + }, + }, + }, + { + databaseDep: DbMongo, + mavenDependencies: []MavenDependency{ + { + groupId: "org.springframework.boot", + artifactId: "spring-boot-starter-data-mongodb", + }, + { + groupId: "org.springframework.boot", + artifactId: "spring-boot-starter-data-mongodb-reactive", + }, + }, + }, + { + databaseDep: DbCosmos, + mavenDependencies: []MavenDependency{ + { + groupId: "com.azure.spring", + artifactId: "spring-cloud-azure-starter-data-cosmos", + }, + }, + }, +} + +func detectAzureDependenciesByAnalyzingSpringBootProject(mavenProject mavenProject, azdProject *Project) { + pom := mavenProject.pom + if !isSpringBootApplication(pom) { + log.Printf("Skip analyzing spring boot project. 
pomFilePath = %s.", pom.pomFilePath) + return + } + var springBootProject = SpringBootProject{ + applicationProperties: readProperties(azdProject.Path), + pom: pom, + } + detectDatabases(azdProject, &springBootProject) + detectServiceBus(azdProject, &springBootProject) + detectEventHubs(azdProject, &springBootProject) + detectStorageAccount(azdProject, &springBootProject) + detectMetadata(azdProject, &springBootProject) + detectSpringFrontend(azdProject, &springBootProject) +} + +func detectSpringFrontend(azdProject *Project, springBootProject *SpringBootProject) { + for _, p := range springBootProject.pom.Build.Plugins { + if p.GroupId == "com.github.eirslett" && p.ArtifactId == "frontend-maven-plugin" { + azdProject.Dependencies = append(azdProject.Dependencies, SpringFrontend) + break + } + } +} + +func detectDatabases(azdProject *Project, springBootProject *SpringBootProject) { + databaseDepMap := map[DatabaseDep]struct{}{} + for _, rule := range databaseDependencyRules { + for _, targetDependency := range rule.mavenDependencies { + var targetGroupId = targetDependency.groupId + var targetArtifactId = targetDependency.artifactId + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + databaseDepMap[rule.databaseDep] = struct{}{} + logServiceAddedAccordingToMavenDependency(rule.databaseDep.Display(), + targetGroupId, targetArtifactId) + break + } + } + } + if len(databaseDepMap) > 0 { + azdProject.DatabaseDeps = slices.SortedFunc(maps.Keys(databaseDepMap), + func(a, b DatabaseDep) int { + return strings.Compare(string(a), string(b)) + }) + } +} + +func detectServiceBus(azdProject *Project, springBootProject *SpringBootProject) { + // we need to figure out multiple projects are using the same service bus + detectServiceBusAccordingToJMSMavenDependency(azdProject, springBootProject) + detectServiceBusAccordingToSpringCloudStreamBinderMavenDependency(azdProject, springBootProject) +} + +func detectServiceBusAccordingToJMSMavenDependency(azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "com.azure.spring" + var targetArtifactId = "spring-cloud-azure-starter-servicebus-jms" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + newDependency := AzureDepServiceBus{ + IsJms: true, + } + azdProject.AzureDeps = append(azdProject.AzureDeps, newDependency) + logServiceAddedAccordingToMavenDependency(newDependency.ResourceDisplay(), targetGroupId, targetArtifactId) + } +} + +func detectServiceBusAccordingToSpringCloudStreamBinderMavenDependency( + azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "com.azure.spring" + var targetArtifactId = "spring-cloud-azure-stream-binder-servicebus" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + bindingDestinations := getBindingDestinationMap(springBootProject.applicationProperties) + var destinations = DistinctValues(bindingDestinations) + newDep := AzureDepServiceBus{ + Queues: destinations, + IsJms: false, + } + azdProject.AzureDeps = append(azdProject.AzureDeps, newDep) + logServiceAddedAccordingToMavenDependency(newDep.ResourceDisplay(), targetGroupId, targetArtifactId) + for bindingName, destination := range bindingDestinations { + log.Printf(" Detected Service Bus queue [%s] for binding [%s] by analyzing property file.", + destination, bindingName) + } + } +} + +func detectEventHubs(azdProject *Project, springBootProject *SpringBootProject) { + // we need to figure out multiple projects are using the same event hub + 
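// Each helper below checks for one specific Maven dependency; any Event Hubs usage it finds is merged into a single AzureDepEventHubs entry by addAzureDepEventHubsIntoProject. +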
detectEventHubsAccordingToSpringCloudStreamBinderMavenDependency(azdProject, springBootProject) + detectEventHubsAccordingToSpringCloudEventhubsStarterMavenDependency(azdProject, springBootProject) + detectEventHubsAccordingToSpringIntegrationEventhubsMavenDependency(azdProject, springBootProject) + detectEventHubsAccordingToSpringMessagingEventhubsMavenDependency(azdProject, springBootProject) + detectEventHubsAccordingToSpringCloudStreamKafkaMavenDependency(azdProject, springBootProject) + detectEventHubsAccordingToSpringKafkaMavenDependency(azdProject, springBootProject) +} + +func detectEventHubsAccordingToSpringCloudStreamBinderMavenDependency( + azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "com.azure.spring" + var targetArtifactId = "spring-cloud-azure-stream-binder-eventhubs" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + bindingDestinations := getBindingDestinationMap(springBootProject.applicationProperties) + newDep := AzureDepEventHubs{ + EventHubsNamePropertyMap: bindingDestinations, + DependencyTypes: []DependencyType{SpringCloudStreamEventHubs}, + } + addAzureDepEventHubsIntoProject(azdProject, newDep) + logServiceAddedAccordingToMavenDependency(newDep.ResourceDisplay(), targetGroupId, targetArtifactId) + for bindingName, destination := range bindingDestinations { + log.Printf(" Detected Event Hub [%s] for binding [%s] by analyzing property file.", + destination, bindingName) + } + } +} + +func detectEventHubsAccordingToSpringCloudEventhubsStarterMavenDependency( + azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "com.azure.spring" + var targetArtifactId = "spring-cloud-azure-starter-eventhubs" + // event-hub-name can be specified in different levels, see + // https://learn.microsoft.com/azure/developer/java/spring-framework/configuration-properties-azure-event-hubs + var targetPropertyNames = []string{ + "spring.cloud.azure.eventhubs.event-hub-name", + "spring.cloud.azure.eventhubs.producer.event-hub-name", + "spring.cloud.azure.eventhubs.consumer.event-hub-name", + "spring.cloud.azure.eventhubs.processor.event-hub-name", + } + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + eventHubsNamePropertyMap := map[string]string{} + for _, propertyName := range targetPropertyNames { + if propertyValue, ok := springBootProject.applicationProperties[propertyName]; ok { + eventHubsNamePropertyMap[propertyName] = propertyValue + } + } + newDep := AzureDepEventHubs{ + EventHubsNamePropertyMap: eventHubsNamePropertyMap, + DependencyTypes: []DependencyType{SpringCloudEventHubsStarter}, + } + addAzureDepEventHubsIntoProject(azdProject, newDep) + logServiceAddedAccordingToMavenDependency(newDep.ResourceDisplay(), targetGroupId, targetArtifactId) + for property, name := range eventHubsNamePropertyMap { + log.Printf(" Detected Event Hub [%s] for [%s] by analyzing property file.", property, name) + } + } +} + +func detectEventHubsAccordingToSpringIntegrationEventhubsMavenDependency( + azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "com.azure.spring" + var targetArtifactId = "spring-cloud-azure-starter-integration-eventhubs" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + newDep := AzureDepEventHubs{ + // eventhubs name is empty here because no configured property + EventHubsNamePropertyMap: map[string]string{}, + DependencyTypes: []DependencyType{SpringIntegrationEventHubs}, + } + 
addAzureDepEventHubsIntoProject(azdProject, newDep) + logServiceAddedAccordingToMavenDependency(newDep.ResourceDisplay(), targetGroupId, targetArtifactId) + } +} + +func detectEventHubsAccordingToSpringMessagingEventhubsMavenDependency( + azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "com.azure.spring" + var targetArtifactId = "spring-messaging-azure-eventhubs" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + newDep := AzureDepEventHubs{ + // eventhubs name is empty here because no configured property + EventHubsNamePropertyMap: map[string]string{}, + DependencyTypes: []DependencyType{SpringMessagingEventHubs}, + } + addAzureDepEventHubsIntoProject(azdProject, newDep) + logServiceAddedAccordingToMavenDependency(newDep.ResourceDisplay(), targetGroupId, targetArtifactId) + } +} + +func detectEventHubsAccordingToSpringCloudStreamKafkaMavenDependency( + azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "org.springframework.cloud" + var targetArtifactId = "spring-cloud-starter-stream-kafka" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + bindingDestinations := getBindingDestinationMap(springBootProject.applicationProperties) + newDep := AzureDepEventHubs{ + EventHubsNamePropertyMap: bindingDestinations, + SpringBootVersion: detectSpringBootVersion(springBootProject.pom), + DependencyTypes: []DependencyType{SpringCloudStreamKafka}, + } + addAzureDepEventHubsIntoProject(azdProject, newDep) + logServiceAddedAccordingToMavenDependency(newDep.ResourceDisplay(), targetGroupId, targetArtifactId) + for bindingName, destination := range bindingDestinations { + log.Printf(" Detected Kafka Topic [%s] for binding [%s] by analyzing property file.", + destination, bindingName) + } + } +} + +func detectEventHubsAccordingToSpringKafkaMavenDependency(azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "org.springframework.kafka" + var targetArtifactId = "spring-kafka" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + newDep := AzureDepEventHubs{ + // eventhubs name is empty here because no configured property + EventHubsNamePropertyMap: map[string]string{}, + SpringBootVersion: detectSpringBootVersion(springBootProject.pom), + DependencyTypes: []DependencyType{SpringKafka}, + } + addAzureDepEventHubsIntoProject(azdProject, newDep) + logServiceAddedAccordingToMavenDependency(newDep.ResourceDisplay(), targetGroupId, targetArtifactId) + } +} + +func addAzureDepEventHubsIntoProject( + azdProject *Project, + newDep AzureDepEventHubs) { + for index, azureDep := range azdProject.AzureDeps { + if azureDep, ok := azureDep.(AzureDepEventHubs); ok { + // already have existing dependency + for property, eventHubsName := range newDep.EventHubsNamePropertyMap { + azureDep.EventHubsNamePropertyMap[property] = eventHubsName + } + azureDep.DependencyTypes = append(azureDep.DependencyTypes, newDep.DependencyTypes...) 
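+ // Note: this unconditionally overwrites the previously recorded Spring Boot version with the one from the latest detection (which may be empty).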
+ azureDep.SpringBootVersion = newDep.SpringBootVersion + azdProject.AzureDeps[index] = azureDep + return + } + } + + // add new dependency + azdProject.AzureDeps = append(azdProject.AzureDeps, newDep) +} + +func detectStorageAccount(azdProject *Project, springBootProject *SpringBootProject) { + detectStorageAccountAccordingToSpringCloudStreamBinderMavenDependencyAndProperty(azdProject, springBootProject) + detectStorageAccountAccordingToSpringIntegrationEventhubsMavenDependencyAndProperty(azdProject, springBootProject) + detectStorageAccountAccordingToSpringMessagingEventhubsMavenDependencyAndProperty(azdProject, springBootProject) +} + +func detectStorageAccountAccordingToSpringCloudStreamBinderMavenDependencyAndProperty( + azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "com.azure.spring" + var targetArtifactId = "spring-cloud-azure-stream-binder-eventhubs" + var targetPropertyNameSuffix = "spring.cloud.azure.eventhubs.processor.checkpoint-store.container-name" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + bindingDestinations := getBindingDestinationMap(springBootProject.applicationProperties) + containsInBindingName := "" + for bindingName := range bindingDestinations { + if strings.Contains(bindingName, "-in-") { // Example: consume-in-0 + containsInBindingName = bindingName + break + } + } + if containsInBindingName != "" { + detectStorageAccountAccordingToProperty(azdProject, springBootProject.applicationProperties, + targetGroupId, targetArtifactId, targetPropertyNameSuffix, + "binding name ["+containsInBindingName+"] contains '-in-'") + } + } +} + +func detectStorageAccountAccordingToSpringIntegrationEventhubsMavenDependencyAndProperty( + azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "com.azure.spring" + var targetArtifactId = "spring-cloud-azure-starter-integration-eventhubs" + var targetPropertyNameSuffix = "spring.cloud.azure.eventhubs.processor.checkpoint-store.container-name" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + detectStorageAccountAccordingToProperty(azdProject, springBootProject.applicationProperties, + targetGroupId, targetArtifactId, targetPropertyNameSuffix, "") + } +} + +func detectStorageAccountAccordingToSpringMessagingEventhubsMavenDependencyAndProperty( + azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "com.azure.spring" + var targetArtifactId = "spring-messaging-azure-eventhubs" + var targetPropertyNameSuffix = "spring.cloud.azure.eventhubs.processor.checkpoint-store.container-name" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + detectStorageAccountAccordingToProperty(azdProject, springBootProject.applicationProperties, + targetGroupId, targetArtifactId, targetPropertyNameSuffix, "") + } +} + +func detectStorageAccountAccordingToProperty(azdProject *Project, applicationProperties map[string]string, + targetGroupId string, targetArtifactId string, targetPropertyNameSuffix string, extraCondition string) { + containerNamePropertyMap := make(map[string]string) + for key, value := range applicationProperties { + if strings.HasSuffix(key, targetPropertyNameSuffix) { + containerNamePropertyMap[key] = value + } + } + if len(containerNamePropertyMap) > 0 { + newDep := AzureDepStorageAccount{ + ContainerNamePropertyMap: containerNamePropertyMap, + } + azdProject.AzureDeps = append(azdProject.AzureDeps, newDep) + 
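// A configured checkpoint-store container name indicates the project needs an Azure Storage account for Event Hubs checkpointing. +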
logServiceAddedAccordingToMavenDependencyAndExtraCondition(newDep.ResourceDisplay(), targetGroupId, + targetArtifactId, extraCondition) + for property, containerName := range containerNamePropertyMap { + log.Printf(" Detected Storage container name: [%s] for [%s] by analyzing property file.", + containerName, property) + } + } +} + +func detectMetadata(azdProject *Project, springBootProject *SpringBootProject) { + detectPropertySpringApplicationName(azdProject, springBootProject) + detectPropertyServerPort(azdProject, springBootProject) + detectPropertySpringCloudAzureCosmosDatabase(azdProject, springBootProject) + detectPropertySpringDataMongodbDatabase(azdProject, springBootProject) + detectPropertySpringDataMongodbUri(azdProject, springBootProject) + detectPropertySpringDatasourceUrl(azdProject, springBootProject) + + detectDependencySpringCloudAzureStarter(azdProject, springBootProject) + detectDependencySpringCloudAzureStarterJdbcMysql(azdProject, springBootProject) + detectDependencySpringCloudAzureStarterJdbcPostgresql(azdProject, springBootProject) + detectDependencySpringCloudConfig(azdProject, springBootProject) + detectDependencySpringCloudEureka(azdProject, springBootProject) + detectDependencyAboutEmbeddedWebServer(azdProject, springBootProject) +} + +func detectPropertySpringCloudAzureCosmosDatabase(azdProject *Project, springBootProject *SpringBootProject) { + var targetPropertyName = "spring.cloud.azure.cosmos.database" + propertyValue, ok := springBootProject.applicationProperties[targetPropertyName] + if !ok { + log.Printf("%s property not exist in project. Path = %s", targetPropertyName, azdProject.Path) + return + } + databaseName := "" + if IsValidDatabaseName(propertyValue) { + databaseName = propertyValue + } else { + return + } + if azdProject.Metadata.DatabaseNameInPropertySpringDatasourceUrl == nil { + azdProject.Metadata.DatabaseNameInPropertySpringDatasourceUrl = map[DatabaseDep]string{} + } + if azdProject.Metadata.DatabaseNameInPropertySpringDatasourceUrl[DbCosmos] == "" { + // spring.data.mongodb.database has lower priority than spring.data.mongodb.uri + azdProject.Metadata.DatabaseNameInPropertySpringDatasourceUrl[DbCosmos] = databaseName + } +} + +func detectPropertySpringDatasourceUrl(azdProject *Project, springBootProject *SpringBootProject) { + var targetPropertyName = "spring.datasource.url" + propertyValue, ok := springBootProject.applicationProperties[targetPropertyName] + if !ok { + log.Printf("%s property not exist in project. Path = %s", targetPropertyName, azdProject.Path) + return + } + databaseName := getDatabaseName(propertyValue) + if databaseName == "" { + log.Printf("can not get database name from property: %s", targetPropertyName) + return + } + if azdProject.Metadata.DatabaseNameInPropertySpringDatasourceUrl == nil { + azdProject.Metadata.DatabaseNameInPropertySpringDatasourceUrl = map[DatabaseDep]string{} + } + if strings.HasPrefix(propertyValue, "jdbc:postgresql") { + azdProject.Metadata.DatabaseNameInPropertySpringDatasourceUrl[DbPostgres] = databaseName + } else if strings.HasPrefix(propertyValue, "jdbc:mysql") { + azdProject.Metadata.DatabaseNameInPropertySpringDatasourceUrl[DbMySql] = databaseName + } +} + +func detectPropertySpringDataMongodbUri(azdProject *Project, springBootProject *SpringBootProject) { + var targetPropertyName = "spring.data.mongodb.uri" + propertyValue, ok := springBootProject.applicationProperties[targetPropertyName] + if !ok { + log.Printf("%s property not exist in project. 
Path = %s", targetPropertyName, azdProject.Path) + return + } + databaseName := getDatabaseName(propertyValue) + if databaseName == "" { + log.Printf("can not get database name from property: %s", targetPropertyName) + return + } + if azdProject.Metadata.DatabaseNameInPropertySpringDatasourceUrl == nil { + azdProject.Metadata.DatabaseNameInPropertySpringDatasourceUrl = map[DatabaseDep]string{} + } + azdProject.Metadata.DatabaseNameInPropertySpringDatasourceUrl[DbMongo] = databaseName +} + +func detectPropertySpringDataMongodbDatabase(azdProject *Project, springBootProject *SpringBootProject) { + var targetPropertyName = "spring.data.mongodb.database" + propertyValue, ok := springBootProject.applicationProperties[targetPropertyName] + if !ok { + log.Printf("%s property not exist in project. Path = %s", targetPropertyName, azdProject.Path) + return + } + databaseName := "" + if IsValidDatabaseName(propertyValue) { + databaseName = propertyValue + } else { + return + } + if azdProject.Metadata.DatabaseNameInPropertySpringDatasourceUrl == nil { + azdProject.Metadata.DatabaseNameInPropertySpringDatasourceUrl = map[DatabaseDep]string{} + } + if azdProject.Metadata.DatabaseNameInPropertySpringDatasourceUrl[DbMongo] == "" { + // spring.data.mongodb.database has lower priority than spring.data.mongodb.uri + azdProject.Metadata.DatabaseNameInPropertySpringDatasourceUrl[DbMongo] = databaseName + } +} + +func getDatabaseName(datasourceURL string) string { + lastSlashIndex := strings.LastIndex(datasourceURL, "/") + if lastSlashIndex == -1 { + return "" + } + result := datasourceURL[lastSlashIndex+1:] + if idx := strings.Index(result, "?"); idx != -1 { + result = result[:idx] + } + if IsValidDatabaseName(result) { + return result + } + return "" +} + +func IsValidDatabaseName(name string) bool { + if len(name) < 3 || len(name) > 63 { + return false + } + re := regexp.MustCompile(`^[a-z0-9]+(-[a-z0-9]+)*$`) + return re.MatchString(name) +} + +func detectDependencySpringCloudAzureStarter(azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "com.azure.spring" + var targetArtifactId = "spring-cloud-azure-starter" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + azdProject.Metadata.ContainsDependencySpringCloudAzureStarter = true + logMetadataUpdated("ContainsDependencySpringCloudAzureStarter = true") + } +} + +func detectDependencySpringCloudAzureStarterJdbcPostgresql(azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "com.azure.spring" + var targetArtifactId = "spring-cloud-azure-starter-jdbc-postgresql" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + azdProject.Metadata.ContainsDependencySpringCloudAzureStarterJdbcPostgresql = true + logMetadataUpdated("ContainsDependencySpringCloudAzureStarterJdbcPostgresql = true") + } +} + +func detectDependencySpringCloudAzureStarterJdbcMysql(azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "com.azure.spring" + var targetArtifactId = "spring-cloud-azure-starter-jdbc-mysql" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + azdProject.Metadata.ContainsDependencySpringCloudAzureStarterJdbcMysql = true + logMetadataUpdated("ContainsDependencySpringCloudAzureStarterJdbcMysql = true") + } +} + +func detectPropertySpringApplicationName(azdProject *Project, springBootProject *SpringBootProject) { + var targetPropertyName = "spring.application.name" + if appName, ok := 
springBootProject.applicationProperties[targetPropertyName]; ok { + azdProject.Metadata.ApplicationName = appName + } +} + +func detectPropertyServerPort(azdProject *Project, springBootProject *SpringBootProject) { + var targetPropertyName = "server.port" + if serverPort, ok := springBootProject.applicationProperties[targetPropertyName]; ok { + if port, err := strconv.Atoi(serverPort); err == nil { + azdProject.Metadata.ServerPort = port + } else { + log.Printf("Failed to convert the value of server.port to int. %v.", err) + } + } +} + +func detectDependencySpringCloudEureka(azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "org.springframework.cloud" + var targetArtifactId = "spring-cloud-starter-netflix-eureka-server" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + azdProject.Metadata.ContainsDependencySpringCloudEurekaServer = true + logMetadataUpdated("ContainsDependencySpringCloudEurekaServer = true") + } + + targetGroupId = "org.springframework.cloud" + targetArtifactId = "spring-cloud-starter-netflix-eureka-client" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + azdProject.Metadata.ContainsDependencySpringCloudEurekaClient = true + logMetadataUpdated("ContainsDependencySpringCloudEurekaClient = true") + } +} + +func detectDependencySpringCloudConfig(azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "org.springframework.cloud" + var targetArtifactId = "spring-cloud-config-server" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + azdProject.Metadata.ContainsDependencySpringCloudConfigServer = true + logMetadataUpdated("ContainsDependencySpringCloudConfigServer = true") + } + + targetGroupId = "org.springframework.cloud" + targetArtifactId = "spring-cloud-starter-config" + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + azdProject.Metadata.ContainsDependencySpringCloudConfigClient = true + logMetadataUpdated("ContainsDependencySpringCloudConfigClient = true") + } +} + +func detectDependencyAboutEmbeddedWebServer(azdProject *Project, springBootProject *SpringBootProject) { + var targetGroupId = "org.springframework.boot" + var targetArtifactIds = []string{ + "spring-boot-starter-web", + "spring-boot-starter-webflux", + "spring-boot-starter-tomcat", + "spring-boot-starter-jetty", + "spring-boot-starter-undertow", + "spring-boot-starter-reactor-netty", + } + for _, targetArtifactId := range targetArtifactIds { + if hasDependency(springBootProject, targetGroupId, targetArtifactId) { + azdProject.Metadata.ContainsDependencyAboutEmbeddedWebServer = true + logMetadataUpdated("ContainsDependencyAboutEmbeddedWebServer = true") + return + } + } +} + +func logServiceAddedAccordingToMavenDependency(resourceName, groupId string, artifactId string) { + logServiceAddedAccordingToMavenDependencyAndExtraCondition(resourceName, groupId, artifactId, "") +} + +func logServiceAddedAccordingToMavenDependencyAndExtraCondition( + resourceName, groupId string, artifactId string, extraCondition string) { + insertedString := "" + extraCondition = strings.TrimSpace(extraCondition) + if extraCondition != "" { + insertedString = " and " + extraCondition + } + log.Printf("Detected '%s' because found dependency '%s:%s' in pom.xml file%s.", + resourceName, groupId, artifactId, insertedString) +} + +func logMetadataUpdated(info string) { + log.Printf("Metadata updated. 
%s.", info) +} + +func detectSpringBootVersion(pom pom) string { + for _, dep := range pom.Dependencies { + if dep.GroupId == "org.springframework.boot" { + return dep.Version + } + } + for _, dep := range pom.Build.Plugins { + if dep.GroupId == "org.springframework.boot" { + return dep.Version + } + } + return UnknownSpringBootVersion +} + +func isSpringBootApplication(pom pom) bool { + for _, dep := range pom.Dependencies { + if dep.GroupId == "org.springframework.boot" { + return true + } + } + for _, dep := range pom.Build.Plugins { + if dep.GroupId == "org.springframework.boot" { + return true + } + } + for _, dep := range pom.Build.Plugins { + if dep.GroupId == "org.springframework.boot" && + dep.ArtifactId == "spring-boot-maven-plugin" { + return true + } + } + return false +} + +// isSpringBootRunnableProject checks if the pom indicates a runnable Spring Boot project +func isSpringBootRunnableProject(project mavenProject) bool { + targetGroupId := "org.springframework.boot" + targetArtifactId := "spring-boot-maven-plugin" + for _, plugin := range project.pom.Build.Plugins { + if plugin.GroupId == targetGroupId && plugin.ArtifactId == targetArtifactId { + return true + } + } + return false +} + +func DistinctValues(input map[string]string) []string { + valueSet := make(map[string]struct{}) + for _, value := range input { + valueSet[value] = struct{}{} + } + + var result []string + for value := range valueSet { + result = append(result, value) + } + + return result +} + +// Function to find all properties that match the pattern `spring.cloud.stream.bindings..destination` +func getBindingDestinationMap(properties map[string]string) map[string]string { + result := make(map[string]string) + + // Iterate through the properties map and look for matching keys + for key, value := range properties { + // Check if the key matches the pattern `spring.cloud.stream.bindings..destination` + if strings.HasPrefix(key, "spring.cloud.stream.bindings.") && strings.HasSuffix(key, ".destination") { + // Store the binding name and destination value + result[key] = fmt.Sprintf("%v", value) + } + } + + return result +} + +func hasDependency(project *SpringBootProject, groupId string, artifactId string) bool { + for _, projectDependency := range project.pom.Dependencies { + if projectDependency.GroupId == groupId && projectDependency.ArtifactId == artifactId { + return true + } + } + return false +} diff --git a/cli/azd/internal/appdetect/spring_boot_property.go b/cli/azd/internal/appdetect/spring_boot_property.go new file mode 100644 index 00000000000..d597019fc34 --- /dev/null +++ b/cli/azd/internal/appdetect/spring_boot_property.go @@ -0,0 +1,138 @@ +package appdetect + +import ( + "bufio" + "fmt" + "github.com/azure/azure-dev/cli/azd/pkg/osutil" + "github.com/braydonk/yaml" + "log" + "os" + "path/filepath" + "regexp" + "strings" +) + +func readProperties(projectPath string) map[string]string { + // todo: do we need to consider the bootstrap.properties + result := make(map[string]string) + readPropertiesInPropertiesFile(filepath.Join(projectPath, "/src/main/resources/application.properties"), result) + readPropertiesInYamlFile(filepath.Join(projectPath, "/src/main/resources/application.yml"), result) + readPropertiesInYamlFile(filepath.Join(projectPath, "/src/main/resources/application.yaml"), result) + profile, profileSet := result["spring.profiles.active"] + if profileSet { + readPropertiesInPropertiesFile( + filepath.Join(projectPath, "/src/main/resources/application-"+profile+".properties"), result) 
+ readPropertiesInYamlFile(filepath.Join(projectPath, "/src/main/resources/application-"+profile+".yml"), result) + readPropertiesInYamlFile(filepath.Join(projectPath, "/src/main/resources/application-"+profile+".yaml"), result) + } + return result +} + +func readPropertiesInYamlFile(yamlFilePath string, result map[string]string) { + if !osutil.FileExists(yamlFilePath) { + return + } + data, err := os.ReadFile(yamlFilePath) + if err != nil { + log.Fatalf("error reading YAML file: %v", err) + return + } + + // Parse the YAML into a yaml.Node + var root yaml.Node + err = yaml.Unmarshal(data, &root) + if err != nil { + log.Fatalf("error unmarshalling YAML: %v", err) + return + } + + parseYAML("", &root, result) +} + +// Recursively parse the YAML and build dot-separated keys into a map +func parseYAML(prefix string, node *yaml.Node, result map[string]string) { + switch node.Kind { + case yaml.DocumentNode: + // Process each document's content + for _, contentNode := range node.Content { + parseYAML(prefix, contentNode, result) + } + case yaml.MappingNode: + // Process key-value pairs in a map + for i := 0; i < len(node.Content); i += 2 { + keyNode := node.Content[i] + valueNode := node.Content[i+1] + + // Ensure the key is a scalar + if keyNode.Kind != yaml.ScalarNode { + continue + } + + keyStr := keyNode.Value + newPrefix := keyStr + if prefix != "" { + newPrefix = prefix + "." + keyStr + } + parseYAML(newPrefix, valueNode, result) + } + case yaml.SequenceNode: + // Process items in a sequence (list) + for i, item := range node.Content { + newPrefix := fmt.Sprintf("%s[%d]", prefix, i) + parseYAML(newPrefix, item, result) + } + case yaml.ScalarNode: + // If it's a scalar value, add it to the result map + result[prefix] = getEnvironmentVariablePlaceholderHandledValue(node.Value) + default: + // Handle other node types if necessary + } +} + +func readPropertiesInPropertiesFile(propertiesFilePath string, result map[string]string) { + if !osutil.FileExists(propertiesFilePath) { + return + } + file, err := os.Open(propertiesFilePath) + if err != nil { + log.Fatalf("error opening properties file: %v", err) + return + } + defer file.Close() + + scanner := bufio.NewScanner(file) + for scanner.Scan() { + line := scanner.Text() + if strings.TrimSpace(line) == "" || strings.HasPrefix(line, "#") { + continue + } + parts := strings.SplitN(line, "=", 2) + if len(parts) == 2 { + key := strings.TrimSpace(parts[0]) + value := getEnvironmentVariablePlaceholderHandledValue(parts[1]) + result[key] = value + } + } +} + +var environmentVariableRegex = regexp.MustCompile(`\$\{([^:}]+)(?::([^}]+))?}`) + +func getEnvironmentVariablePlaceholderHandledValue(rawValue string) string { + trimmedRawValue := strings.TrimSpace(rawValue) + matches := environmentVariableRegex.FindAllStringSubmatch(trimmedRawValue, -1) + result := trimmedRawValue + for _, match := range matches { + if len(match) < 2 { + continue + } + envVar := match[1] + defaultValue := match[2] + value := os.Getenv(envVar) + if value == "" { + value = defaultValue + } + placeholder := match[0] + result = strings.Replace(result, placeholder, value, -1) + } + return result +} diff --git a/cli/azd/internal/appdetect/spring_boot_property_test.go b/cli/azd/internal/appdetect/spring_boot_property_test.go new file mode 100644 index 00000000000..40216c6c77d --- /dev/null +++ b/cli/azd/internal/appdetect/spring_boot_property_test.go @@ -0,0 +1,89 @@ +package appdetect + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" +) + 
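+ // Illustrative example of the placeholder handling exercised below: given
+ // spring.datasource.url=jdbc:mysql://${MYSQL_HOST:localhost}:${MYSQL_PORT:3306}/petclinic
+ // and neither MYSQL_HOST nor MYSQL_PORT set, readProperties resolves the value to
+ // jdbc:mysql://localhost:3306/petclinic, because unset placeholders fall back to their defaults.
+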
+func TestReadProperties(t *testing.T) { + var properties = readProperties(filepath.Join("testdata", "java-spring", "project-one")) + require.Equal(t, "", properties["not.exist"]) + require.Equal(t, "jdbc:h2:mem:testdb", properties["spring.datasource.url"]) + + properties = readProperties(filepath.Join("testdata", "java-spring", "project-two")) + require.Equal(t, "", properties["not.exist"]) + require.Equal(t, "jdbc:h2:mem:testdb", properties["spring.datasource.url"]) + + properties = readProperties(filepath.Join("testdata", "java-spring", "project-three")) + require.Equal(t, "", properties["not.exist"]) + require.Equal(t, "HTML", properties["spring.thymeleaf.mode"]) + + properties = readProperties(filepath.Join("testdata", "java-spring", "project-four")) + require.Equal(t, "", properties["not.exist"]) + require.Equal(t, "mysql", properties["database"]) +} + +func TestGetEnvironmentVariablePlaceholderHandledValue(t *testing.T) { + tests := []struct { + name string + inputValue string + environmentVariables map[string]string + expectedValue string + }{ + { + "No environment variable placeholder", + "valueOne", + map[string]string{}, + "valueOne", + }, + { + "Has invalid environment variable placeholder", + "${VALUE_ONE", + map[string]string{}, + "${VALUE_ONE", + }, + { + "Has valid environment variable placeholder, but environment variable not set", + "${VALUE_TWO}", + map[string]string{}, + "", + }, + { + "Has valid environment variable placeholder, and environment variable set", + "${VALUE_THREE}", + map[string]string{"VALUE_THREE": "valueThree"}, + "valueThree", + }, + { + "Has valid environment variable placeholder with default value, but environment variable not set", + "${VALUE_TWO:defaultValue}", + map[string]string{}, + "defaultValue", + }, + { + "Has valid environment variable placeholder with default value, and environment variable set", + "${VALUE_THREE:defaultValue}", + map[string]string{"VALUE_THREE": "valueThree"}, + "valueThree", + }, + { + "Has multiple environment variable placeholder with default value, and environment variable not set", + "jdbc:mysql://${MYSQL_HOST:localhost}:${MYSQL_PORT:3306}/${MYSQL_DATABASE:pet-clinic}", + map[string]string{}, + "jdbc:mysql://localhost:3306/pet-clinic", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + for k, v := range tt.environmentVariables { + err := os.Setenv(k, v) + require.NoError(t, err) + } + handledValue := getEnvironmentVariablePlaceholderHandledValue(tt.inputValue) + require.Equal(t, tt.expectedValue, handledValue) + }) + } +} diff --git a/cli/azd/internal/appdetect/spring_boot_test.go b/cli/azd/internal/appdetect/spring_boot_test.go new file mode 100644 index 00000000000..d3120daf12b --- /dev/null +++ b/cli/azd/internal/appdetect/spring_boot_test.go @@ -0,0 +1,170 @@ +package appdetect + +import ( + "context" + "path/filepath" + "testing" + + "github.com/azure/azure-dev/cli/azd/pkg/exec" + "github.com/azure/azure-dev/cli/azd/pkg/tools/maven" +) + +func TestGetDatabaseName(t *testing.T) { + tests := []struct { + input string + expected string + }{ + {"jdbc:postgresql://localhost:5432/your-database-name", "your-database-name"}, + {"jdbc:postgresql://remote_host:5432/your-database-name", "your-database-name"}, + {"jdbc:postgresql://your_postgresql_server:5432/your-database-name?sslmode=require", "your-database-name"}, + { + "jdbc:postgresql://your_postgresql_server.postgres.database.azure.com:5432/your-database-name?sslmode=require", + "your-database-name", + }, + { + 
"jdbc:postgresql://your_postgresql_server:5432/your-database-name?user=your_username&password=your_password", + "your-database-name", + }, + { + "jdbc:postgresql://your_postgresql_server.postgres.database.azure.com:5432/your-database-name" + + "?sslmode=require&spring.datasource.azure.passwordless-enabled=true", "your-database-name", + }, + } + for _, test := range tests { + result := getDatabaseName(test.input) + if result != test.expected { + t.Errorf("For input '%s', expected '%s', but got '%s'", test.input, test.expected, result) + } + } +} + +func TestIsValidDatabaseName(t *testing.T) { + tests := []struct { + name string + input string + expected bool + }{ + {"InvalidNameWithUnderscore", "invalid_name", false}, + {"TooShortName", "sh", false}, + { + "TooLongName", "this-name-is-way-too-long-to-be-considered-valid-" + + "because-it-exceeds-sixty-three-characters", false, + }, + {"InvalidStartWithHyphen", "-invalid-start", false}, + {"InvalidEndWithHyphen", "invalid-end-", false}, + {"ValidName", "valid-name", true}, + {"ValidNameWithNumbers", "valid123-name", true}, + {"ValidNameWithOnlyLetters", "valid-name", true}, + {"ValidNameWithOnlyNumbers", "123456", true}, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + result := IsValidDatabaseName(test.input) + if result != test.expected { + t.Errorf("For input '%s', expected %v, but got %v", test.input, test.expected, result) + } + }) + } +} + +func TestDetectDependencyAboutEmbeddedWebServer(t *testing.T) { + tests := []struct { + name string + testPoms []testPom + expected bool + }{ + { + name: "no web dependency", + testPoms: []testPom{ + { + pomFilePath: "pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project + 1.0.0 + + `, + }, + }, + expected: false, + }, + { + name: "has dependency: spring-boot-starter-web", + testPoms: []testPom{ + { + pomFilePath: "pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project + 1.0.0 + + + org.springframework.boot + spring-boot-starter-web + 3.0.0 + + + + `, + }, + }, + expected: true, + }, + { + name: "has dependency: spring-boot-starter-webflux", + testPoms: []testPom{ + { + pomFilePath: "pom.xml", + pomContentString: ` + + 4.0.0 + com.example + example-project + 1.0.0 + + + org.springframework.boot + spring-boot-starter-webflux + 3.0.0 + + + + `, + }, + }, + expected: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + workingDir, err := prepareTestPomFiles(tt.testPoms) + if err != nil { + t.Fatalf("%v", err) + } + for _, testPom := range tt.testPoms { + pomFilePath := filepath.Join(workingDir, testPom.pomFilePath) + mavenProject, err := createMavenProject(context.TODO(), maven.NewCli(exec.NewCommandRunner(nil)), + pomFilePath) + if err != nil { + t.Fatalf("%v", err) + } + project := Project{ + Language: Java, + Path: pomFilePath, + DetectionRule: "Inferred by presence of: pom.xml", + } + detectAzureDependenciesByAnalyzingSpringBootProject(mavenProject, &project) + if project.Metadata.ContainsDependencyAboutEmbeddedWebServer != tt.expected { + t.Errorf("\nExpected: %v\nActual: %v", tt.expected, + project.Metadata.ContainsDependencyAboutEmbeddedWebServer) + } + } + }) + } +} diff --git a/cli/azd/internal/appdetect/testdata/java-multi-levels/pom.xml b/cli/azd/internal/appdetect/testdata/java-multi-levels/pom.xml new file mode 100644 index 00000000000..cc38db5b250 --- /dev/null +++ b/cli/azd/internal/appdetect/testdata/java-multi-levels/pom.xml @@ -0,0 +1,47 @@ + + + 4.0.0 + + org.springframework.boot + 
spring-boot-starter-parent + 3.4.1 + + + com.example + multi-levels + 0.0.1-SNAPSHOT + multi-levels + multi-levels + pom + + + submodule + + + + 17 + + + + org.springframework.boot + spring-boot-starter + + + + org.springframework.boot + spring-boot-starter-test + test + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + + diff --git a/cli/azd/internal/appdetect/testdata/java-multi-levels/submodule/notsubmodule3/pom.xml b/cli/azd/internal/appdetect/testdata/java-multi-levels/submodule/notsubmodule3/pom.xml new file mode 100644 index 00000000000..c37c3ab9ee5 --- /dev/null +++ b/cli/azd/internal/appdetect/testdata/java-multi-levels/submodule/notsubmodule3/pom.xml @@ -0,0 +1,44 @@ + + + 4.0.0 + + org.springframework.boot + spring-boot-starter-parent + 3.4.1 + + + + com.example + notsubmodule003 + 0.0.1-SNAPSHOT + notsubmodule3 + notsubmodule3 + + + 17 + + + + + org.springframework.boot + spring-boot-starter + + + + org.springframework.boot + spring-boot-starter-test + test + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + + diff --git a/cli/azd/internal/appdetect/testdata/java-multi-levels/submodule/pom.xml b/cli/azd/internal/appdetect/testdata/java-multi-levels/submodule/pom.xml new file mode 100644 index 00000000000..2d59b3f9468 --- /dev/null +++ b/cli/azd/internal/appdetect/testdata/java-multi-levels/submodule/pom.xml @@ -0,0 +1,48 @@ + + + 4.0.0 + + com.example + multi-levels + 0.0.1-SNAPSHOT + ../pom.xml + + + submodule000 + 0.0.1-SNAPSHOT + submodule + submodule + pom + + + subsubmodule1 + subsubmodule2 + + + + 17 + + + + org.springframework.boot + spring-boot-starter + + + + org.springframework.boot + spring-boot-starter-test + test + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + + diff --git a/cli/azd/internal/appdetect/testdata/java-multi-levels/submodule/subsubmodule1/pom.xml b/cli/azd/internal/appdetect/testdata/java-multi-levels/submodule/subsubmodule1/pom.xml new file mode 100644 index 00000000000..ab1fb784155 --- /dev/null +++ b/cli/azd/internal/appdetect/testdata/java-multi-levels/submodule/subsubmodule1/pom.xml @@ -0,0 +1,42 @@ + + + 4.0.0 + + com.example + submodule000 + 0.0.1-SNAPSHOT + ../pom.xml + + + subsubmodule001 + 0.0.1-SNAPSHOT + subsubmodule1 + subsubmodule1 + + + 17 + + + + org.springframework.boot + spring-boot-starter + + + + org.springframework.boot + spring-boot-starter-test + test + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + + diff --git a/cli/azd/internal/appdetect/testdata/java-multi-levels/submodule/subsubmodule2/pom.xml b/cli/azd/internal/appdetect/testdata/java-multi-levels/submodule/subsubmodule2/pom.xml new file mode 100644 index 00000000000..d2034a59772 --- /dev/null +++ b/cli/azd/internal/appdetect/testdata/java-multi-levels/submodule/subsubmodule2/pom.xml @@ -0,0 +1,42 @@ + + + 4.0.0 + + org.springframework.boot + spring-boot-starter-parent + 3.4.1 + + + + subsubmodule002 + 0.0.1-SNAPSHOT + subsubmodule2 + subsubmodule2 + + + 17 + + + + org.springframework.boot + spring-boot-starter + + + + org.springframework.boot + spring-boot-starter-test + test + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + + diff --git a/cli/azd/internal/appdetect/testdata/java-multimodules/application/pom.xml b/cli/azd/internal/appdetect/testdata/java-multimodules/application/pom.xml index e4ddaa858b5..a63cc042486 100644 --- a/cli/azd/internal/appdetect/testdata/java-multimodules/application/pom.xml +++ 
b/cli/azd/internal/appdetect/testdata/java-multimodules/application/pom.xml @@ -38,6 +38,16 @@ com.mysql mysql-connector-j + + + org.springframework.boot + spring-boot-starter-data-redis + + + + org.springframework.boot + spring-boot-starter-data-mongodb + org.postgresql diff --git a/cli/azd/internal/appdetect/testdata/java-spring/project-four/src/main/resources/application-mysql.properties b/cli/azd/internal/appdetect/testdata/java-spring/project-four/src/main/resources/application-mysql.properties new file mode 100644 index 00000000000..33ec21d3c95 --- /dev/null +++ b/cli/azd/internal/appdetect/testdata/java-spring/project-four/src/main/resources/application-mysql.properties @@ -0,0 +1,7 @@ +# database init, supports mysql too +database=mysql +spring.datasource.url=jdbc:mysql://${MYSQL_HOST:localhost}:${MYSQL_PORT:3306}/${MYSQL_DATABASE:petclinic} +spring.datasource.username=${MYSQL_USERNAME:petclinic} +spring.datasource.password=${MYSQL_PASSWORD:} +# SQL is written to be idempotent so this is safe +spring.sql.init.mode=always diff --git a/cli/azd/internal/appdetect/testdata/java-spring/project-four/src/main/resources/application-postgres.properties b/cli/azd/internal/appdetect/testdata/java-spring/project-four/src/main/resources/application-postgres.properties new file mode 100644 index 00000000000..7d9676e3aad --- /dev/null +++ b/cli/azd/internal/appdetect/testdata/java-spring/project-four/src/main/resources/application-postgres.properties @@ -0,0 +1,6 @@ +database=postgres +spring.datasource.url=jdbc:postgresql://${POSTGRES_HOST:localhost}:${POSTGRES_HOST:5432}/${POSTGRES_DATABASE:petclinic} +spring.datasource.username=${POSTGRES_USERNAME:petclinic} +spring.datasource.password=${POSTGRES_PASSWORD:} +# SQL is written to be idempotent so this is safe +spring.sql.init.mode=always diff --git a/cli/azd/internal/appdetect/testdata/java-spring/project-four/src/main/resources/application.properties b/cli/azd/internal/appdetect/testdata/java-spring/project-four/src/main/resources/application.properties new file mode 100644 index 00000000000..59d5368e73c --- /dev/null +++ b/cli/azd/internal/appdetect/testdata/java-spring/project-four/src/main/resources/application.properties @@ -0,0 +1,29 @@ +# database init, supports mysql too +database=h2 +spring.sql.init.schema-locations=classpath*:db/${database}/schema.sql +spring.sql.init.data-locations=classpath*:db/${database}/data.sql + +# Web +spring.thymeleaf.mode=HTML + +# JPA +spring.jpa.hibernate.ddl-auto=none +spring.jpa.open-in-view=true + +# Internationalization +spring.messages.basename=messages/messages + +spring.profiles.active=mysql + +# Actuator +management.endpoints.web.exposure.include=* + +# Logging +logging.level.org.springframework=INFO +# logging.level.org.springframework.web=DEBUG +# logging.level.org.springframework.context.annotation=TRACE + +# Maximum time static resources should be cached +spring.web.resources.cache.cachecontrol.max-age=12h + +server.port=8081 diff --git a/cli/azd/internal/appdetect/testdata/java-spring/project-one/src/main/resources/application.yml b/cli/azd/internal/appdetect/testdata/java-spring/project-one/src/main/resources/application.yml new file mode 100644 index 00000000000..09d0cc057c5 --- /dev/null +++ b/cli/azd/internal/appdetect/testdata/java-spring/project-one/src/main/resources/application.yml @@ -0,0 +1,12 @@ +spring: + datasource: + url: jdbc:h2:mem:testdb + jackson: + date-format: com.microsoft.azure.simpletodo.configuration.RFC3339DateFormat + serialization: + write-dates-as-timestamps: false + 
jpa: + hibernate: + ddl-auto: update + show-sql: true + diff --git a/cli/azd/internal/appdetect/testdata/java-spring/project-three/src/main/resources/application.properties b/cli/azd/internal/appdetect/testdata/java-spring/project-three/src/main/resources/application.properties new file mode 100644 index 00000000000..59d5368e73c --- /dev/null +++ b/cli/azd/internal/appdetect/testdata/java-spring/project-three/src/main/resources/application.properties @@ -0,0 +1,29 @@ +# database init, supports mysql too +database=h2 +spring.sql.init.schema-locations=classpath*:db/${database}/schema.sql +spring.sql.init.data-locations=classpath*:db/${database}/data.sql + +# Web +spring.thymeleaf.mode=HTML + +# JPA +spring.jpa.hibernate.ddl-auto=none +spring.jpa.open-in-view=true + +# Internationalization +spring.messages.basename=messages/messages + +spring.profiles.active=mysql + +# Actuator +management.endpoints.web.exposure.include=* + +# Logging +logging.level.org.springframework=INFO +# logging.level.org.springframework.web=DEBUG +# logging.level.org.springframework.context.annotation=TRACE + +# Maximum time static resources should be cached +spring.web.resources.cache.cachecontrol.max-age=12h + +server.port=8081 diff --git a/cli/azd/internal/appdetect/testdata/java-spring/project-two/src/main/resources/application.yaml b/cli/azd/internal/appdetect/testdata/java-spring/project-two/src/main/resources/application.yaml new file mode 100644 index 00000000000..09d0cc057c5 --- /dev/null +++ b/cli/azd/internal/appdetect/testdata/java-spring/project-two/src/main/resources/application.yaml @@ -0,0 +1,12 @@ +spring: + datasource: + url: jdbc:h2:mem:testdb + jackson: + date-format: com.microsoft.azure.simpletodo.configuration.RFC3339DateFormat + serialization: + write-dates-as-timestamps: false + jpa: + hibernate: + ddl-auto: update + show-sql: true + diff --git a/cli/azd/internal/appdetect/testdata/java/pom.xml b/cli/azd/internal/appdetect/testdata/java/pom.xml index 09cb26061ae..76271c23f67 100644 --- a/cli/azd/internal/appdetect/testdata/java/pom.xml +++ b/cli/azd/internal/appdetect/testdata/java/pom.xml @@ -9,6 +9,13 @@ Basic POM - + + + + org.springframework.boot + spring-boot-maven-plugin + + + diff --git a/cli/azd/internal/auth_type.go b/cli/azd/internal/auth_type.go new file mode 100644 index 00000000000..0399d327079 --- /dev/null +++ b/cli/azd/internal/auth_type.go @@ -0,0 +1,29 @@ +package internal + +// AuthType defines different authentication types. 
+type AuthType string + +const ( + AuthTypeUnspecified AuthType = "unspecified" + // Username and password, or key based authentication + AuthTypePassword AuthType = "password" + // Connection string authentication + AuthTypeConnectionString AuthType = "connectionString" + // Microsoft EntraID token credential + AuthTypeUserAssignedManagedIdentity AuthType = "userAssignedManagedIdentity" +) + +func GetAuthTypeDescription(authType AuthType) string { + switch authType { + case AuthTypeUnspecified: + return "Unspecified" + case AuthTypePassword: + return "Username and password" + case AuthTypeConnectionString: + return "Connection string" + case AuthTypeUserAssignedManagedIdentity: + return "User assigned managed identity" + default: + return "Unspecified" + } +} diff --git a/cli/azd/internal/binding/binding.go b/cli/azd/internal/binding/binding.go new file mode 100644 index 00000000000..6570884daeb --- /dev/null +++ b/cli/azd/internal/binding/binding.go @@ -0,0 +1,147 @@ +package binding + +import ( + "fmt" + "strings" + + "github.com/azure/azure-dev/cli/azd/internal" +) + +func GetBindingEnvs(source Source, target Target) (map[string]string, + error) { + switch source.Type { + case Java, SpringBoot: // todo: support other Java types + return GetBindingEnvsForSpringBoot(source, target) + default: + return GetBindingEnvsForCommonSource(target) + } +} + +type Source struct { + Type SourceType + Metadata map[MetadataType]string +} + +type SourceType string +type MetadataType string + +const ( + Java SourceType = "java" + SpringBoot SourceType = "springBoot" + Unknown SourceType = "unknown" +) + +type Target struct { + Type TargetType + Name string + AuthType internal.AuthType +} + +type TargetType string + +const ( + AzureDatabaseForPostgresql TargetType = "azure.db.postgresql" + AzureDatabaseForMysql TargetType = "azure.db.mysql" + AzureCacheForRedis TargetType = "azure.db.redis" + AzureCosmosDBForMongoDB TargetType = "azure.db.cosmos.mongo" + AzureCosmosDBForNoSQL TargetType = "azure.db.cosmos.nosql" + AzureContainerApp TargetType = "azure.host.containerapp" + AzureOpenAiModel TargetType = "azure.ai.openai.model" + AzureServiceBus TargetType = "azure.messaging.servicebus" + AzureEventHubs TargetType = "azure.messaging.eventhubs" + AzureStorageAccount TargetType = "azure.storage" +) + +type InfoType string + +const ( + InfoTypeHost InfoType = "host" + InfoTypePort InfoType = "port" + InfoTypeEndpoint InfoType = "endpoint" + InfoTypeDatabaseName InfoType = "databaseName" + InfoTypeNamespace InfoType = "namespace" + InfoTypeAccountName InfoType = "accountName" + InfoTypeUsername InfoType = "username" + InfoTypePassword InfoType = "password" + InfoTypeUrl InfoType = "url" + InfoTypeJdbcUrl InfoType = "jdbcUrl" + InfoTypeConnectionString InfoType = "connectionString" + InfoTypeSourceUserAssignedManagedIdentityClientId InfoType = "sourceUserAssignedManagedIdentityClientId" +) + +const bindingEnvPrefix = "${binding:" +const bindingEnvSuffix = "}" +const bindingEnvFormat = bindingEnvPrefix + "%s:%s:%s" + bindingEnvSuffix +const SourceUserAssignedManagedIdentityClientId = bindingEnvPrefix + + "*:*:" + string(InfoTypeSourceUserAssignedManagedIdentityClientId) + bindingEnvSuffix + +func IsBindingEnv(value string) bool { + _, infoType := ToTargetAndInfoType(value) + return infoType != "" +} + +func ToBindingEnv(target Target, infoType InfoType) string { + return fmt.Sprintf(bindingEnvFormat, target.Type, target.Name, infoType) +} + +func ReplaceBindingEnv(value string, substr string) string { + 
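A minimal round-trip sketch for the ${binding:targetType:targetName:infoType} format defined above, assuming the binding package builds as shown in this patch; the resource name "eureka-server" is an arbitrary example value.

package main

import (
	"fmt"

	"github.com/azure/azure-dev/cli/azd/internal/binding"
)

func main() {
	// Render a placeholder: "${binding:azure.host.containerapp:eureka-server:host}"
	env := binding.ToBindingEnv(
		binding.Target{Type: binding.AzureContainerApp, Name: "eureka-server"},
		binding.InfoTypeHost,
	)
	fmt.Println(env)

	// Parse it back into the target and the requested info type.
	target, infoType := binding.ToTargetAndInfoType(env)
	fmt.Println(target.Type, target.Name, infoType) // azure.host.containerapp eureka-server host

	// IsBindingEnv also recognizes a placeholder embedded in a larger value.
	fmt.Println(binding.IsBindingEnv("optional:configserver:" + env + "?fail-fast=true")) // true
}
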
prefixIndex := strings.Index(value, bindingEnvPrefix) + if prefixIndex == -1 { + return value + } + suffixIndex := strings.Index(value, bindingEnvSuffix) + if suffixIndex == -1 { + return value + } + if prefixIndex >= suffixIndex { + return value + } + return value[0:prefixIndex] + substr + value[suffixIndex+1:] +} + +func ToTargetAndInfoType(value string) (target Target, infoType InfoType) { + prefixIndex := strings.Index(value, bindingEnvPrefix) + if prefixIndex == -1 { + return Target{}, "" + } + suffixIndex := strings.Index(value, bindingEnvSuffix) + if suffixIndex == -1 { + return Target{}, "" + } + if prefixIndex >= suffixIndex { + return Target{}, "" + } + bindingEnv := value[prefixIndex:suffixIndex] + a := strings.Split(bindingEnv, ":") + if len(a) != 4 { + return Target{}, "" + } + targetTypeString := a[1] + targetNameString := a[2] + infoTypeString := a[3] + return Target{Type: TargetType(targetTypeString), Name: targetNameString}, InfoType(infoTypeString) +} + +func MergeMapWithDuplicationCheck(a map[string]string, b map[string]string) (map[string]string, error) { + result := make(map[string]string) + for k, v := range a { + result[k] = v + } + for key, value := range b { + if existingValue, exist := result[key]; exist { + if value != existingValue { + return nil, duplicatedEnvError(existingValue, value) + } + } else { + result[key] = value + } + } + return result, nil +} + +func duplicatedEnvError(existingValue string, newValue string) error { + return fmt.Errorf( + "duplicated environment variable. existingValue = %s, newValue = %s", + existingValue, newValue, + ) +} diff --git a/cli/azd/internal/binding/binding_common.go b/cli/azd/internal/binding/binding_common.go new file mode 100644 index 00000000000..14dd8c92c6a --- /dev/null +++ b/cli/azd/internal/binding/binding_common.go @@ -0,0 +1,98 @@ +package binding + +import ( + "fmt" + + "github.com/azure/azure-dev/cli/azd/internal" +) + +func GetBindingEnvsForCommonSource(target Target) (map[string]string, error) { + switch target.Type { + case AzureDatabaseForPostgresql: + return GetBindingEnvsForCommonSourceToPostgresql(target.AuthType) + case AzureDatabaseForMysql: + return GetBindingEnvsForCommonSourceToMysql(target.AuthType) + case AzureCosmosDBForMongoDB: + return GetBindingEnvsForCommonSourceToMongoDB(target.AuthType) + case AzureCacheForRedis: + return GetBindingEnvsForCommonSourceToRedis(target.AuthType) + case AzureOpenAiModel: + return GetServiceBindingEnvsForAIModel(target.AuthType) + default: + return nil, fmt.Errorf("unsupported target type when binding for spring boot app, target.Type = %s", + target.Type) + } +} + +func GetBindingEnvsForCommonSourceToPostgresql(authType internal.AuthType) (map[string]string, error) { + switch authType { + case internal.AuthTypePassword: + return map[string]string{ + "POSTGRES_USERNAME": ToBindingEnv(Target{Type: AzureDatabaseForPostgresql}, InfoTypeUsername), + "POSTGRES_PASSWORD": ToBindingEnv(Target{Type: AzureDatabaseForPostgresql}, InfoTypePassword), + "POSTGRES_HOST": ToBindingEnv(Target{Type: AzureDatabaseForPostgresql}, InfoTypeHost), + "POSTGRES_DATABASE": ToBindingEnv(Target{Type: AzureDatabaseForPostgresql}, InfoTypeDatabaseName), + "POSTGRES_PORT": ToBindingEnv(Target{Type: AzureDatabaseForPostgresql}, InfoTypePort), + "POSTGRES_URL": ToBindingEnv(Target{Type: AzureDatabaseForPostgresql}, InfoTypeUrl), + }, nil + default: + return nil, unsupportedAuthTypeError(AzureDatabaseForPostgresql, authType) + } +} + +func GetBindingEnvsForCommonSourceToMysql(authType 
internal.AuthType) (map[string]string, error) { + switch authType { + case internal.AuthTypePassword: + return map[string]string{ + "MYSQL_USERNAME": ToBindingEnv(Target{Type: AzureDatabaseForMysql}, InfoTypeUsername), + "MYSQL_PASSWORD": ToBindingEnv(Target{Type: AzureDatabaseForMysql}, InfoTypePassword), + "MYSQL_HOST": ToBindingEnv(Target{Type: AzureDatabaseForMysql}, InfoTypeHost), + "MYSQL_DATABASE": ToBindingEnv(Target{Type: AzureDatabaseForMysql}, InfoTypeDatabaseName), + "MYSQL_PORT": ToBindingEnv(Target{Type: AzureDatabaseForMysql}, InfoTypePort), + "MYSQL_URL": ToBindingEnv(Target{Type: AzureDatabaseForMysql}, InfoTypeUrl), + }, nil + default: + return nil, unsupportedAuthTypeError(AzureDatabaseForMysql, authType) + } +} + +func GetBindingEnvsForCommonSourceToMongoDB(authType internal.AuthType) (map[string]string, error) { + switch authType { + case internal.AuthTypeConnectionString: + return map[string]string{ + "MONGODB_URL": ToBindingEnv(Target{Type: AzureCosmosDBForMongoDB}, InfoTypeUrl), + }, nil + default: + return nil, unsupportedAuthTypeError(AzureCosmosDBForMongoDB, authType) + } +} + +func GetBindingEnvsForCommonSourceToRedis(authType internal.AuthType) (map[string]string, error) { + switch authType { + case internal.AuthTypePassword: + return map[string]string{ + "REDIS_HOST": ToBindingEnv(Target{Type: AzureCacheForRedis}, InfoTypeHost), + "REDIS_PORT": ToBindingEnv(Target{Type: AzureCacheForRedis}, InfoTypePort), + "REDIS_ENDPOINT": ToBindingEnv(Target{Type: AzureCacheForRedis}, InfoTypeEndpoint), + "REDIS_URL": ToBindingEnv(Target{Type: AzureCacheForRedis}, InfoTypeUrl), + "REDIS_PASSWORD": ToBindingEnv(Target{Type: AzureCacheForRedis}, InfoTypePassword), + }, nil + default: + return nil, unsupportedAuthTypeError(AzureCacheForRedis, authType) + } +} + +func GetServiceBindingEnvsForAIModel(authType internal.AuthType) (map[string]string, error) { + switch authType { + case internal.AuthTypeUserAssignedManagedIdentity: + return map[string]string{ + "AZURE_OPENAI_ENDPOINT": ToBindingEnv(Target{Type: AzureOpenAiModel}, InfoTypeEndpoint), + }, nil + default: + return nil, unsupportedAuthTypeError(AzureOpenAiModel, authType) + } +} + +func unsupportedAuthTypeError(targetType TargetType, authType internal.AuthType) error { + return fmt.Errorf("unsupported auth type, serviceType = %s, authType = %s", targetType, authType) +} diff --git a/cli/azd/internal/binding/binding_spring_boot.go b/cli/azd/internal/binding/binding_spring_boot.go new file mode 100644 index 00000000000..a64b24bc73e --- /dev/null +++ b/cli/azd/internal/binding/binding_spring_boot.go @@ -0,0 +1,285 @@ +package binding + +import ( + "fmt" + "strings" + + "github.com/azure/azure-dev/cli/azd/internal" +) + +const ( + IsSpringBootJms MetadataType = "IsSpringBootJms" + IsSpringBootKafka MetadataType = "IsSpringBootKafka" + SpringBootVersion MetadataType = "SpringBootVersion" +) + +func GetBindingEnvsForSpringBoot(source Source, target Target) (map[string]string, error) { + switch target.Type { + case AzureDatabaseForPostgresql: + return GetBindingEnvsForSpringBootToPostgresql(target.AuthType) + case AzureDatabaseForMysql: + return GetBindingEnvsForSpringBootToMysql(target.AuthType) + case AzureCosmosDBForMongoDB: + return GetBindingEnvsForSpringBootToMongoDb(target.AuthType) + case AzureCosmosDBForNoSQL: + return GetBindingEnvsForSpringBootToCosmosNoSQL(target.AuthType) + case AzureCacheForRedis: + return GetBindingEnvsForSpringBootToRedis(target.AuthType) + case AzureServiceBus: + if 
source.Metadata[IsSpringBootJms] == "true" { + return GetBindingEnvsForSpringBootToServiceBusJms(target.AuthType) + } else { + return GetBindingEnvsForSpringBootToServiceBusNotJms(target.AuthType) + } + case AzureEventHubs: + if source.Metadata[IsSpringBootKafka] == "true" { + return GetBindingEnvsForSpringBootToEventHubsKafka(source.Metadata[SpringBootVersion], target.AuthType) + } else { + return GetServiceBindingEnvsForEventHubs(target.AuthType) + } + case AzureStorageAccount: + return GetServiceBindingEnvsForStorageAccount(target.AuthType) + default: + return nil, fmt.Errorf("unsupported target type when binding for spring boot app, target.Type = %s", + target.Type) + } +} + +func GetBindingEnvsForSpringBootToPostgresql(authType internal.AuthType) (map[string]string, error) { + target := Target{Type: AzureDatabaseForPostgresql} + switch authType { + case internal.AuthTypePassword: + return map[string]string{ + "spring.datasource.url": ToBindingEnv(target, InfoTypeJdbcUrl), + "spring.datasource.username": ToBindingEnv(target, InfoTypeUsername), + "spring.datasource.password": ToBindingEnv(target, InfoTypePassword), + }, nil + case internal.AuthTypeUserAssignedManagedIdentity: + return map[string]string{ + "spring.datasource.url": ToBindingEnv(target, InfoTypeJdbcUrl), + "spring.datasource.username": ToBindingEnv(target, InfoTypeUsername), + "spring.datasource.password": "", + "spring.datasource.azure.passwordless-enabled": "true", + "spring.cloud.azure.credential.client-id": SourceUserAssignedManagedIdentityClientId, + "spring.cloud.azure.credential.managed-identity-enabled": "true", + }, nil + default: + return nil, unsupportedAuthTypeError(AzureDatabaseForPostgresql, authType) + } +} + +func GetBindingEnvsForSpringBootToMysql(authType internal.AuthType) (map[string]string, error) { + target := Target{Type: AzureDatabaseForMysql} + switch authType { + case internal.AuthTypePassword: + return map[string]string{ + "spring.datasource.url": ToBindingEnv(target, InfoTypeJdbcUrl), + "spring.datasource.username": ToBindingEnv(target, InfoTypeUsername), + "spring.datasource.password": ToBindingEnv(target, InfoTypePassword), + }, nil + case internal.AuthTypeUserAssignedManagedIdentity: + return map[string]string{ + "spring.datasource.url": ToBindingEnv(target, InfoTypeJdbcUrl), + "spring.datasource.username": ToBindingEnv(target, InfoTypeUsername), + "spring.datasource.password": "", + "spring.datasource.azure.passwordless-enabled": "true", + "spring.cloud.azure.credential.client-id": SourceUserAssignedManagedIdentityClientId, + "spring.cloud.azure.credential.managed-identity-enabled": "true", + }, nil + default: + return nil, unsupportedAuthTypeError(AzureDatabaseForMysql, authType) + } +} + +func GetBindingEnvsForSpringBootToMongoDb(authType internal.AuthType) (map[string]string, error) { + target := Target{Type: AzureCosmosDBForMongoDB} + switch authType { + case internal.AuthTypeConnectionString: + return map[string]string{ + "spring.data.mongodb.uri": ToBindingEnv(target, InfoTypeJdbcUrl), + "spring.data.mongodb.database": ToBindingEnv(target, InfoTypeDatabaseName), + }, nil + default: + return nil, unsupportedAuthTypeError(AzureCosmosDBForMongoDB, authType) + } +} + +func GetBindingEnvsForSpringBootToCosmosNoSQL(authType internal.AuthType) (map[string]string, error) { + target := Target{Type: AzureCosmosDBForNoSQL} + switch authType { + case internal.AuthTypeUserAssignedManagedIdentity: + return map[string]string{ + "spring.cloud.azure.cosmos.endpoint": ToBindingEnv(target, 
InfoTypeEndpoint), + "spring.cloud.azure.cosmos.database": ToBindingEnv(target, InfoTypeDatabaseName), + }, nil + default: + return nil, unsupportedAuthTypeError(AzureCosmosDBForNoSQL, authType) + } +} + +func GetBindingEnvsForSpringBootToRedis(authType internal.AuthType) (map[string]string, error) { + target := Target{Type: AzureCacheForRedis} + switch authType { + case internal.AuthTypePassword: + return map[string]string{ + "spring.data.redis.url": ToBindingEnv(target, InfoTypeUrl), + }, nil + default: + return nil, unsupportedAuthTypeError(AzureCacheForRedis, authType) + } +} + +func GetBindingEnvsForSpringBootToServiceBusJms(authType internal.AuthType) (map[string]string, error) { + target := Target{Type: AzureServiceBus} + switch authType { + case internal.AuthTypeUserAssignedManagedIdentity: + return map[string]string{ + "spring.jms.servicebus.pricing-tier": "premium", + "spring.jms.servicebus.passwordless-enabled": "true", + "spring.jms.servicebus.credential.managed-identity-enabled": "true", + "spring.jms.servicebus.credential.client-id": SourceUserAssignedManagedIdentityClientId, + "spring.jms.servicebus.namespace": ToBindingEnv(target, InfoTypeNamespace), + "spring.jms.servicebus.connection-string": "", + }, nil + case internal.AuthTypeConnectionString: + return map[string]string{ + "spring.jms.servicebus.pricing-tier": "premium", + "spring.jms.servicebus.passwordless-enabled": "false", + "spring.jms.servicebus.credential.managed-identity-enabled": "false", + "spring.jms.servicebus.credential.client-id": "", + "spring.jms.servicebus.namespace": "", + "spring.jms.servicebus.connection-string": ToBindingEnv(target, InfoTypeConnectionString), + }, nil + default: + return nil, unsupportedAuthTypeError(AzureServiceBus, authType) + } +} + +func GetBindingEnvsForSpringBootToServiceBusNotJms(authType internal.AuthType) (map[string]string, error) { + target := Target{Type: AzureServiceBus} + switch authType { + case internal.AuthTypeUserAssignedManagedIdentity: + return map[string]string{ + // Not add this: spring.cloud.azure.servicebus.connection-string = "" + // because of this: https://github.com/Azure/azure-sdk-for-java/issues/42880 + "spring.cloud.azure.servicebus.credential.managed-identity-enabled": "true", + "spring.cloud.azure.servicebus.credential.client-id": SourceUserAssignedManagedIdentityClientId, + "spring.cloud.azure.servicebus.namespace": ToBindingEnv(target, + InfoTypeNamespace), + }, nil + case internal.AuthTypeConnectionString: + return map[string]string{ + "spring.cloud.azure.servicebus.namespace": ToBindingEnv(target, + InfoTypeNamespace), + "spring.cloud.azure.servicebus.connection-string": ToBindingEnv(target, + InfoTypeConnectionString), + "spring.cloud.azure.servicebus.credential.managed-identity-enabled": "false", + "spring.cloud.azure.servicebus.credential.client-id": "", + }, nil + default: + return nil, unsupportedAuthTypeError(AzureServiceBus, authType) + } +} + +func GetBindingEnvsForSpringBootToEventHubsKafka(springBootVersion string, + authType internal.AuthType) (map[string]string, error) { + target := Target{Type: AzureEventHubs} + var springBootVersionDecidedBindingEnv = make(map[string]string) + if strings.HasPrefix(springBootVersion, "2.") { + springBootVersionDecidedBindingEnv["spring.cloud.stream.binders.kafka.environment.spring.main.sources"] = + "com.azure.spring.cloud.autoconfigure.eventhubs.kafka.AzureEventHubsKafkaAutoConfiguration" + } else { + 
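A short usage sketch, assuming the binding package builds as shown in this patch, for the common case handled by GetBindingEnvsForSpringBoot above: a Spring Boot service bound to Azure Database for PostgreSQL with a user-assigned managed identity. The printed values are the ${binding:...} placeholders that get substituted later; only a few of the returned keys are listed in the trailing comment.

package main

import (
	"fmt"

	"github.com/azure/azure-dev/cli/azd/internal"
	"github.com/azure/azure-dev/cli/azd/internal/binding"
)

func main() {
	source := binding.Source{Type: binding.SpringBoot}
	target := binding.Target{
		Type:     binding.AzureDatabaseForPostgresql,
		AuthType: internal.AuthTypeUserAssignedManagedIdentity,
	}

	// Dispatches through GetBindingEnvsForSpringBoot because the source type is SpringBoot.
	envs, err := binding.GetBindingEnvs(source, target)
	if err != nil {
		panic(err)
	}
	for key, value := range envs {
		fmt.Printf("%s=%s\n", key, value)
	}
	// Among the returned pairs:
	//   spring.datasource.url=${binding:azure.db.postgresql::jdbcUrl}
	//   spring.datasource.azure.passwordless-enabled=true
	//   spring.cloud.azure.credential.client-id=${binding:*:*:sourceUserAssignedManagedIdentityClientId}
}
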
springBootVersionDecidedBindingEnv["spring.cloud.stream.binders.kafka.environment.spring.main.sources"] = + "com.azure.spring.cloud.autoconfigure.implementation.eventhubs.kafka" + + ".AzureEventHubsKafkaAutoConfiguration" + } + var commonInformation map[string]string + switch authType { + case internal.AuthTypeUserAssignedManagedIdentity: + commonInformation = map[string]string{ + // Not add this: spring.cloud.azure.servicebus.connection-string = "" + // because of this: https://github.com/Azure/azure-sdk-for-java/issues/42880 + "spring.cloud.stream.kafka.binder.brokers": ToBindingEnv(target, InfoTypeEndpoint), + "spring.cloud.azure.eventhubs.credential.managed-identity-enabled": "true", + "spring.cloud.azure.eventhubs.credential.client-id": SourceUserAssignedManagedIdentityClientId, + } + case internal.AuthTypeConnectionString: + commonInformation = map[string]string{ + "spring.cloud.stream.kafka.binder.brokers": ToBindingEnv(target, InfoTypeEndpoint), + "spring.cloud.azure.eventhubs.connection-string": ToBindingEnv(target, + InfoTypeConnectionString), + "spring.cloud.azure.eventhubs.credential.managed-identity-enabled": "false", + "spring.cloud.azure.eventhubs.credential.client-id": "", + } + default: + return nil, unsupportedAuthTypeError(AzureEventHubs, authType) + } + return MergeMapWithDuplicationCheck(springBootVersionDecidedBindingEnv, commonInformation) +} + +func GetServiceBindingEnvsForEventHubs(authType internal.AuthType) (map[string]string, error) { + target := Target{Type: AzureEventHubs} + switch authType { + case internal.AuthTypeUserAssignedManagedIdentity: + return map[string]string{ + // Not add this: spring.cloud.azure.eventhubs.connection-string = "" + // because of this: https://github.com/Azure/azure-sdk-for-java/issues/42880 + "spring.cloud.azure.eventhubs.credential.managed-identity-enabled": "true", + "spring.cloud.azure.eventhubs.credential.client-id": SourceUserAssignedManagedIdentityClientId, + "spring.cloud.azure.eventhubs.namespace": ToBindingEnv(target, InfoTypeNamespace), + }, nil + case internal.AuthTypeConnectionString: + return map[string]string{ + "spring.cloud.azure.eventhubs.namespace": ToBindingEnv(target, InfoTypeNamespace), + "spring.cloud.azure.eventhubs.connection-string": ToBindingEnv(target, + InfoTypeConnectionString), + "spring.cloud.azure.eventhubs.credential.managed-identity-enabled": "false", + "spring.cloud.azure.eventhubs.credential.client-id": "", + }, nil + default: + return nil, unsupportedAuthTypeError(AzureEventHubs, authType) + } +} + +func GetServiceBindingEnvsForStorageAccount(authType internal.AuthType) (map[string]string, error) { + target := Target{Type: AzureStorageAccount} + switch authType { + case internal.AuthTypeUserAssignedManagedIdentity: + return map[string]string{ + "spring.cloud.azure.eventhubs.processor.checkpoint-store.account-name": ToBindingEnv( + target, InfoTypeAccountName), + "spring.cloud.azure.eventhubs.processor.checkpoint-store.credential.managed-identity-enabled": "true", + "spring.cloud.azure.eventhubs.processor.checkpoint-store.credential." 
+ + "client-id": SourceUserAssignedManagedIdentityClientId, + "spring.cloud.azure.eventhubs.processor.checkpoint-store.connection-string": "", + }, nil + case internal.AuthTypeConnectionString: + return map[string]string{ + "spring.cloud.azure.eventhubs.processor.checkpoint-store.account-name": ToBindingEnv( + target, InfoTypeAccountName), + "spring.cloud.azure.eventhubs.processor.checkpoint-store.connection-string": ToBindingEnv( + target, InfoTypeConnectionString), + "spring.cloud.azure.eventhubs.processor.checkpoint-store.credential.managed-identity-enabled": "false", + "spring.cloud.azure.eventhubs.processor.checkpoint-store.credential.client-id": "", + }, nil + default: + return nil, unsupportedAuthTypeError(AzureStorageAccount, authType) + } +} + +func GetServiceBindingEnvsForEurekaServer(eurekaServerName string) map[string]string { + return map[string]string{ + "eureka.client.register-with-eureka": "true", + "eureka.client.fetch-registry": "true", + "eureka.instance.prefer-ip-address": "true", + "eureka.client.serviceUrl.defaultZone": fmt.Sprintf("%s/eureka", + ToBindingEnv(Target{Type: AzureContainerApp, Name: eurekaServerName}, InfoTypeHost)), + } +} + +func GetServiceBindingEnvsForConfigServer(configServerName string) map[string]string { + return map[string]string{ + "spring.config.import": fmt.Sprintf("optional:configserver:%s?fail-fast=true", + ToBindingEnv(Target{Type: AzureContainerApp, Name: configServerName}, InfoTypeHost)), + } +} diff --git a/cli/azd/internal/binding/binding_test.go b/cli/azd/internal/binding/binding_test.go new file mode 100644 index 00000000000..9b7398a3291 --- /dev/null +++ b/cli/azd/internal/binding/binding_test.go @@ -0,0 +1,186 @@ +package binding + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestMergeMapWithDuplicationCheck(t *testing.T) { + empty := map[string]string{} + name1Value1 := map[string]string{ + "name1": "value1", + } + name1Value2 := map[string]string{ + "name1": "value2", + } + name2Value2 := map[string]string{ + "name2": "value2", + } + name1Value1Name2Value2 := map[string]string{ + "name1": "value1", + "name2": "value2", + } + + tests := []struct { + name string + a map[string]string + b map[string]string + expected map[string]string + expectedError error + }{ + { + name: "2 empty map", + a: empty, + b: empty, + expected: empty, + expectedError: nil, + }, + { + name: "one is empty, another is not", + a: empty, + b: name1Value1, + expected: name1Value1, + expectedError: nil, + }, + { + name: "no duplication", + a: name1Value1, + b: name2Value2, + expected: name1Value1Name2Value2, + expectedError: nil, + }, + { + name: "duplicated name but same value", + a: name1Value1, + b: name1Value1, + expected: name1Value1, + expectedError: nil, + }, + { + name: "duplicated name, different value", + a: name1Value1, + b: name1Value2, + expected: nil, + expectedError: fmt.Errorf("duplicated environment variable. 
existingValue = %s, newValue = %s", + "value1", "value2"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + env, err := MergeMapWithDuplicationCheck(tt.a, tt.b) + assert.Equal(t, tt.expected, env) + assert.Equal(t, tt.expectedError, err) + }) + } +} + +func TestToBindingEnv(t *testing.T) { + tests := []struct { + name string + target Target + infoType InfoType + want string + }{ + { + name: "postgres password", + target: Target{Type: AzureDatabaseForPostgresql}, + infoType: InfoTypePassword, + want: "${binding:azure.db.postgresql::password}", + }, + { + name: "mysql username", + target: Target{Type: AzureDatabaseForMysql}, + infoType: InfoTypeUsername, + want: "${binding:azure.db.mysql::username}", + }, + { + name: "mysql username", + target: Target{Type: AzureContainerApp, Name: "testApp"}, + infoType: InfoTypeHost, + want: "${binding:azure.host.containerapp:testApp:host}", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := ToBindingEnv(tt.target, tt.infoType) + assert.Equal(t, tt.want, actual) + }) + } +} + +func TestIsBindingEnvValue(t *testing.T) { + tests := []struct { + name string + input string + want bool + }{ + { + name: "valid - whole string", + input: "${binding:azure.db.postgresql::password}", + want: true, + }, + { + name: "valid - sub string", + input: "optional:configserver:${binding:azure.host.containerapp:testApp:host}?fail-fast=true", + want: true, + }, + { + name: "valid - SourceUserAssignedManagedIdentityClientId", + input: SourceUserAssignedManagedIdentityClientId, + want: true, + }, + { + name: "invalid - no target info type", + input: "${binding:azure.db.postgres::}", + want: false, + }, + { + name: "invalid - no required prefix and suffix.", + input: "binding:azure.db.postgresql::password", + want: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := IsBindingEnv(tt.input) + assert.Equal(t, tt.want, result) + }) + } +} + +func TestToTargetAndInfoType(t *testing.T) { + tests := []struct { + name string + input string + target Target + infoType InfoType + }{ + { + name: "invalid input", + input: "${binding:azure.db.mysql::username}", + target: Target{Type: AzureDatabaseForMysql}, + infoType: InfoTypeUsername, + }, + { + name: "postgres password", + input: "${binding:azure.db.postgresql::password}", + target: Target{Type: AzureDatabaseForPostgresql}, + infoType: InfoTypePassword, + }, + { + name: "mysql username", + input: "optional:configserver:${binding:azure.host.containerapp:testApp:host}?fail-fast=true", + target: Target{Type: AzureContainerApp, Name: "testApp"}, + infoType: InfoTypeHost, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + resourceType, resourceInfoType := ToTargetAndInfoType(tt.input) + assert.Equal(t, tt.target, resourceType) + assert.Equal(t, tt.infoType, resourceInfoType) + }) + } +} diff --git a/cli/azd/internal/cmd/add/add_configure.go b/cli/azd/internal/cmd/add/add_configure.go index fac15c5a0a8..887b64fe2fb 100644 --- a/cli/azd/internal/cmd/add/add_configure.go +++ b/cli/azd/internal/cmd/add/add_configure.go @@ -3,6 +3,7 @@ package add import ( "context" "fmt" + "github.com/azure/azure-dev/cli/azd/internal" "slices" "strings" "unicode" @@ -32,7 +33,7 @@ func configure( return fillAiModelName(ctx, r, console, p) case project.ResourceTypeDbPostgres, project.ResourceTypeDbMongo: - return fillDatabaseName(ctx, r, console, p) + return fillDatabaseNameAndAuthType(ctx, r, console, p) case 
project.ResourceTypeDbRedis: if _, exists := p.prj.Resources["redis"]; exists { return nil, fmt.Errorf("only one Redis resource is allowed at this time") @@ -45,7 +46,7 @@ func configure( } } -func fillDatabaseName( +func fillDatabaseNameAndAuthType( ctx context.Context, r *project.ResourceConfig, console input.Console, @@ -74,9 +75,77 @@ func fillDatabaseName( break } + // prompt for the database name + for { + databaseName, err := console.Prompt(ctx, input.ConsoleOptions{ + Message: fmt.Sprintf("Input the databaseName for %s "+ + "(Not databaseServerName. This url can explain the difference: "+ + "'jdbc:mysql://databaseServerName:3306/databaseName'):", r.Type.String()), + Help: "Hint: App database name\n\n" + + "Name of the database that the app connects to. " + + "This database will be created after running azd provision or azd up.", + }) + if err != nil { + return r, err + } + + if err := validateResourceName(databaseName, p.prj); err != nil { + console.Message(ctx, err.Error()) + continue + } + + switch r.Type { + case project.ResourceTypeDbPostgres: + modelProps, ok := r.Props.(project.PostgresProps) + if ok { + modelProps.DatabaseName = databaseName + r.Props = modelProps + } + case project.ResourceTypeDbMongo: + modelProps, ok := r.Props.(project.MongoDBProps) + if ok { + modelProps.DatabaseName = databaseName + r.Props = modelProps + } + } + break + } + + if r.Type == project.ResourceTypeDbPostgres { + modelProps, ok := r.Props.(project.PostgresProps) + if ok { + authType, err := chooseAuthTypeByPrompt(r.Name, []internal.AuthType{ + internal.AuthTypePassword, internal.AuthTypeUserAssignedManagedIdentity}, ctx, console) + if err != nil { + return r, err + } + modelProps.AuthType = authType + r.Props = modelProps + } + } + return r, nil } +func chooseAuthTypeByPrompt( + name string, + authOptions []internal.AuthType, + ctx context.Context, + console input.Console) (internal.AuthType, error) { + var options []string + for _, option := range authOptions { + options = append(options, internal.GetAuthTypeDescription(option)) + } + selection, err := console.Select(ctx, input.ConsoleOptions{ + Message: "Choose auth type for " + name + ":", + Options: options, + }) + if err != nil { + return internal.AuthTypeUnspecified, err + } + return authOptions[selection], nil +} + func fillAiModelName( ctx context.Context, r *project.ResourceConfig, diff --git a/cli/azd/internal/cmd/add/add_configure_host.go b/cli/azd/internal/cmd/add/add_configure_host.go index 367584d9c57..670cdba4bc9 100644 --- a/cli/azd/internal/cmd/add/add_configure_host.go +++ b/cli/azd/internal/cmd/add/add_configure_host.go @@ -245,7 +245,7 @@ func addServiceAsResource( } if props.Port == -1 { - port, err := repository.PromptPort(console, ctx, svc.Name, prj) + port, err := repository.GetOrPromptPort(console, ctx, svc.Name, prj) if err != nil { return nil, err } diff --git a/cli/azd/internal/cmd/add/add_select.go b/cli/azd/internal/cmd/add/add_select.go index 015be1d271b..14957093aef 100644 --- a/cli/azd/internal/cmd/add/add_select.go +++ b/cli/azd/internal/cmd/add/add_select.go @@ -54,5 +54,11 @@ func selectDatabase( } r.Type = resourceTypesDisplayMap[resourceTypesDisplay[dbOption]] + switch r.Type { + case project.ResourceTypeDbPostgres: + r.Props = project.PostgresProps{} + case project.ResourceTypeDbMongo: + r.Props = project.MongoDBProps{} + } return r, nil } diff --git a/cli/azd/internal/download_util.go b/cli/azd/internal/download_util.go new file mode 100644 index 00000000000..753e873b543 --- /dev/null +++ 
b/cli/azd/internal/download_util.go @@ -0,0 +1,43 @@ +package internal + +import ( + "context" + "fmt" + "io" + "log/slog" + "net/http" + "net/url" + "time" +) + +func Download(requestUrl string) ([]byte, error) { + parsedUrl, err := url.ParseRequestURI(requestUrl) + if err != nil { + return nil, err + } + if !isAllowedHost(parsedUrl.Host) { + return nil, fmt.Errorf("invalid host") + } + client := &http.Client{ + Timeout: 30 * time.Second, + Transport: &http.Transport{ + Proxy: http.ProxyFromEnvironment, + }, + } + slog.DebugContext(context.TODO(), "Downloading file.", "requestUrl", requestUrl, "err", err) + resp, err := client.Get(requestUrl) + if err != nil { + return nil, err + } + defer func(Body io.ReadCloser) { + err := Body.Close() + if err != nil { + slog.DebugContext(context.TODO(), "Failed to close http body.", "requestUrl", requestUrl, "err", err) + } + }(resp.Body) + return io.ReadAll(resp.Body) +} + +func isAllowedHost(host string) bool { + return host == "repo.maven.apache.org" +} diff --git a/cli/azd/internal/repository/app_init.go b/cli/azd/internal/repository/app_init.go index cb211bc14b1..27d292f1efe 100644 --- a/cli/azd/internal/repository/app_init.go +++ b/cli/azd/internal/repository/app_init.go @@ -6,10 +6,14 @@ import ( "maps" "os" "path/filepath" + "regexp" "slices" + "strconv" "strings" "time" + "github.com/azure/azure-dev/cli/azd/internal/binding" + "github.com/azure/azure-dev/cli/azd/internal" "github.com/azure/azure-dev/cli/azd/internal/appdetect" "github.com/azure/azure-dev/cli/azd/internal/names" @@ -39,11 +43,19 @@ var LanguageMap = map[appdetect.Language]project.ServiceLanguageKind{ var dbMap = map[appdetect.DatabaseDep]struct{}{ appdetect.DbMongo: {}, appdetect.DbPostgres: {}, + appdetect.DbMySql: {}, + appdetect.DbCosmos: {}, appdetect.DbRedis: {}, } var featureCompose = alpha.MustFeatureKey("compose") +var azureDepMap = map[string]struct{}{ + appdetect.AzureDepServiceBus{}.ResourceDisplay(): {}, + appdetect.AzureDepEventHubs{}.ResourceDisplay(): {}, + appdetect.AzureDepStorageAccount{}.ResourceDisplay(): {}, +} + // InitFromApp initializes the infra directory and project file from the current existing app. 
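A usage sketch for the Download helper above, assuming it is used as defined in this patch: only hosts accepted by isAllowedHost (currently just repo.maven.apache.org) are fetched, with a 30-second client timeout. The metadata URL below is only an example path on that host.

package main

import (
	"fmt"

	"github.com/azure/azure-dev/cli/azd/internal"
)

func main() {
	// Any host other than repo.maven.apache.org is rejected with "invalid host".
	data, err := internal.Download(
		"https://repo.maven.apache.org/maven2/org/springframework/boot/spring-boot-starter-parent/maven-metadata.xml")
	if err != nil {
		fmt.Println("download failed:", err)
		return
	}
	fmt.Printf("downloaded %d bytes\n", len(data))
}
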
func (i *Initializer) InitFromApp( ctx context.Context, @@ -71,7 +83,8 @@ func (i *Initializer) InitFromApp( prj, err := appdetect.Detect(ctx, wd, appdetect.WithExcludePatterns([]string{ "**/eng", "**/tool", - "**/tools"}, + "**/tools", + }, false)) if err != nil { i.console.StopSpinner(ctx, title, input.GetStepResultFormat(err)) @@ -120,10 +133,55 @@ func (i *Initializer) InitFromApp( i.console.StopSpinner(ctx, title, input.StepDone) var prjAppHost []appdetect.Project - for _, prj := range projects { + for index, prj := range projects { if prj.Language == appdetect.DotNetAppHost { prjAppHost = append(prjAppHost, prj) } + + if prj.Language == appdetect.Java { + var hasKafkaDep bool + for depIndex, dep := range prj.AzureDeps { + if eventHubs, ok := dep.(appdetect.AzureDepEventHubs); ok { + // prompt spring boot version if not detected for kafka + if eventHubs.UseKafka() { + hasKafkaDep = true + springBootVersion := eventHubs.SpringBootVersion + if springBootVersion == appdetect.UnknownSpringBootVersion { + springBootVersionInput, err := promptSpringBootVersion(i.console, ctx) + if err != nil { + return err + } + eventHubs.SpringBootVersion = springBootVersionInput + prj.AzureDeps[depIndex] = eventHubs + } + } + // prompt event hubs name if not detected + if len(eventHubs.EventHubsNamePropertyMap) == 0 { + promptMissingEventHubsNameOrExit(i.console, ctx, &eventHubs) + prj.AzureDeps[depIndex] = eventHubs + } + for property, eventHubsName := range eventHubs.EventHubsNamePropertyMap { + if eventHubsName == "" { + promptMissingPropertyAndExit(i.console, ctx, property) + } + } + } + if storageAccount, ok := dep.(appdetect.AzureDepStorageAccount); ok { + for property, containerName := range storageAccount.ContainerNamePropertyMap { + if containerName == "" { + promptMissingPropertyAndExit(i.console, ctx, property) + } + } + } + } + + if hasKafkaDep && !prj.Metadata.ContainsDependencySpringCloudAzureStarter { + err := processSpringCloudAzureDepByPrompt(i.console, ctx, &projects[index]) + if err != nil { + return err + } + } + } } if len(prjAppHost) > 1 { @@ -251,7 +309,7 @@ func (i *Initializer) InitFromApp( var infraSpec *scaffold.InfraSpec composeEnabled := i.features.IsEnabled(featureCompose) if !composeEnabled { // backwards compatibility - spec, err := i.infraSpecFromDetect(ctx, detect) + spec, err := i.infraSpecFromDetect(ctx, &detect) if err != nil { return err } @@ -268,7 +326,7 @@ func (i *Initializer) InitFromApp( title = "Generating " + output.WithHighLightFormat("./"+azdcontext.ProjectFileName) i.console.ShowSpinner(ctx, title, input.Step) - err = i.genProjectFile(ctx, azdCtx, detect, composeEnabled) + err = i.genProjectFile(ctx, azdCtx, &detect, infraSpec, composeEnabled) if err != nil { i.console.StopSpinner(ctx, title, input.GetStepResultFormat(err)) return err @@ -361,9 +419,10 @@ func (i *Initializer) genFromInfra( func (i *Initializer) genProjectFile( ctx context.Context, azdCtx *azdcontext.AzdContext, - detect detectConfirm, + detect *detectConfirm, + spec *scaffold.InfraSpec, addResources bool) error { - config, err := i.prjConfigFromDetect(ctx, azdCtx.ProjectDirectory(), detect, addResources) + config, err := i.prjConfigFromDetect(ctx, azdCtx.ProjectDirectory(), detect, spec, addResources) if err != nil { return fmt.Errorf("converting config: %w", err) } @@ -383,23 +442,147 @@ const InitGenTemplateId = "azd-init" func (i *Initializer) prjConfigFromDetect( ctx context.Context, root string, - detect detectConfirm, + detect *detectConfirm, + spec *scaffold.InfraSpec, addResources 
bool) (project.ProjectConfig, error) { config := project.ProjectConfig{ Name: azdcontext.ProjectName(root), Metadata: &project.ProjectMetadata{ Template: fmt.Sprintf("%s@%s", InitGenTemplateId, internal.VersionInfo().Version), }, - Services: map[string]*project.ServiceConfig{}, + Services: map[string]*project.ServiceConfig{}, + Resources: map[string]*project.ResourceConfig{}, + } + + var javaEurekaServerService project.ServiceConfig + var javaConfigServerService project.ServiceConfig + var err error + for _, svc := range detect.Services { + if svc.Metadata.ContainsDependencySpringCloudEurekaServer { + javaEurekaServerService, err = ServiceFromDetect(root, svc.Metadata.ApplicationName, svc) + if err != nil { + return config, err + } + } + if svc.Metadata.ContainsDependencySpringCloudConfigServer { + javaConfigServerService, err = ServiceFromDetect(root, svc.Metadata.ApplicationName, svc) + if err != nil { + return config, err + } + } } svcMapping := map[string]string{} for _, prj := range detect.Services { - svc, err := ServiceFromDetect(root, "", prj) + svc, err := ServiceFromDetect(root, prj.Metadata.ApplicationName, prj) if err != nil { return config, err } + if !addResources { + for _, db := range prj.DatabaseDeps { + switch db { + case appdetect.DbMongo: + config.Resources["mongo"] = &project.ResourceConfig{ + Type: project.ResourceTypeDbMongo, + Name: spec.DbCosmosMongo.DatabaseName, + Props: project.MongoDBProps{ + DatabaseName: spec.DbCosmosMongo.DatabaseName, + }, + } + case appdetect.DbPostgres: + config.Resources["postgres"] = &project.ResourceConfig{ + Type: project.ResourceTypeDbPostgres, + Name: spec.DbPostgres.DatabaseName, + Props: project.PostgresProps{ + DatabaseName: spec.DbPostgres.DatabaseName, + AuthType: spec.DbPostgres.AuthType, + }, + } + case appdetect.DbMySql: + config.Resources["mysql"] = &project.ResourceConfig{ + Type: project.ResourceTypeDbMySQL, + Props: project.MySQLProps{ + DatabaseName: spec.DbMySql.DatabaseName, + AuthType: spec.DbMySql.AuthType, + }, + } + case appdetect.DbRedis: + config.Resources["redis"] = &project.ResourceConfig{ + Type: project.ResourceTypeDbRedis, + } + case appdetect.DbCosmos: + cosmosDBProps := project.CosmosDBProps{ + DatabaseName: spec.DbCosmos.DatabaseName, + } + for _, container := range spec.DbCosmos.Containers { + cosmosDBProps.Containers = append(cosmosDBProps.Containers, project.CosmosDBContainerProps{ + ContainerName: container.ContainerName, + PartitionKeyPaths: container.PartitionKeyPaths, + }) + } + config.Resources["cosmos"] = &project.ResourceConfig{ + Type: project.ResourceTypeDbCosmos, + Props: cosmosDBProps, + } + } + + } + for _, azureDep := range prj.AzureDeps { + switch azureDep.(type) { + case appdetect.AzureDepServiceBus: + config.Resources["servicebus"] = &project.ResourceConfig{ + Type: project.ResourceTypeMessagingServiceBus, + Props: project.ServiceBusProps{ + Queues: spec.AzureServiceBus.Queues, + IsJms: spec.AzureServiceBus.IsJms, + AuthType: spec.AzureServiceBus.AuthType, + }, + } + case appdetect.AzureDepEventHubs: + if spec.AzureEventHubs.UseKafka { + config.Resources["kafka"] = &project.ResourceConfig{ + Type: project.ResourceTypeMessagingKafka, + Props: project.KafkaProps{ + Topics: spec.AzureEventHubs.EventHubNames, + AuthType: spec.AzureEventHubs.AuthType, + SpringBootVersion: spec.AzureEventHubs.SpringBootVersion, + }, + } + } else { + config.Resources["eventhubs"] = &project.ResourceConfig{ + Type: project.ResourceTypeMessagingEventHubs, + Props: project.EventHubsProps{ + EventHubNames: 
spec.AzureEventHubs.EventHubNames, + AuthType: spec.AzureEventHubs.AuthType, + }, + } + } + case appdetect.AzureDepStorageAccount: + config.Resources["storage"] = &project.ResourceConfig{ + Type: project.ResourceTypeStorage, + Props: project.StorageProps{ + Containers: spec.AzureStorageAccount.ContainerNames, + AuthType: spec.AzureStorageAccount.AuthType, + }, + } + + } + } + } + + if prj.Metadata.ContainsDependencySpringCloudEurekaClient { + err := appendJavaEurekaServerEnv(&svc, javaEurekaServerService.Name) + if err != nil { + return config, err + } + } + if prj.Metadata.ContainsDependencySpringCloudConfigClient { + err := appendJavaConfigServerEnv(&svc, javaConfigServerService.Name) + if err != nil { + return config, err + } + } config.Services[svc.Name] = &svc svcMapping[prj.Path] = svc.Name } @@ -414,45 +597,141 @@ func (i *Initializer) prjConfigFromDetect( }) for _, database := range databases { + var resourceConfig project.ResourceConfig + var databaseName string if database == appdetect.DbRedis { - redis := project.ResourceConfig{ - Type: project.ResourceTypeDbRedis, - Name: "redis", + databaseName = "redis" + } else { + var err error + databaseName, err = getDatabaseName(database, detect, i.console, ctx) + if err != nil { + return config, err + } + } + var authType = internal.AuthTypeUnspecified + if database == appdetect.DbPostgres || database == appdetect.DbMySql { + var err error + authType, err = chooseAuthTypeByPrompt( + database.Display(), + []internal.AuthType{internal.AuthTypeUserAssignedManagedIdentity, internal.AuthTypePassword}, + ctx, + i.console) + if err != nil { + return config, err + } + continueProvision, err := checkPasswordlessConfigurationAndContinueProvision(database, authType, detect, + i.console, ctx) + if err != nil { + return config, err + } + if !continueProvision { + continue } - config.Resources[redis.Name] = &redis - dbNames[database] = redis.Name - continue } - - var dbType project.ResourceType switch database { + case appdetect.DbRedis: + resourceConfig = project.ResourceConfig{ + Type: project.ResourceTypeDbRedis, + Name: "redis", + } case appdetect.DbMongo: - dbType = project.ResourceTypeDbMongo + resourceConfig = project.ResourceConfig{ + Type: project.ResourceTypeDbMongo, + Name: "mongo", + Props: project.MongoDBProps{ + DatabaseName: databaseName, + }, + } + case appdetect.DbCosmos: + cosmosDBProps := project.CosmosDBProps{ + DatabaseName: databaseName, + } + containers, err := detectCosmosSqlDatabaseContainersInDirectory(detect.root) + if err != nil { + return config, err + } + for _, container := range containers { + cosmosDBProps.Containers = append(cosmosDBProps.Containers, project.CosmosDBContainerProps{ + ContainerName: container.ContainerName, + PartitionKeyPaths: container.PartitionKeyPaths, + }) + } + resourceConfig = project.ResourceConfig{ + Type: project.ResourceTypeDbCosmos, + Name: "cosmos", + Props: cosmosDBProps, + } case appdetect.DbPostgres: - dbType = project.ResourceTypeDbPostgres + resourceConfig = project.ResourceConfig{ + Type: project.ResourceTypeDbPostgres, + Name: "postgresql", + Props: project.PostgresProps{ + DatabaseName: databaseName, + AuthType: authType, + }, + } + case appdetect.DbMySql: + resourceConfig = project.ResourceConfig{ + Type: project.ResourceTypeDbMySQL, + Name: "mysql", + Props: project.MySQLProps{ + DatabaseName: databaseName, + AuthType: authType, + }, + } } + config.Resources[resourceConfig.Name] = &resourceConfig + dbNames[database] = resourceConfig.Name + } - db := project.ResourceConfig{ - 
Type: dbType, + for _, azureDepPair := range detect.AzureDeps { + azureDep := azureDepPair.first + authType, err := chooseAuthTypeByPrompt( + azureDep.ResourceDisplay(), + []internal.AuthType{internal.AuthTypeUserAssignedManagedIdentity, internal.AuthTypeConnectionString}, + ctx, + i.console) + if err != nil { + return config, err } - - for { - dbName, err := promptDbName(i.console, ctx, database) - if err != nil { - return config, err + switch azureDep := azureDep.(type) { + case appdetect.AzureDepServiceBus: + config.Resources["servicebus"] = &project.ResourceConfig{ + Type: project.ResourceTypeMessagingServiceBus, + Props: project.ServiceBusProps{ + Queues: azureDep.Queues, + IsJms: azureDep.IsJms, + AuthType: authType, + }, } - - if dbName == "" { - i.console.Message(ctx, "Database name is required.") - continue + case appdetect.AzureDepEventHubs: + if azureDep.UseKafka() { + config.Resources["kafka"] = &project.ResourceConfig{ + Type: project.ResourceTypeMessagingKafka, + Props: project.KafkaProps{ + Topics: appdetect.DistinctValues(azureDep.EventHubsNamePropertyMap), + AuthType: authType, + SpringBootVersion: azureDep.SpringBootVersion, + }, + } + } else { + config.Resources["eventhubs"] = &project.ResourceConfig{ + Type: project.ResourceTypeMessagingEventHubs, + Props: project.EventHubsProps{ + EventHubNames: appdetect.DistinctValues(azureDep.EventHubsNamePropertyMap), + AuthType: authType, + }, + } + } + case appdetect.AzureDepStorageAccount: + config.Resources["storage"] = &project.ResourceConfig{ + Type: project.ResourceTypeStorage, + Props: project.StorageProps{ + Containers: appdetect.DistinctValues(azureDep.ContainerNamePropertyMap), + AuthType: authType, + }, } - - db.Name = dbName - break } - - config.Resources[db.Name] = &db - dbNames[database] = db.Name } backends := []*project.ResourceConfig{} @@ -468,12 +747,21 @@ func (i *Initializer) prjConfigFromDetect( Port: -1, } - port, err := PromptPort(i.console, ctx, name, svc) + port, err := GetOrPromptPort(i.console, ctx, name, svc) if err != nil { return config, err } props.Port = port + if svc.Metadata.ContainsDependencySpringCloudEurekaClient && + javaEurekaServerService.Name != "" { + resSpec.Uses = append(resSpec.Uses, javaEurekaServerService.Name) + } + if svc.Metadata.ContainsDependencySpringCloudConfigClient && + javaConfigServerService.Name != "" { + resSpec.Uses = append(resSpec.Uses, javaConfigServerService.Name) + } + for _, db := range svc.DatabaseDeps { // filter out databases that were removed if _, ok := detect.Databases[db]; !ok { @@ -483,6 +771,21 @@ func (i *Initializer) prjConfigFromDetect( resSpec.Uses = append(resSpec.Uses, dbNames[db]) } + for _, azureDep := range svc.AzureDeps { + switch azureDep := azureDep.(type) { + case appdetect.AzureDepServiceBus: + resSpec.Uses = append(resSpec.Uses, "servicebus") + case appdetect.AzureDepEventHubs: + if azureDep.UseKafka() { + resSpec.Uses = append(resSpec.Uses, "kafka") + } else { + resSpec.Uses = append(resSpec.Uses, "eventhubs") + } + case appdetect.AzureDepStorageAccount: + resSpec.Uses = append(resSpec.Uses, "storage") + } + } + resSpec.Name = name resSpec.Props = props config.Resources[name] = &resSpec @@ -505,13 +808,130 @@ func (i *Initializer) prjConfigFromDetect( return config, nil } +func checkPasswordlessConfigurationAndContinueProvision(database appdetect.DatabaseDep, authType internal.AuthType, + detect *detectConfirm, console input.Console, ctx context.Context) (bool, error) { + if authType != internal.AuthTypeUserAssignedManagedIdentity { + 
return true, nil + } + for i, prj := range detect.Services { + if lackedDep := lackedAzureStarterJdbcDependency(prj, database); lackedDep != "" { + message := fmt.Sprintf("\nError!\n"+ + "You selected '%s' as auth type for '%s'.\n"+ + "For this auth type, this dependency is required:\n"+ + "%s\n"+ + "But this dependency is not found in your project:\n"+ + "%s", + internal.AuthTypeUserAssignedManagedIdentity, database, lackedDep, prj.Path) + continueOption, err := console.Select(ctx, input.ConsoleOptions{ + Message: fmt.Sprintf("%s\nSelect an option:", message), + Options: []string{ + "Exit azd and fix problem manually", + fmt.Sprintf("Continue azd and use %s in this project: %s", database.Display(), prj.Path), + fmt.Sprintf("Continue azd and not use %s in this project: %s", database.Display(), prj.Path), + }, + }) + if err != nil { + return false, err + } + + switch continueOption { + case 0: + os.Exit(0) + case 1: + continue + case 2: + // remove related database usage + var result []appdetect.DatabaseDep + for _, db := range prj.DatabaseDeps { + if db != database { + result = append(result, db) + } + } + prj.DatabaseDeps = result + detect.Services[i] = prj + // delete database if no other service used + dbUsed := false + for _, svc := range detect.Services { + for _, db := range svc.DatabaseDeps { + if db == database { + dbUsed = true + break + } + } + if dbUsed { + break + } + } + if !dbUsed { + console.Message(ctx, fmt.Sprintf( + "Deleting database %s due to no service used", database.Display())) + delete(detect.Databases, database) + return false, nil + } + } + } + } + return true, nil +} + +func lackedAzureStarterJdbcDependency(project appdetect.Project, database appdetect.DatabaseDep) string { + if project.Language != appdetect.Java { + return "" + } + + useDatabase := false + for _, db := range project.DatabaseDeps { + if db == database { + useDatabase = true + break + } + } + if !useDatabase { + return "" + } + if database == appdetect.DbMySql && !project.Metadata.ContainsDependencySpringCloudAzureStarterJdbcMysql { + return "\n" + + " com.azure.spring\n" + + " spring-cloud-azure-starter-jdbc-mysql\n" + + " xxx\n" + + "" + } + if database == appdetect.DbPostgres && !project.Metadata.ContainsDependencySpringCloudAzureStarterJdbcPostgresql { + return "\n" + + " com.azure.spring\n" + + " spring-cloud-azure-starter-jdbc-postgresql\n" + + " xxx\n" + + "" + } + return "" +} + +func chooseAuthTypeByPrompt( + name string, + authOptions []internal.AuthType, + ctx context.Context, + console input.Console) (internal.AuthType, error) { + var options []string + for _, option := range authOptions { + options = append(options, internal.GetAuthTypeDescription(option)) + } + selection, err := console.Select(ctx, input.ConsoleOptions{ + Message: "Choose auth type for " + name + ":", + Options: options, + }) + if err != nil { + return internal.AuthTypeUnspecified, err + } + return authOptions[selection], nil +} + // ServiceFromDetect creates a ServiceConfig from an appdetect project. 
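For reference, the Spring Cloud Azure JDBC starter that lackedAzureStarterJdbcDependency asks the user to add for managed-identity access carries the coordinates com.azure.spring:spring-cloud-azure-starter-jdbc-mysql (or spring-cloud-azure-starter-jdbc-postgresql for PostgreSQL). A sketch of the full Maven dependency block for the MySQL case, with the version placeholder kept as in the prompt and the constant name invented for this example:

package main

import "fmt"

// Sketch only: the Maven dependency the prompt refers to for
// AuthTypeUserAssignedManagedIdentity with MySQL; pick the real version from the
// Spring Versions Mapping page referenced elsewhere in this patch.
const springCloudAzureJdbcMysql = `<dependency>
    <groupId>com.azure.spring</groupId>
    <artifactId>spring-cloud-azure-starter-jdbc-mysql</artifactId>
    <version>xxx</version>
</dependency>`

func main() {
	fmt.Println(springCloudAzureJdbcMysql)
}
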
func ServiceFromDetect( root string, svcName string, prj appdetect.Project) (project.ServiceConfig, error) { svc := project.ServiceConfig{ - Name: svcName, + Name: names.LabelName(svcName), } rel, err := filepath.Rel(root, prj.Path) if err != nil { @@ -537,6 +957,10 @@ func ServiceFromDetect( svc.Language = language + if parentPath, ok := prj.Options[appdetect.JavaProjectOptionParentPomDir].(string); ok && parentPath != "" { + svc.ParentPath = parentPath + } + if prj.Docker != nil { relDocker, err := filepath.Rel(prj.Path, prj.Docker.Path) if err != nil { @@ -572,9 +996,169 @@ func ServiceFromDetect( // angular uses dist/ svc.OutputPath = "dist/" + filepath.Base(rel) break loop + case appdetect.SpringFrontend: + svc.OutputPath = "" + break loop } } } return svc, nil } + +func processSpringCloudAzureDepByPrompt(console input.Console, ctx context.Context, project *appdetect.Project) error { + continueOption, err := console.Select(ctx, input.ConsoleOptions{ + Message: "Detected Kafka dependency but no spring-cloud-azure-starter found. Select an option", + Options: []string{ + "Exit then I will manually add this dependency", + "Continue without this dependency, and provision Azure Event Hubs for Kafka", + "Continue without this dependency, and not provision Azure Event Hubs for Kafka", + }, + }) + if err != nil { + return err + } + + switch continueOption { + case 0: + console.Message(ctx, "you have to manually add dependency com.azure.spring:spring-cloud-azure-starter. "+ + "And use right version according to this page: "+ + "https://github.com/Azure/azure-sdk-for-java/wiki/Spring-Versions-Mapping") + os.Exit(0) + case 1: + return nil + case 2: + // remove Kafka Azure Dep + var result []appdetect.AzureDep + for _, dep := range project.AzureDeps { + if eventHubs, ok := dep.(appdetect.AzureDepEventHubs); !(ok && eventHubs.UseKafka()) { + result = append(result, dep) + } + } + project.AzureDeps = result + return nil + } + return nil +} + +func promptSpringBootVersion(console input.Console, ctx context.Context) (string, error) { + selection, err := console.Select(ctx, input.ConsoleOptions{ + Message: "No spring boot version detected, what is your spring boot version?", + Options: []string{ + "Spring Boot 2.x", + "Spring Boot 3.x", + }, + }) + if err != nil { + return "", err + } + + switch selection { + case 0: + return "2.x", nil + case 1: + return "3.x", nil + default: + return appdetect.UnknownSpringBootVersion, nil + } +} + +func promptMissingEventHubsNameOrExit(console input.Console, ctx context.Context, eventHubs *appdetect.AzureDepEventHubs) { + for _, dependencyType := range eventHubs.DependencyTypes { + switch dependencyType { + case appdetect.SpringIntegrationEventHubs, appdetect.SpringMessagingEventHubs, appdetect.SpringKafka: + eventHubsNames, err := promptEventHubsNames(console, ctx) + if err != nil { + console.Message(ctx, fmt.Sprintf("Error happened when prompt eventhubs name: %s.", err)) + os.Exit(-1) + } + for i, eventHubsName := range eventHubsNames { + propertyName := string(dependencyType) + strconv.Itoa(i) + eventHubs.EventHubsNamePropertyMap[propertyName] = eventHubsName + } + case appdetect.SpringCloudStreamEventHubs, appdetect.SpringCloudStreamKafka: + promptMissingPropertyAndExit(console, ctx, "spring.cloud.stream.bindings..destination") + os.Exit(0) + case appdetect.SpringCloudEventHubsStarter: + promptMissingPropertyAndExit(console, ctx, "spring.cloud.azure.eventhubs.event-hub-name or "+ + "spring.cloud.azure.eventhubs.[producer|consumer|processor].event-hub-name") + 
os.Exit(0) + } + } +} + +func promptMissingPropertyAndExit(console input.Console, ctx context.Context, key string) { + console.Message(ctx, fmt.Sprintf("No value was provided for %s. Please update the configuration file "+ + "(like application.properties or application.yaml) with a valid value.", key)) + os.Exit(0) +} + +// todo: delete this after we implement to detect eventhubs names from code +func promptEventHubsNames(console input.Console, ctx context.Context) ([]string, error) { + for { + eventHubsNamesInput, err := console.Prompt(ctx, input.ConsoleOptions{ + Message: "Input the names of Azure Event Hubs (not the namespace name), " + + "if you have multiple ones, separate with commas:", + Help: "Hint: Azure Event Hubs Name, not the namespace name", + }) + if err != nil { + return []string{}, err + } + eventHubsNames := strings.Split(eventHubsNamesInput, ",") + allValidEventHubsNames := true + for i, eventHubsName := range eventHubsNames { + eventHubsNames[i] = strings.TrimSpace(eventHubsName) + if !isValidEventhubsName(eventHubsNames[i]) { + console.Message(ctx, "Invalid eventhubs name. it should contain letters, numbers, periods (.), "+ + "hyphens (-), underscores (_), must begin and end with a letter or number. Please choose another name:") + allValidEventHubsNames = false + break + } + } + if allValidEventHubsNames { + return eventHubsNames, nil + } + } +} + +// contain letters, numbers, periods (.), hyphens (-), and underscores (_) +// must begin and end with a letter or number +var eventHubsNameRegex = regexp.MustCompile(`^[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9]$`) + +func isValidEventhubsName(name string) bool { + // up to 256 characters + if len(name) == 0 || len(name) > 256 { + return false + } + return eventHubsNameRegex.MatchString(name) +} + +func appendJavaEurekaServerEnv(svc *project.ServiceConfig, eurekaServerName string) error { + if eurekaServerName == "" { + // eureka server not found, maybe removed when detect confirm + return nil + } + if svc.Env == nil { + svc.Env = map[string]string{} + } + clientEnvs := binding.GetServiceBindingEnvsForEurekaServer(eurekaServerName) + for key, value := range clientEnvs { + svc.Env[key] = value + } + return nil +} + +func appendJavaConfigServerEnv(svc *project.ServiceConfig, configServerName string) error { + if configServerName == "" { + // config server not found, maybe removed when detect confirm + return nil + } + if svc.Env == nil { + svc.Env = map[string]string{} + } + clientEnvs := binding.GetServiceBindingEnvsForConfigServer(configServerName) + for key, value := range clientEnvs { + svc.Env[key] = value + } + return nil +} diff --git a/cli/azd/internal/repository/app_init_test.go b/cli/azd/internal/repository/app_init_test.go index 6a0b7a7dac6..86ad0f889dd 100644 --- a/cli/azd/internal/repository/app_init_test.go +++ b/cli/azd/internal/repository/app_init_test.go @@ -10,6 +10,7 @@ import ( "github.com/azure/azure-dev/cli/azd/internal" "github.com/azure/azure-dev/cli/azd/internal/appdetect" + "github.com/azure/azure-dev/cli/azd/internal/scaffold" "github.com/azure/azure-dev/cli/azd/pkg/input" "github.com/azure/azure-dev/cli/azd/pkg/project" "github.com/stretchr/testify/require" @@ -97,13 +98,7 @@ func TestInitializer_prjConfigFromDetect(t *testing.T) { }, }, }, - interactions: []string{ - // prompt for port -- hit multiple validation cases - "notAnInteger", - "-2", - "65536", - "1234", - }, + interactions: []string{}, want: project.ProjectConfig{ Services: map[string]*project.ServiceConfig{ "dotnet": { @@ -117,10 +112,618 @@ 
func TestInitializer_prjConfigFromDetect(t *testing.T) { }, Resources: map[string]*project.ResourceConfig{ "dotnet": { + Type: project.ResourceTypeHostContainerApp, + Name: "dotnet", + Props: project.ContainerAppProps{}, + }, + }, + }, + }, + { + name: "api with storage umi", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepStorageAccount{ + ContainerNamePropertyMap: map[string]string{ + "spring.cloud.azure.container": "container1", + }, + }, + }, + }, + }, + AzureDeps: map[string]Pair{ + appdetect.AzureDepStorageAccount{}.ResourceDisplay(): { + appdetect.AzureDepStorageAccount{ + ContainerNamePropertyMap: map[string]string{ + "spring.cloud.azure.container": "container1", + }, + }, EntryKindDetected, + }, + }, + }, + interactions: []string{ + // prompt for auth type + "User assigned managed identity", + }, + want: project.ProjectConfig{ + Services: map[string]*project.ServiceConfig{ + "java": { + Language: project.ServiceLanguageJava, + Host: project.ContainerAppTarget, + RelativePath: "java", + }, + }, + Resources: map[string]*project.ResourceConfig{ + "java": { Type: project.ResourceTypeHostContainerApp, - Name: "dotnet", + Name: "java", + Props: project.ContainerAppProps{ + Port: 8080, + }, + Uses: []string{"storage"}, + }, + "storage": { + Type: project.ResourceTypeStorage, + Props: project.StorageProps{ + Containers: []string{"container1"}, + AuthType: internal.AuthTypeUserAssignedManagedIdentity, + }, + }, + }, + }, + }, + { + name: "api with storage connection string", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepStorageAccount{ + ContainerNamePropertyMap: map[string]string{ + "spring.cloud.azure.container": "container1", + }, + }, + }, + }, + }, + AzureDeps: map[string]Pair{ + appdetect.AzureDepStorageAccount{}.ResourceDisplay(): { + appdetect.AzureDepStorageAccount{ + ContainerNamePropertyMap: map[string]string{ + "spring.cloud.azure.container": "container1", + }, + }, EntryKindDetected, + }, + }, + }, + interactions: []string{ + // prompt for auth type + "Connection string", + }, + want: project.ProjectConfig{ + Services: map[string]*project.ServiceConfig{ + "java": { + Language: project.ServiceLanguageJava, + Host: project.ContainerAppTarget, + RelativePath: "java", + }, + }, + Resources: map[string]*project.ResourceConfig{ + "java": { + Type: project.ResourceTypeHostContainerApp, + Name: "java", + Props: project.ContainerAppProps{ + Port: 8080, + }, + Uses: []string{"storage"}, + }, + "storage": { + Type: project.ResourceTypeStorage, + Props: project.StorageProps{ + Containers: []string{"container1"}, + AuthType: internal.AuthTypeConnectionString, + }, + }, + }, + }, + }, + { + name: "api with service bus umi", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepServiceBus{ + Queues: []string{"queue1"}, + IsJms: true, + }, + }, + }, + }, + AzureDeps: map[string]Pair{ + appdetect.AzureDepServiceBus{}.ResourceDisplay(): { + appdetect.AzureDepServiceBus{ + Queues: []string{"queue1"}, + IsJms: true, + }, EntryKindDetected, + }, + }, + }, + interactions: []string{ + // prompt for auth type + "User assigned managed identity", + }, + want: project.ProjectConfig{ + Services: map[string]*project.ServiceConfig{ + "java": { + Language: 
project.ServiceLanguageJava, + Host: project.ContainerAppTarget, + RelativePath: "java", + }, + }, + Resources: map[string]*project.ResourceConfig{ + "java": { + Type: project.ResourceTypeHostContainerApp, + Name: "java", + Props: project.ContainerAppProps{ + Port: 8080, + }, + Uses: []string{"servicebus"}, + }, + "servicebus": { + Type: project.ResourceTypeMessagingServiceBus, + Props: project.ServiceBusProps{ + Queues: []string{"queue1"}, + IsJms: true, + AuthType: internal.AuthTypeUserAssignedManagedIdentity, + }, + }, + }, + }, + }, + { + name: "api with service bus connection string", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepServiceBus{ + Queues: []string{"queue1"}, + IsJms: true, + }, + }, + }, + }, + AzureDeps: map[string]Pair{ + appdetect.AzureDepServiceBus{}.ResourceDisplay(): { + appdetect.AzureDepServiceBus{ + Queues: []string{"queue1"}, + IsJms: true, + }, EntryKindDetected, + }, + }, + }, + interactions: []string{ + // prompt for auth type + "Connection string", + }, + want: project.ProjectConfig{ + Services: map[string]*project.ServiceConfig{ + "java": { + Language: project.ServiceLanguageJava, + Host: project.ContainerAppTarget, + RelativePath: "java", + }, + }, + Resources: map[string]*project.ResourceConfig{ + "java": { + Type: project.ResourceTypeHostContainerApp, + Name: "java", + Props: project.ContainerAppProps{ + Port: 8080, + }, + Uses: []string{"servicebus"}, + }, + "servicebus": { + Type: project.ResourceTypeMessagingServiceBus, + Props: project.ServiceBusProps{ + Queues: []string{"queue1"}, + IsJms: true, + AuthType: internal.AuthTypeConnectionString, + }, + }, + }, + }, + }, + { + name: "api with event hubs umi", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepEventHubs{ + EventHubsNamePropertyMap: map[string]string{ + "spring.cloud.azure.eventhubs": "eventhub1", + }, + }, + }, + }, + }, + AzureDeps: map[string]Pair{ + appdetect.AzureDepEventHubs{}.ResourceDisplay(): { + appdetect.AzureDepEventHubs{ + EventHubsNamePropertyMap: map[string]string{ + "spring.cloud.azure.eventhubs": "eventhub1", + }, + }, EntryKindDetected, + }, + }, + }, + interactions: []string{ + // prompt for auth type + "User assigned managed identity", + }, + want: project.ProjectConfig{ + Services: map[string]*project.ServiceConfig{ + "java": { + Language: project.ServiceLanguageJava, + Host: project.ContainerAppTarget, + RelativePath: "java", + }, + }, + Resources: map[string]*project.ResourceConfig{ + "java": { + Type: project.ResourceTypeHostContainerApp, + Name: "java", + Props: project.ContainerAppProps{ + Port: 8080, + }, + Uses: []string{"eventhubs"}, + }, + "eventhubs": { + Type: project.ResourceTypeMessagingEventHubs, + Props: project.EventHubsProps{ + EventHubNames: []string{"eventhub1"}, + AuthType: internal.AuthTypeUserAssignedManagedIdentity, + }, + }, + }, + }, + }, + { + name: "api with event hubs connection string", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepEventHubs{ + EventHubsNamePropertyMap: map[string]string{ + "spring.cloud.azure.eventhubs": "eventhub1", + }, + }, + }, + }, + }, + AzureDeps: map[string]Pair{ + appdetect.AzureDepEventHubs{}.ResourceDisplay(): { + appdetect.AzureDepEventHubs{ + EventHubsNamePropertyMap: 
map[string]string{ + "spring.cloud.azure.eventhubs": "eventhub1", + }, + }, EntryKindDetected, + }, + }, + }, + interactions: []string{ + // prompt for auth type + "Connection string", + }, + want: project.ProjectConfig{ + Services: map[string]*project.ServiceConfig{ + "java": { + Language: project.ServiceLanguageJava, + Host: project.ContainerAppTarget, + RelativePath: "java", + }, + }, + Resources: map[string]*project.ResourceConfig{ + "java": { + Type: project.ResourceTypeHostContainerApp, + Name: "java", + Props: project.ContainerAppProps{ + Port: 8080, + }, + Uses: []string{"eventhubs"}, + }, + "eventhubs": { + Type: project.ResourceTypeMessagingEventHubs, + Props: project.EventHubsProps{ + EventHubNames: []string{"eventhub1"}, + AuthType: internal.AuthTypeConnectionString, + }, + }, + }, + }, + }, + { + name: "api with event hubs kafka umi", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepEventHubs{ + EventHubsNamePropertyMap: map[string]string{ + "spring.kafka.topic": "topic1", + }, + DependencyTypes: []appdetect.DependencyType{appdetect.SpringKafka}, + SpringBootVersion: "3.4.0", + }, + }, + }, + }, + AzureDeps: map[string]Pair{ + appdetect.AzureDepEventHubs{}.ResourceDisplay(): { + appdetect.AzureDepEventHubs{ + EventHubsNamePropertyMap: map[string]string{ + "spring.kafka.topic": "topic1", + }, + DependencyTypes: []appdetect.DependencyType{appdetect.SpringKafka}, + SpringBootVersion: "3.4.0", + }, EntryKindDetected, + }, + }, + }, + interactions: []string{ + // prompt for auth type + "User assigned managed identity", + }, + want: project.ProjectConfig{ + Services: map[string]*project.ServiceConfig{ + "java": { + Language: project.ServiceLanguageJava, + Host: project.ContainerAppTarget, + RelativePath: "java", + }, + }, + Resources: map[string]*project.ResourceConfig{ + "java": { + Type: project.ResourceTypeHostContainerApp, + Name: "java", + Props: project.ContainerAppProps{ + Port: 8080, + }, + Uses: []string{"kafka"}, + }, + "kafka": { + Type: project.ResourceTypeMessagingKafka, + Props: project.KafkaProps{ + Topics: []string{"topic1"}, + AuthType: internal.AuthTypeUserAssignedManagedIdentity, + SpringBootVersion: "3.4.0", + }, + }, + }, + }, + }, + { + name: "api with event hubs kafka connection string", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepEventHubs{ + EventHubsNamePropertyMap: map[string]string{ + "spring.kafka.topic": "topic1", + }, + DependencyTypes: []appdetect.DependencyType{appdetect.SpringKafka}, + SpringBootVersion: "3.4.0", + }, + }, + }, + }, + AzureDeps: map[string]Pair{ + appdetect.AzureDepEventHubs{}.ResourceDisplay(): { + appdetect.AzureDepEventHubs{ + EventHubsNamePropertyMap: map[string]string{ + "spring.kafka.topic": "topic1", + }, + DependencyTypes: []appdetect.DependencyType{appdetect.SpringKafka}, + SpringBootVersion: "3.4.0", + }, EntryKindDetected, + }, + }, + }, + interactions: []string{ + // prompt for auth type + "Connection string", + }, + want: project.ProjectConfig{ + Services: map[string]*project.ServiceConfig{ + "java": { + Language: project.ServiceLanguageJava, + Host: project.ContainerAppTarget, + RelativePath: "java", + }, + }, + Resources: map[string]*project.ResourceConfig{ + "java": { + Type: project.ResourceTypeHostContainerApp, + Name: "java", Props: project.ContainerAppProps{ - Port: 1234, + Port: 8080, + 
}, + Uses: []string{"kafka"}, + }, + "kafka": { + Type: project.ResourceTypeMessagingKafka, + Props: project.KafkaProps{ + Topics: []string{"topic1"}, + AuthType: internal.AuthTypeConnectionString, + SpringBootVersion: "3.4.0", + }, + }, + }, + }, + }, + { + name: "api with cosmos db", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + DatabaseDeps: []appdetect.DatabaseDep{ + appdetect.DbCosmos, + }, + }, + }, + Databases: map[appdetect.DatabaseDep]EntryKind{ + appdetect.DbCosmos: EntryKindDetected, + }, + }, + interactions: []string{ + "cosmosdbname", + }, + want: project.ProjectConfig{ + Services: map[string]*project.ServiceConfig{ + "java": { + Language: project.ServiceLanguageJava, + Host: project.ContainerAppTarget, + RelativePath: "java", + }, + }, + Resources: map[string]*project.ResourceConfig{ + "java": { + Type: project.ResourceTypeHostContainerApp, + Name: "java", + Props: project.ContainerAppProps{ + Port: 8080, + }, + Uses: []string{"cosmos"}, + }, + "cosmos": { + Name: "cosmos", + Type: project.ResourceTypeDbCosmos, + Props: project.CosmosDBProps{ + DatabaseName: "cosmosdbname", + }, + }, + }, + }, + }, + { + name: "api with postgresql", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + DatabaseDeps: []appdetect.DatabaseDep{ + appdetect.DbPostgres, + }, + }, + }, + Databases: map[appdetect.DatabaseDep]EntryKind{ + appdetect.DbPostgres: EntryKindDetected, + }, + }, + interactions: []string{ + "postgresql-db", + // prompt for auth type + // todo cannot use umi here for it will check the source code + "Username and password", + }, + want: project.ProjectConfig{ + Services: map[string]*project.ServiceConfig{ + "java": { + Language: project.ServiceLanguageJava, + Host: project.ContainerAppTarget, + RelativePath: "java", + }, + }, + Resources: map[string]*project.ResourceConfig{ + "java": { + Type: project.ResourceTypeHostContainerApp, + Name: "java", + Props: project.ContainerAppProps{ + Port: 8080, + }, + Uses: []string{"postgresql"}, + }, + "postgresql": { + Type: project.ResourceTypeDbPostgres, + Name: "postgresql", + Props: project.PostgresProps{ + DatabaseName: "postgresql-db", + AuthType: internal.AuthTypePassword, + }, + }, + }, + }, + }, + { + name: "api with mysql", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + DatabaseDeps: []appdetect.DatabaseDep{ + appdetect.DbMySql, + }, + }, + }, + Databases: map[appdetect.DatabaseDep]EntryKind{ + appdetect.DbMySql: EntryKindDetected, + }, + }, + interactions: []string{ + "mysql-db", + // prompt for auth type + // todo cannot use umi here for it will check the source code + "Username and password", + }, + want: project.ProjectConfig{ + Services: map[string]*project.ServiceConfig{ + "java": { + Language: project.ServiceLanguageJava, + Host: project.ContainerAppTarget, + RelativePath: "java", + }, + }, + Resources: map[string]*project.ResourceConfig{ + "java": { + Type: project.ResourceTypeHostContainerApp, + Name: "java", + Props: project.ContainerAppProps{ + Port: 8080, + }, + Uses: []string{"mysql"}, + }, + "mysql": { + Type: project.ResourceTypeDbMySQL, + Name: "mysql", + Props: project.MySQLProps{ + DatabaseName: "mysql-db", + AuthType: internal.AuthTypePassword, }, }, }, @@ -216,6 +819,7 @@ func TestInitializer_prjConfigFromDetect(t *testing.T) { "my$special$db", "n", "postgres", // fill in db name + "Username and password", }, want: project.ProjectConfig{ 
Services: map[string]*project.ServiceConfig{ @@ -236,18 +840,25 @@ func TestInitializer_prjConfigFromDetect(t *testing.T) { Type: project.ResourceTypeDbRedis, Name: "redis", }, - "mongodb": { + "mongo": { Type: project.ResourceTypeDbMongo, - Name: "mongodb", + Name: "mongo", + Props: project.MongoDBProps{ + DatabaseName: "mongodb", + }, }, - "postgres": { + "postgresql": { Type: project.ResourceTypeDbPostgres, - Name: "postgres", + Name: "postgresql", + Props: project.PostgresProps{ + AuthType: internal.AuthTypePassword, + DatabaseName: "postgres", + }, }, "py": { Type: project.ResourceTypeHostContainerApp, Name: "py", - Uses: []string{"postgres", "mongodb", "redis"}, + Uses: []string{"postgresql", "mongo", "redis"}, Props: project.ContainerAppProps{ Port: 80, }, @@ -304,10 +915,13 @@ func TestInitializer_prjConfigFromDetect(t *testing.T) { } } + tt.detect.root = dir + spec, err := i.prjConfigFromDetect( context.Background(), dir, - tt.detect, + &tt.detect, + &scaffold.InfraSpec{}, true) // Print extra newline to avoid mangling `go test -v` final test result output while waiting for final stdin, diff --git a/cli/azd/internal/repository/detect_confirm.go b/cli/azd/internal/repository/detect_confirm.go index e7191d271ae..6885372b1db 100644 --- a/cli/azd/internal/repository/detect_confirm.go +++ b/cli/azd/internal/repository/detect_confirm.go @@ -42,11 +42,17 @@ const ( EntryKindModified EntryKind = "modified" ) +type Pair struct { + first appdetect.AzureDep + second EntryKind +} + // detectConfirm handles prompting for confirming the detected services and databases type detectConfirm struct { // detected services and databases Services []appdetect.Project Databases map[appdetect.DatabaseDep]EntryKind + AzureDeps map[string]Pair // the root directory of the project root string @@ -59,6 +65,7 @@ type detectConfirm struct { // Init initializes state from initial detection output func (d *detectConfirm) Init(projects []appdetect.Project, root string) { d.Databases = make(map[appdetect.DatabaseDep]EntryKind) + d.AzureDeps = make(map[string]Pair) d.Services = make([]appdetect.Project, 0, len(projects)) d.modified = false d.root = root @@ -73,16 +80,24 @@ func (d *detectConfirm) Init(projects []appdetect.Project, root string) { d.Databases[dbType] = EntryKindDetected } } + + for _, azureDep := range project.AzureDeps { + if _, supported := azureDepMap[azureDep.ResourceDisplay()]; supported { + d.AzureDeps[azureDep.ResourceDisplay()] = Pair{azureDep, EntryKindDetected} + } + } } d.captureUsage( fields.AppInitDetectedDatabase, - fields.AppInitDetectedServices) + fields.AppInitDetectedServices, + fields.AppInitDetectedAzureDeps) } func (d *detectConfirm) captureUsage( databases attribute.Key, - services attribute.Key) { + services attribute.Key, + azureDeps attribute.Key) { names := make([]string, 0, len(d.Services)) for _, svc := range d.Services { names = append(names, string(svc.Language)) @@ -93,9 +108,16 @@ func (d *detectConfirm) captureUsage( dbNames = append(dbNames, string(db)) } + azureDepNames := make([]string, 0, len(d.AzureDeps)) + + for _, pair := range d.AzureDeps { + azureDepNames = append(azureDepNames, pair.first.ResourceDisplay()) + } + tracing.SetUsageAttributes( databases.StringSlice(dbNames), services.StringSlice(names), + azureDeps.StringSlice(azureDepNames), ) } @@ -146,7 +168,8 @@ func (d *detectConfirm) Confirm(ctx context.Context) error { case 0: d.captureUsage( fields.AppInitConfirmedDatabases, - fields.AppInitConfirmedServices) + fields.AppInitConfirmedServices, + 
fields.AppInitDetectedAzureDeps) return nil case 1: if err := d.remove(ctx); err != nil { @@ -203,14 +226,21 @@ func (d *detectConfirm) render(ctx context.Context) error { } } + if len(d.Databases) > 0 { + d.console.Message(ctx, "\n"+output.WithBold("Detected databases:")+"\n") + } for db, entry := range d.Databases { switch db { case appdetect.DbPostgres: recommendedServices = append(recommendedServices, "Azure Database for PostgreSQL flexible server") + case appdetect.DbMySql: + recommendedServices = append(recommendedServices, "Azure Database for MySQL flexible server") + case appdetect.DbCosmos: + recommendedServices = append(recommendedServices, "Azure Cosmos DB for NoSQL") case appdetect.DbMongo: recommendedServices = append(recommendedServices, "Azure CosmosDB API for MongoDB") case appdetect.DbRedis: - recommendedServices = append(recommendedServices, "Azure Container Apps Redis add-on") + recommendedServices = append(recommendedServices, "Azure Cache for Redis") } status := "" @@ -224,6 +254,23 @@ func (d *detectConfirm) render(ctx context.Context) error { d.console.Message(ctx, "") } + if len(d.AzureDeps) > 0 { + d.console.Message(ctx, "\n"+output.WithBold("Detected Azure dependencies:")+"\n") + } + for azureDep, entry := range d.AzureDeps { + recommendedServices = append(recommendedServices, azureDep) + + status := "" + if entry.second == EntryKindModified { + status = " " + output.WithSuccessFormat("[Updated]") + } else if entry.second == EntryKindManual { + status = " " + output.WithSuccessFormat("[Added]") + } + + d.console.Message(ctx, " "+color.BlueString(azureDep)+status) + d.console.Message(ctx, "") + } + displayedServices := make([]string, 0, len(recommendedServices)) for _, svc := range recommendedServices { displayedServices = append(displayedServices, color.MagentaString(svc)) diff --git a/cli/azd/internal/repository/detect_confirm_test.go b/cli/azd/internal/repository/detect_confirm_test.go index 6a0a43be6ad..a46a6e8c054 100644 --- a/cli/azd/internal/repository/detect_confirm_test.go +++ b/cli/azd/internal/repository/detect_confirm_test.go @@ -75,6 +75,196 @@ func Test_detectConfirm_confirm(t *testing.T) { }, }, }, + { + name: "confirm single with storage resource", + detection: []appdetect.Project{ + { + Language: appdetect.Java, + Path: javaDir, + AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepStorageAccount{ + ContainerNamePropertyMap: map[string]string{ + "spring.cloud.azure.container": "container1", + }, + }, + }, + }, + }, + interactions: []string{ + "Confirm and continue initializing my app", + }, + want: []appdetect.Project{ + { + Language: appdetect.Java, + Path: javaDir, + AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepStorageAccount{ + ContainerNamePropertyMap: map[string]string{ + "spring.cloud.azure.container": "container1", + }, + }, + }, + }, + }, + }, + { + name: "confirm single with resources service bus", + detection: []appdetect.Project{ + { + Language: appdetect.Java, + Path: javaDir, + AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepServiceBus{ + Queues: []string{"queue1"}, + IsJms: true, + }, + }, + }, + }, + interactions: []string{ + "Confirm and continue initializing my app", + }, + want: []appdetect.Project{ + { + Language: appdetect.Java, + Path: javaDir, + AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepServiceBus{ + Queues: []string{"queue1"}, + IsJms: true, + }, + }, + }, + }, + }, + { + name: "confirm single with event hubs resource", + detection: []appdetect.Project{ + { + Language: appdetect.Java, + Path: javaDir, 
+ AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepEventHubs{ + EventHubsNamePropertyMap: map[string]string{ + "spring.cloud.azure.eventhubs": "eventhub1", + }, + }, + }, + }, + }, + interactions: []string{ + "Confirm and continue initializing my app", + }, + want: []appdetect.Project{ + { + Language: appdetect.Java, + Path: javaDir, + AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepEventHubs{ + EventHubsNamePropertyMap: map[string]string{ + "spring.cloud.azure.eventhubs": "eventhub1", + }, + }, + }, + }, + }, + }, + { + name: "confirm single with cosmos db resource", + detection: []appdetect.Project{ + { + Language: appdetect.Java, + Path: javaDir, + DatabaseDeps: []appdetect.DatabaseDep{ + appdetect.DbCosmos, + }, + }, + }, + interactions: []string{ + "Confirm and continue initializing my app", + }, + want: []appdetect.Project{ + { + Language: appdetect.Java, + Path: javaDir, + DatabaseDeps: []appdetect.DatabaseDep{ + appdetect.DbCosmos, + }, + }, + }, + }, + { + name: "confirm single with postgresql resource", + detection: []appdetect.Project{ + { + Language: appdetect.Java, + Path: javaDir, + DatabaseDeps: []appdetect.DatabaseDep{ + appdetect.DbPostgres, + }, + }, + }, + interactions: []string{ + "Confirm and continue initializing my app", + }, + want: []appdetect.Project{ + { + Language: appdetect.Java, + Path: javaDir, + DatabaseDeps: []appdetect.DatabaseDep{ + appdetect.DbPostgres, + }, + }, + }, + }, + { + name: "confirm single with mysql resource", + detection: []appdetect.Project{ + { + Language: appdetect.Java, + Path: javaDir, + DatabaseDeps: []appdetect.DatabaseDep{ + appdetect.DbMySql, + }, + }, + }, + interactions: []string{ + "Confirm and continue initializing my app", + }, + want: []appdetect.Project{ + { + Language: appdetect.Java, + Path: javaDir, + DatabaseDeps: []appdetect.DatabaseDep{ + appdetect.DbMySql, + }, + }, + }, + }, + { + name: "confirm single with cosmos db mongo resource", + detection: []appdetect.Project{ + { + Language: appdetect.Java, + Path: javaDir, + DatabaseDeps: []appdetect.DatabaseDep{ + appdetect.DbMongo, + }, + }, + }, + interactions: []string{ + "Confirm and continue initializing my app", + }, + want: []appdetect.Project{ + { + Language: appdetect.Java, + Path: javaDir, + DatabaseDeps: []appdetect.DatabaseDep{ + appdetect.DbMongo, + }, + }, + }, + }, { name: "add a language", detection: []appdetect.Project{ diff --git a/cli/azd/internal/repository/infra_confirm.go b/cli/azd/internal/repository/infra_confirm.go index ee5a50f716b..9a9b2ffd180 100644 --- a/cli/azd/internal/repository/infra_confirm.go +++ b/cli/azd/internal/repository/infra_confirm.go @@ -3,58 +3,114 @@ package repository import ( "context" "fmt" + "os" "path/filepath" "regexp" "strconv" - "strings" + "github.com/azure/azure-dev/cli/azd/internal" "github.com/azure/azure-dev/cli/azd/internal/appdetect" + "github.com/azure/azure-dev/cli/azd/internal/binding" "github.com/azure/azure-dev/cli/azd/internal/names" "github.com/azure/azure-dev/cli/azd/internal/scaffold" "github.com/azure/azure-dev/cli/azd/pkg/input" - "github.com/azure/azure-dev/cli/azd/pkg/output/ux" ) -// A regex that matches against "likely" well-formed database names -var wellFormedDbNameRegex = regexp.MustCompile(`^[a-zA-Z\-_0-9]*$`) - // infraSpecFromDetect creates an InfraSpec from the results of app detection confirmation, // prompting for additional inputs if necessary. 
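
For reference, a condensed sketch of the AzureDeps bookkeeping introduced in detect_confirm.go above: entries are keyed by ResourceDisplay(), so the same Azure dependency detected across several services collapses into one entry and, later, a single auth-type prompt. The helper name is hypothetical and the sketch assumes it lives in the repository package where Pair, EntryKindDetected, and the appdetect import are in scope; the azureDepMap "supported" filter used by Init is omitted.

```go
// Condensed sketch of (*detectConfirm).Init's Azure-dependency bookkeeping:
// one Pair per distinct dependency, keyed by its display name.
func collectAzureDeps(projects []appdetect.Project) map[string]Pair {
	deps := map[string]Pair{}
	for _, prj := range projects {
		for _, dep := range prj.AzureDeps {
			deps[dep.ResourceDisplay()] = Pair{dep, EntryKindDetected}
		}
	}
	return deps
}
```
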
func (i *Initializer) infraSpecFromDetect( ctx context.Context, - detect detectConfirm) (scaffold.InfraSpec, error) { + detect *detectConfirm) (scaffold.InfraSpec, error) { spec := scaffold.InfraSpec{} for database := range detect.Databases { - if database == appdetect.DbRedis { + switch database { + case appdetect.DbRedis: spec.DbRedis = &scaffold.DatabaseRedis{} - // no further configuration needed for redis - continue - } - - dbPrompt: - for { - dbName, err := promptDbName(i.console, ctx, database) + case appdetect.DbMongo: + dbName, err := getDatabaseName(database, detect, i.console, ctx) if err != nil { return scaffold.InfraSpec{}, err } - - switch database { - case appdetect.DbMongo: - spec.DbCosmosMongo = &scaffold.DatabaseCosmosMongo{ - DatabaseName: dbName, - } - break dbPrompt - case appdetect.DbPostgres: - if dbName == "" { - i.console.Message(ctx, "Database name is required.") - continue - } - - spec.DbPostgres = &scaffold.DatabasePostgres{ - DatabaseName: dbName, - } + spec.DbCosmosMongo = &scaffold.DatabaseCosmosMongo{ + DatabaseName: dbName, + } + case appdetect.DbPostgres: + dbName, err := getDatabaseName(database, detect, i.console, ctx) + if err != nil { + return scaffold.InfraSpec{}, err + } + authType, err := chooseAuthTypeByPrompt( + database.Display(), + []internal.AuthType{internal.AuthTypeUserAssignedManagedIdentity, internal.AuthTypePassword}, + ctx, + i.console) + if err != nil { + return scaffold.InfraSpec{}, err + } + continueProvision, err := checkPasswordlessConfigurationAndContinueProvision(database, + authType, detect, i.console, ctx) + if err != nil { + return scaffold.InfraSpec{}, err + } + if !continueProvision { + continue + } + spec.DbPostgres = &scaffold.DatabasePostgres{ + DatabaseName: dbName, + AuthType: authType, + } + case appdetect.DbMySql: + dbName, err := getDatabaseName(database, detect, i.console, ctx) + if err != nil { + return scaffold.InfraSpec{}, err + } + authType, err := chooseAuthTypeByPrompt( + database.Display(), + []internal.AuthType{internal.AuthTypeUserAssignedManagedIdentity, internal.AuthTypePassword}, + ctx, + i.console) + if err != nil { + return scaffold.InfraSpec{}, err + } + if err != nil { + return scaffold.InfraSpec{}, err + } + continueProvision, err := checkPasswordlessConfigurationAndContinueProvision(database, + authType, detect, i.console, ctx) + if err != nil { + return scaffold.InfraSpec{}, err + } + if !continueProvision { + continue + } + spec.DbMySql = &scaffold.DatabaseMySql{ + DatabaseName: dbName, + AuthType: authType, + } + case appdetect.DbCosmos: + dbName, err := getDatabaseName(database, detect, i.console, ctx) + if err != nil { + return scaffold.InfraSpec{}, err } - break dbPrompt + if dbName == "" { + i.console.Message(ctx, "Database name is required.") + continue + } + containers, err := detectCosmosSqlDatabaseContainersInDirectory(detect.root) + if err != nil { + return scaffold.InfraSpec{}, err + } + spec.DbCosmos = &scaffold.DatabaseCosmosAccount{ + DatabaseName: dbName, + Containers: containers, + } + } + } + + for _, azureDep := range detect.AzureDeps { + err := i.buildInfraSpecByAzureDep(ctx, azureDep.first, &spec) + if err != nil { + return scaffold.InfraSpec{}, err } } @@ -65,7 +121,7 @@ func (i *Initializer) infraSpecFromDetect( Port: -1, } - port, err := PromptPort(i.console, ctx, name, svc) + port, err := GetOrPromptPort(i.console, ctx, name, svc) if err != nil { return scaffold.InfraSpec{}, err } @@ -77,32 +133,47 @@ func (i *Initializer) infraSpecFromDetect( } } + sourceType := 
toSourceType(svc.Language) for _, db := range svc.DatabaseDeps { // filter out databases that were removed if _, ok := detect.Databases[db]; !ok { continue } - switch db { - case appdetect.DbMongo: - serviceSpec.DbCosmosMongo = &scaffold.DatabaseReference{ - DatabaseName: spec.DbCosmosMongo.DatabaseName, - } case appdetect.DbPostgres: - serviceSpec.DbPostgres = &scaffold.DatabaseReference{ - DatabaseName: spec.DbPostgres.DatabaseName, - } + err = scaffold.BindToPostgres(sourceType, &serviceSpec, spec.DbPostgres) + case appdetect.DbMySql: + err = scaffold.BindToMySql(sourceType, &serviceSpec, spec.DbMySql) + case appdetect.DbMongo: + err = scaffold.BindToMongoDb(sourceType, &serviceSpec, spec.DbCosmosMongo) + case appdetect.DbCosmos: + err = scaffold.BindToCosmosDb(sourceType, &serviceSpec, spec.DbCosmos) case appdetect.DbRedis: - serviceSpec.DbRedis = &scaffold.DatabaseReference{ - DatabaseName: "redis", - } + err = scaffold.BindToRedis(sourceType, &serviceSpec, spec.DbRedis) + } + if err != nil { + return scaffold.InfraSpec{}, err + } + } + + for _, azureDep := range svc.AzureDeps { + switch azureDep.(type) { + case appdetect.AzureDepServiceBus: + err = scaffold.BindToServiceBus(sourceType, &serviceSpec, spec.AzureServiceBus) + case appdetect.AzureDepEventHubs: + err = scaffold.BindToEventHubs(sourceType, &serviceSpec, spec.AzureEventHubs) + case appdetect.AzureDepStorageAccount: + err = scaffold.BindToStorageAccount(sourceType, &serviceSpec, spec.AzureStorageAccount) } } + if err != nil { + return scaffold.InfraSpec{}, err + } spec.Services = append(spec.Services, serviceSpec) } - backends := []scaffold.ServiceReference{} - frontends := []scaffold.ServiceReference{} + var backends []scaffold.ServiceReference + var frontends []scaffold.ServiceReference for idx := range spec.Services { if spec.Services[idx].Frontend == nil && spec.Services[idx].Port != 0 { backends = append(backends, scaffold.ServiceReference{ @@ -131,6 +202,58 @@ func (i *Initializer) infraSpecFromDetect( return spec, nil } +func toSourceType(language appdetect.Language) binding.SourceType { + switch language { + case appdetect.Java: + return binding.Java + default: + return binding.Unknown + } +} + +func getDatabaseName(database appdetect.DatabaseDep, detect *detectConfirm, + console input.Console, ctx context.Context) (string, error) { + dbName := getDatabaseNameFromProjectMetadata(detect, database) + if dbName != "" { + return dbName, nil + } + for { + dbName, err := console.Prompt(ctx, input.ConsoleOptions{ + Message: fmt.Sprintf("Input the databaseName for %s "+ + "(Not databaseServerName. This url can explain the difference: "+ + "'jdbc:mysql://databaseServerName:3306/databaseName'):", database.Display()), + Help: "Hint: App database name\n\n" + + "Name of the database that the app connects to. " + + "This database will be created after running azd provision or azd up.\n" + + "You may be able to skip this step by hitting enter, in which case the database will not be created.", + }) + if err != nil { + return "", err + } + if appdetect.IsValidDatabaseName(dbName) { + return dbName, nil + } else { + console.Message(ctx, "Invalid database name. 
Please choose another name.") + } + } +} + +func getDatabaseNameFromProjectMetadata(detect *detectConfirm, database appdetect.DatabaseDep) string { + result := "" + for _, service := range detect.Services { + name := service.Metadata.DatabaseNameInPropertySpringDatasourceUrl[database] + if name != "" { + if result == "" { + result = name + } else { + // different project configured different db name, not use any of them. + return "" + } + } + } + return result +} + func promptPortNumber(console input.Console, ctx context.Context, promptMessage string) (int, error) { var port int for { @@ -157,62 +280,16 @@ func promptPortNumber(console input.Console, ctx context.Context, promptMessage return port, nil } -func promptDbName(console input.Console, ctx context.Context, database appdetect.DatabaseDep) (string, error) { - for { - dbName, err := console.Prompt(ctx, input.ConsoleOptions{ - Message: fmt.Sprintf("Input the name of the app database (%s)", database.Display()), - Help: "Hint: App database name\n\n" + - "Name of the database that the app connects to. " + - "This database will be created after running azd provision or azd up." + - "\nYou may be able to skip this step by hitting enter, in which case the database will not be created.", - }) - if err != nil { - return "", err - } - - if strings.ContainsAny(dbName, " ") { - console.MessageUxItem(ctx, &ux.WarningMessage{ - Description: "Database name contains whitespace. This might not be allowed by the database server.", - }) - confirm, err := console.Confirm(ctx, input.ConsoleOptions{ - Message: fmt.Sprintf("Continue with name '%s'?", dbName), - }) - if err != nil { - return "", err - } - - if !confirm { - continue - } - } else if !wellFormedDbNameRegex.MatchString(dbName) { - console.MessageUxItem(ctx, &ux.WarningMessage{ - Description: "Database name contains special characters. " + - "This might not be allowed by the database server.", - }) - confirm, err := console.Confirm(ctx, input.ConsoleOptions{ - Message: fmt.Sprintf("Continue with name '%s'?", dbName), - }) - if err != nil { - return "", err - } - - if !confirm { - continue - } - } - - return dbName, nil - } -} - -// PromptPort prompts for port selection from an appdetect project. -func PromptPort( +// GetOrPromptPort prompts for port selection from an appdetect project. 
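
One detail of getDatabaseNameFromProjectMetadata above is easy to miss: a database name is only pre-filled when every detected service agrees on it; any conflict falls back to prompting. A small test-style sketch of that rule, assuming it sits in the same repository package; the exact map type of Metadata.DatabaseNameInPropertySpringDatasourceUrl is inferred from the indexing above and is an assumption:

```go
// Sketch: two services declaring different database names in their
// spring.datasource.url yield no pre-filled name, so getDatabaseName
// falls back to prompting the user.
func Test_getDatabaseNameFromProjectMetadata_conflict(t *testing.T) {
	detect := &detectConfirm{
		Services: []appdetect.Project{
			{Metadata: appdetect.Metadata{
				// assumed map type: map[appdetect.DatabaseDep]string
				DatabaseNameInPropertySpringDatasourceUrl: map[appdetect.DatabaseDep]string{
					appdetect.DbPostgres: "orders",
				},
			}},
			{Metadata: appdetect.Metadata{
				DatabaseNameInPropertySpringDatasourceUrl: map[appdetect.DatabaseDep]string{
					appdetect.DbPostgres: "billing",
				},
			}},
		},
	}
	if got := getDatabaseNameFromProjectMetadata(detect, appdetect.DbPostgres); got != "" {
		t.Errorf("expected empty name on conflict, got %q", got)
	}
}
```
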
+func GetOrPromptPort( console input.Console, ctx context.Context, name string, svc appdetect.Project) (int, error) { if svc.Docker == nil || svc.Docker.Path == "" { // using default builder from azd - if svc.Language == appdetect.Java || svc.Language == appdetect.DotNet { + if svc.Language == appdetect.Java { + return getJavaApplicationPort(svc), nil + } else if svc.Language == appdetect.DotNet { return 8080, nil } return 80, nil @@ -223,12 +300,8 @@ func PromptPort( switch len(ports) { case 1: // only one port was exposed, that's the one return ports[0].Number, nil - case 0: // no ports exposed, prompt for port - port, err := promptPortNumber(console, ctx, "What port does '"+name+"' listen on?") - if err != nil { - return -1, err - } - return port, nil + case 0: // no ports exposed, not expose port + return 0, nil } // multiple ports exposed, prompt for selection @@ -258,3 +331,101 @@ func PromptPort( return port, nil } + +func getJavaApplicationPort(svc appdetect.Project) int { + if !shouldExposePort(svc) { + return 0 + } + if svc.Metadata.ServerPort != 0 { + return svc.Metadata.ServerPort + } else { + return 8080 + } +} + +func shouldExposePort(svc appdetect.Project) bool { + return svc.Metadata.ContainsDependencySpringCloudEurekaServer || + svc.Metadata.ContainsDependencySpringCloudConfigServer || + svc.Metadata.ContainsDependencyAboutEmbeddedWebServer +} + +func (i *Initializer) buildInfraSpecByAzureDep( + ctx context.Context, + azureDep appdetect.AzureDep, + spec *scaffold.InfraSpec) error { + authType, err := chooseAuthTypeByPrompt( + azureDep.ResourceDisplay(), + []internal.AuthType{internal.AuthTypeUserAssignedManagedIdentity, internal.AuthTypeConnectionString}, + ctx, + i.console) + if err != nil { + return err + } + switch dependency := azureDep.(type) { + case appdetect.AzureDepServiceBus: + spec.AzureServiceBus = &scaffold.AzureDepServiceBus{ + IsJms: dependency.IsJms, + Queues: dependency.Queues, + AuthType: authType, + } + case appdetect.AzureDepEventHubs: + spec.AzureEventHubs = &scaffold.AzureDepEventHubs{ + EventHubNames: appdetect.DistinctValues(dependency.EventHubsNamePropertyMap), + AuthType: authType, + UseKafka: dependency.UseKafka(), + SpringBootVersion: dependency.SpringBootVersion, + } + case appdetect.AzureDepStorageAccount: + spec.AzureStorageAccount = &scaffold.AzureDepStorageAccount{ + ContainerNames: appdetect.DistinctValues(dependency.ContainerNamePropertyMap), + AuthType: authType, + } + } + return nil +} + +func detectCosmosSqlDatabaseContainersInDirectory(root string) ([]scaffold.CosmosSqlDatabaseContainer, error) { + var result []scaffold.CosmosSqlDatabaseContainer + err := filepath.Walk(root, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if !info.IsDir() && filepath.Ext(path) == ".java" { + container, err := detectCosmosSqlDatabaseContainerInFile(path) + if err != nil { + return err + } + if len(container.ContainerName) != 0 { + result = append(result, container) + } + } + return nil + }) + return result, err +} + +func detectCosmosSqlDatabaseContainerInFile(filePath string) (scaffold.CosmosSqlDatabaseContainer, error) { + var result scaffold.CosmosSqlDatabaseContainer + result.PartitionKeyPaths = make([]string, 0) + content, err := os.ReadFile(filePath) + if err != nil { + return result, err + } + // todo: + // 1. Maybe "@Container" is not "com.azure.spring.data.cosmos.core.mapping.Container" + // 2. 
Maybe "@Container" is imported by "com.azure.spring.data.cosmos.core.mapping.*" + containerRegex := regexp.MustCompile(`@Container\s*\(containerName\s*=\s*"([^"]+)"\)`) + partitionKeyRegex := regexp.MustCompile(`@PartitionKey\s*(?:\n\s*)?(?:private|public|protected)?\s*\w+\s+(\w+);`) + + matches := containerRegex.FindAllStringSubmatch(string(content), -1) + if len(matches) != 1 { + return result, nil + } + result.ContainerName = matches[0][1] + + matches = partitionKeyRegex.FindAllStringSubmatch(string(content), -1) + for _, match := range matches { + result.PartitionKeyPaths = append(result.PartitionKeyPaths, match[1]) + } + return result, nil +} diff --git a/cli/azd/internal/repository/infra_confirm_test.go b/cli/azd/internal/repository/infra_confirm_test.go index 7ccfdfeab25..3de1739faf5 100644 --- a/cli/azd/internal/repository/infra_confirm_test.go +++ b/cli/azd/internal/repository/infra_confirm_test.go @@ -4,16 +4,50 @@ import ( "context" "fmt" "os" + "path/filepath" "strings" "testing" + "github.com/azure/azure-dev/cli/azd/internal" "github.com/azure/azure-dev/cli/azd/internal/appdetect" + "github.com/azure/azure-dev/cli/azd/internal/binding" "github.com/azure/azure-dev/cli/azd/internal/scaffold" "github.com/azure/azure-dev/cli/azd/pkg/input" + "github.com/azure/azure-dev/cli/azd/pkg/osutil" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestInitializer_infraSpecFromDetect(t *testing.T) { + dbPostgres := &scaffold.DatabasePostgres{ + DatabaseName: "myappdb", + AuthType: "password", + } + envsForPostgres, _ := binding.GetBindingEnvsForCommonSourceToPostgresql(internal.AuthTypePassword) + scaffoldStorageAccount := scaffold.AzureDepStorageAccount{ + ContainerNames: []string{"container1"}, + AuthType: internal.AuthTypeConnectionString, + } + envsForStorage, _ := binding.GetServiceBindingEnvsForStorageAccount(internal.AuthTypeConnectionString) + envsForMongo, _ := binding.GetBindingEnvsForSpringBootToMongoDb(internal.AuthTypeConnectionString) + scaffoldServiceBus := scaffold.AzureDepServiceBus{ + Queues: []string{"queue1"}, + IsJms: true, + AuthType: internal.AuthTypeConnectionString, + } + envsForServiceBus, _ := binding.GetBindingEnvsForSpringBootToServiceBusJms(internal.AuthTypeConnectionString) + scaffoldEventHubs := scaffold.AzureDepEventHubs{ + EventHubNames: []string{"eventhub1"}, + AuthType: internal.AuthTypeConnectionString, + UseKafka: true, + } + envsForEventHubs, _ := binding.GetBindingEnvsForSpringBootToEventHubsKafka("3.x", internal.AuthTypeConnectionString) + envsForCosmos, _ := binding.GetBindingEnvsForSpringBootToCosmosNoSQL(internal.AuthTypeUserAssignedManagedIdentity) + scaffoldMysql := scaffold.DatabaseMySql{ + DatabaseName: "mysql-db", + AuthType: internal.AuthTypePassword, + } + envsForMysql, _ := binding.GetBindingEnvsForSpringBootToMysql(internal.AuthTypePassword) tests := []struct { name string detect detectConfirm @@ -76,19 +110,11 @@ func TestInitializer_infraSpecFromDetect(t *testing.T) { }, }, }, - interactions: []string{ - // prompt for port -- hit multiple validation cases - "notAnInteger", - "-2", - "65536", - "1234", - }, + interactions: []string{}, want: scaffold.InfraSpec{ Services: []scaffold.ServiceSpec{ { - Name: "dotnet", - Port: 1234, - Backend: &scaffold.Backend{}, + Name: "dotnet", }, }, }, @@ -138,6 +164,277 @@ func TestInitializer_infraSpecFromDetect(t *testing.T) { }, }, }, + { + name: "api with storage", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: 
"java", + AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepStorageAccount{ + ContainerNamePropertyMap: map[string]string{ + "spring.cloud.azure.container": "container1", + }, + }, + }, + }, + }, + AzureDeps: map[string]Pair{ + "storage": { + first: appdetect.AzureDepStorageAccount{ + ContainerNamePropertyMap: map[string]string{ + "spring.cloud.azure.container": "container1", + }, + }, + second: EntryKindDetected, + }, + }, + }, + interactions: []string{ + "Connection string", + }, + want: scaffold.InfraSpec{ + Services: []scaffold.ServiceSpec{ + { + Name: "java", + Port: 8080, + Backend: &scaffold.Backend{}, + AzureStorageAccount: &scaffoldStorageAccount, + Envs: envsForStorage, + }, + }, + AzureStorageAccount: &scaffoldStorageAccount, + }, + }, + { + name: "api with mongo", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + DatabaseDeps: []appdetect.DatabaseDep{ + appdetect.DbMongo, + }, + }, + }, + Databases: map[appdetect.DatabaseDep]EntryKind{ + appdetect.DbMongo: EntryKindDetected, + }, + }, + interactions: []string{ + "mongodb-name", + }, + want: scaffold.InfraSpec{ + Services: []scaffold.ServiceSpec{ + { + Name: "java", + Port: 8080, + Backend: &scaffold.Backend{}, + DbCosmosMongo: &scaffold.DatabaseCosmosMongo{ + DatabaseName: "mongodb-name", + }, + Envs: envsForMongo, + }, + }, + DbCosmosMongo: &scaffold.DatabaseCosmosMongo{ + DatabaseName: "mongodb-name", + }, + }, + }, + { + name: "api with service bus", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepServiceBus{ + Queues: []string{"queue1"}, + IsJms: true, + }, + }, + }, + }, + AzureDeps: map[string]Pair{ + "storage": { + first: appdetect.AzureDepServiceBus{ + Queues: []string{"queue1"}, + IsJms: true, + }, + second: EntryKindDetected, + }, + }, + }, + interactions: []string{ + "Connection string", + }, + want: scaffold.InfraSpec{ + Services: []scaffold.ServiceSpec{ + { + Name: "java", + Port: 8080, + Backend: &scaffold.Backend{}, + AzureServiceBus: &scaffoldServiceBus, + Envs: envsForServiceBus, + }, + }, + AzureServiceBus: &scaffoldServiceBus, + }, + }, + { + name: "api with event hubs", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + AzureDeps: []appdetect.AzureDep{ + appdetect.AzureDepEventHubs{ + EventHubsNamePropertyMap: map[string]string{ + "spring.cloud.azure.kafka": "eventhub1", + }, + DependencyTypes: []appdetect.DependencyType{appdetect.SpringKafka}, + }, + }, + }, + }, + AzureDeps: map[string]Pair{ + "eventhubs": { + first: appdetect.AzureDepEventHubs{ + EventHubsNamePropertyMap: map[string]string{ + "spring.cloud.azure.kafka": "eventhub1", + }, + DependencyTypes: []appdetect.DependencyType{appdetect.SpringKafka}, + }, + second: EntryKindDetected, + }, + }, + }, + interactions: []string{ + "Connection string", + }, + want: scaffold.InfraSpec{ + Services: []scaffold.ServiceSpec{ + { + Name: "java", + Port: 8080, + Backend: &scaffold.Backend{}, + AzureEventHubs: &scaffoldEventHubs, + Envs: envsForEventHubs, + }, + }, + AzureEventHubs: &scaffoldEventHubs, + }, + }, + { + name: "api with cosmos db", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + DatabaseDeps: []appdetect.DatabaseDep{ + appdetect.DbCosmos, + }, + }, + }, + Databases: map[appdetect.DatabaseDep]EntryKind{ + appdetect.DbCosmos: EntryKindDetected, + }, + }, + 
interactions: []string{ + "cosmos-db-name", + }, + want: scaffold.InfraSpec{ + Services: []scaffold.ServiceSpec{ + { + Name: "java", + Port: 8080, + Backend: &scaffold.Backend{}, + DbCosmos: &scaffold.DatabaseCosmosAccount{ + DatabaseName: "cosmos-db-name", + }, + Envs: envsForCosmos, + }, + }, + DbCosmos: &scaffold.DatabaseCosmosAccount{ + DatabaseName: "cosmos-db-name", + }, + }, + }, + { + name: "api with mysql", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + DatabaseDeps: []appdetect.DatabaseDep{ + appdetect.DbMySql, + }, + }, + }, + Databases: map[appdetect.DatabaseDep]EntryKind{ + appdetect.DbMySql: EntryKindDetected, + }, + }, + interactions: []string{ + // prompt for dbname + "mysql-db", + "Username and password", + }, + want: scaffold.InfraSpec{ + Services: []scaffold.ServiceSpec{ + { + Name: "java", + Port: 8080, + Backend: &scaffold.Backend{}, + DbMySql: &scaffoldMysql, + Envs: envsForMysql, + }, + }, + DbMySql: &scaffoldMysql, + }, + }, + { + name: "api with cosmos db mongo", + detect: detectConfirm{ + Services: []appdetect.Project{ + { + Language: appdetect.Java, + Path: "java", + DatabaseDeps: []appdetect.DatabaseDep{ + appdetect.DbMongo, + }, + }, + }, + Databases: map[appdetect.DatabaseDep]EntryKind{ + appdetect.DbMongo: EntryKindDetected, + }, + }, + interactions: []string{ + "cosmos-db-mongo-name", + }, + want: scaffold.InfraSpec{ + Services: []scaffold.ServiceSpec{ + { + Name: "java", + Port: 8080, + Backend: &scaffold.Backend{}, + DbCosmosMongo: &scaffold.DatabaseCosmosMongo{ + DatabaseName: "cosmos-db-mongo-name", + }, + Envs: envsForMongo, + }, + }, + DbCosmosMongo: &scaffold.DatabaseCosmosMongo{ + DatabaseName: "cosmos-db-mongo-name", + }, + }, + }, { name: "api and web with db", detect: detectConfirm{ @@ -166,11 +463,13 @@ func TestInitializer_infraSpecFromDetect(t *testing.T) { "n", "my$special$db", "n", - "myappdb", // fill in db name + "myappdb", // fill in db name + "Username and password", // confirm db authentication }, want: scaffold.InfraSpec{ DbPostgres: &scaffold.DatabasePostgres{ DatabaseName: "myappdb", + AuthType: "password", }, Services: []scaffold.ServiceSpec{ { @@ -183,9 +482,8 @@ func TestInitializer_infraSpecFromDetect(t *testing.T) { }, }, }, - DbPostgres: &scaffold.DatabaseReference{ - DatabaseName: "myappdb", - }, + DbPostgres: dbPostgres, + Envs: envsForPostgres, }, { Name: "js", @@ -218,7 +516,10 @@ func TestInitializer_infraSpecFromDetect(t *testing.T) { nil), } - spec, err := i.infraSpecFromDetect(context.Background(), tt.detect) + dir := t.TempDir() + tt.detect.root = dir + + spec, err := i.infraSpecFromDetect(context.Background(), &tt.detect) // Print extra newline to avoid mangling `go test -v` final test result output while waiting for final stdin, // which may result in incorrect `gotestsum` reporting @@ -229,3 +530,152 @@ func TestInitializer_infraSpecFromDetect(t *testing.T) { }) } } + +func TestDetectCosmosSqlDatabaseContainerInFile(t *testing.T) { + tests := []struct { + javaFileContent string + expectedContainers scaffold.CosmosSqlDatabaseContainer + }{ + { + javaFileContent: "", + expectedContainers: scaffold.CosmosSqlDatabaseContainer{ + ContainerName: "", + PartitionKeyPaths: []string{}, + }, + }, + { + javaFileContent: "@Container(containerName = \"users\")", + expectedContainers: scaffold.CosmosSqlDatabaseContainer{ + ContainerName: "users", + PartitionKeyPaths: []string{}, + }, + }, + { + javaFileContent: "" + + "@Container(containerName = \"users\")\n" + + "public 
class User {\n" + + " @Id\n " + + "private String id;\n" + + " private String firstName;\n" + + " @PartitionKey\n" + + " private String lastName;", + expectedContainers: scaffold.CosmosSqlDatabaseContainer{ + ContainerName: "users", + PartitionKeyPaths: []string{ + "lastName", + }, + }, + }, + { + javaFileContent: "" + + "@Container(containerName = \"users\")\n" + + "public class User {\n" + + " @Id\n " + + "private String id;\n" + + " private String firstName;\n" + + " @PartitionKey private String lastName;", + expectedContainers: scaffold.CosmosSqlDatabaseContainer{ + ContainerName: "users", + PartitionKeyPaths: []string{ + "lastName", + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.javaFileContent, func(t *testing.T) { + tempDir := t.TempDir() + tempFile := filepath.Join(tempDir, "Example.java") + file, err := os.Create(tempFile) + assert.NoError(t, err) + file.Close() + + err = os.WriteFile(tempFile, []byte(tt.javaFileContent), osutil.PermissionFile) + assert.NoError(t, err) + + container, err := detectCosmosSqlDatabaseContainerInFile(tempFile) + assert.NoError(t, err) + assert.Equal(t, tt.expectedContainers, container) + }) + } +} + +func Test_getJavaApplicationPort(t *testing.T) { + tests := []struct { + name string + svc appdetect.Project + expected int + }{ + { + name: "not configure anything", + svc: appdetect.Project{ + Metadata: appdetect.Metadata{}, + }, + expected: 0, + }, + { + name: "only configure ServerPort", + svc: appdetect.Project{ + Metadata: appdetect.Metadata{ + ServerPort: 8888, + }, + }, + expected: 0, + }, + { + name: "only configure ContainsDependencySpringCloudEurekaServer", + svc: appdetect.Project{ + Metadata: appdetect.Metadata{ + ContainsDependencySpringCloudEurekaServer: true, + }, + }, + expected: 8080, + }, + { + name: "only configure ContainsDependencySpringCloudConfigServer", + svc: appdetect.Project{ + Metadata: appdetect.Metadata{ + ContainsDependencySpringCloudConfigServer: true, + }, + }, + expected: 8080, + }, + { + name: "only configure ContainsDependencyAboutEmbeddedWebServer", + svc: appdetect.Project{ + Metadata: appdetect.Metadata{ + ContainsDependencyAboutEmbeddedWebServer: true, + }, + }, + expected: 8080, + }, + { + name: "configure multiple dependencies", + svc: appdetect.Project{ + Metadata: appdetect.Metadata{ + ContainsDependencySpringCloudEurekaServer: true, + ContainsDependencySpringCloudConfigServer: true, + ContainsDependencyAboutEmbeddedWebServer: true, + }, + }, + expected: 8080, + }, + { + name: "configure ServerPort and multiple dependencies", + svc: appdetect.Project{ + Metadata: appdetect.Metadata{ + ServerPort: 8888, + ContainsDependencySpringCloudEurekaServer: true, + ContainsDependencySpringCloudConfigServer: true, + ContainsDependencyAboutEmbeddedWebServer: true, + }, + }, + expected: 8888, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.expected, getJavaApplicationPort(tt.svc)) + }) + } +} diff --git a/cli/azd/internal/repository/testdata/empty/azureyaml_created.txt b/cli/azd/internal/repository/testdata/empty/azureyaml_created.txt index 7318d2a5007..5443f055e86 100644 --- a/cli/azd/internal/repository/testdata/empty/azureyaml_created.txt +++ b/cli/azd/internal/repository/testdata/empty/azureyaml_created.txt @@ -1,3 +1,3 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/Azure/azure-dev/main/schemas/v1.0/azure.yaml.json +# yaml-language-server: 
$schema=https://raw.githubusercontent.com/azure-javaee/azure-dev/feature/sjad/schemas/alpha/azure.yaml.json name: "" diff --git a/cli/azd/internal/repository/util.go b/cli/azd/internal/repository/util.go new file mode 100644 index 00000000000..3e5f563646a --- /dev/null +++ b/cli/azd/internal/repository/util.go @@ -0,0 +1,106 @@ +package repository + +import "strings" + +//cspell:disable + +// LabelName cleans up a string to be used as a RFC 1123 Label name. +// It does not enforce the 63 character limit. +// +// RFC 1123 Label name: +// - contain only lowercase alphanumeric characters or '-' +// - start with an alphanumeric character +// - end with an alphanumeric character +// +// Examples: +// - myproject, MYPROJECT -> myproject +// - myProject, myProjecT, MyProject, MyProjecT -> my-project +// - my.project, My.Project, my-project, My-Project -> my-project +func LabelName(name string) string { + hasSeparator, n := cleanAlphaNumeric(name) + if hasSeparator { + return labelNameFromSeparators(n) + } + + return labelNameFromCasing(name) +} + +//cspell:enable + +// cleanAlphaNumeric removes non-alphanumeric characters from the name. +// +// It also returns whether the name uses word separators. +func cleanAlphaNumeric(name string) (hasSeparator bool, cleaned string) { + sb := strings.Builder{} + hasSeparator = false + for _, c := range name { + if isAsciiAlphaNumeric(c) { + sb.WriteRune(c) + } else if isSeparator(c) { + hasSeparator = true + sb.WriteRune(c) + } + } + + return hasSeparator, sb.String() +} + +func isAsciiAlphaNumeric(r rune) bool { + return ('0' <= r && r <= '9') || ('A' <= r && r <= 'Z') || ('a' <= r && r <= 'z') +} + +func isSeparator(r rune) bool { + return r == '-' || r == '_' || r == '.' +} + +func lowerCase(r rune) rune { + if 'A' <= r && r <= 'Z' { + r += 'a' - 'A' + } + return r +} + +// Converts camel-cased or Pascal-cased names into lower-cased dash-separated names. +// Example: MyProject, myProject -> my-project +func labelNameFromCasing(name string) string { + result := strings.Builder{} + // previously seen upper-case character + prevUpperCase := -2 // -2 to avoid matching the first character + + for i, c := range name { + if 'A' <= c && c <= 'Z' { + if prevUpperCase == i-1 { // handle runs of upper-case word + prevUpperCase = i + result.WriteRune(lowerCase(c)) + continue + } + + if i > 0 && i != len(name)-1 { + result.WriteRune('-') + } + + prevUpperCase = i + } + + if isAsciiAlphaNumeric(c) { + result.WriteRune(lowerCase(c)) + } + } + + return result.String() +} + +// Converts all word-separated names into lower-cased dash-separated names. 
+// Examples: my.project, my_project, My-Project -> my-project +func labelNameFromSeparators(name string) string { + result := strings.Builder{} + for i, c := range name { + if isAsciiAlphaNumeric(c) { + result.WriteRune(lowerCase(c)) + } else if i > 0 && i != len(name)-1 && isSeparator(c) { + result.WriteRune('-') + } + } + + return result.String() +} diff --git a/cli/azd/internal/repository/util_test.go b/cli/azd/internal/repository/util_test.go new file mode 100644 index 00000000000..56a2c467756 --- /dev/null +++ b/cli/azd/internal/repository/util_test.go @@ -0,0 +1,67 @@ +package repository + +import ( + "testing" +) + +//cspell:disable + +func TestLabelName(t *testing.T) { + tests := []struct { + name string + input string + expected string + }{ + {"Lowercase", "myproject", "myproject"}, + {"Uppercase", "MYPROJECT", "myproject"}, + {"MixedCase", "myProject", "my-project"}, + {"MixedCaseEnd", "myProjecT", "my-project"}, + {"TitleCase", "MyProject", "my-project"}, + {"TitleCaseEnd", "MyProjecT", "my-project"}, + {"WithDot", "my.project", "my-project"}, + {"WithDotTitleCase", "My.Project", "my-project"}, + {"WithHyphen", "my-project", "my-project"}, + {"WithHyphenTitleCase", "My-Project", "my-project"}, + {"StartWithNumber", "1myproject", "1myproject"}, + {"EndWithNumber", "myproject2", "myproject2"}, + {"MixedWithNumbers", "my2Project3", "my2-project3"}, + {"SpecialCharacters", "my_project!@#", "my-project"}, + {"EmptyString", "", ""}, + {"OnlySpecialCharacters", "@#$%^&*", ""}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := LabelName(tt.input) + if result != tt.expected { + t.Errorf("LabelName(%q) = %q, want %q", tt.input, result, tt.expected) + } + }) + } +} + +func TestLabelNameEdgeCases(t *testing.T) { + tests := []struct { + name string + input string + expected string + }{ + {"SingleCharacter", "A", "a"}, + {"TwoCharacters", "Ab", "ab"}, + {"StartEndHyphens", "-abc-", "abc"}, + {"LongString", + "ThisIsOneVeryLongStringThatExceedsTheSixtyThreeCharacterLimitForRFC1123LabelNames", + "this-is-one-very-long-string-that-exceeds-the-sixty-three-character-limit-for-rfc1123-label-names"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := LabelName(tt.input) + if result != tt.expected { + t.Errorf("LabelName(%q) = %q, want %q", tt.input, result, tt.expected) + } + }) + } +} + +//cspell:enable diff --git a/cli/azd/internal/scaffold/bicep_env.go b/cli/azd/internal/scaffold/bicep_env.go new file mode 100644 index 00000000000..1a4d059dca3 --- /dev/null +++ b/cli/azd/internal/scaffold/bicep_env.go @@ -0,0 +1,248 @@ +package scaffold + +import ( + "fmt" + "strings" + + "github.com/azure/azure-dev/cli/azd/internal" + "github.com/azure/azure-dev/cli/azd/internal/binding" +) + +const placeholderOfSourceClientId = "__PlaceHolder__SourceClientId" + +func ToBicepEnv(name string, value string) BicepEnv { + if binding.IsBindingEnv(value) { + if value == binding.SourceUserAssignedManagedIdentityClientId { + return BicepEnv{ + BicepEnvType: BicepEnvTypePlainText, + Name: name, + PlainTextValue: placeholderOfSourceClientId, + } + } + target, infoType := binding.ToTargetAndInfoType(value) + bicepEnvValue, ok := bicepEnv[target.Type][infoType] + if !ok { + panic(unsupportedType(target.Type, infoType)) + } + if strings.HasPrefix(bicepEnvValue, "'") && strings.HasSuffix(bicepEnvValue, "'") { + bicepEnvValue = bicepEnvValue[1 : len(bicepEnvValue)-1] + bicepEnvValue = "'" + binding.ReplaceBindingEnv(value, bicepEnvValue) + "'" + } else { + 
bicepEnvValue = binding.ReplaceBindingEnv(value, bicepEnvValue) + } + if isSecret(infoType) { + if isKeyVaultSecret(bicepEnvValue) { + return BicepEnv{ + BicepEnvType: BicepEnvTypeKeyVaultSecret, + Name: name, + SecretName: secretName(value), + SecretValue: unwrapKeyVaultSecretValue(bicepEnvValue), + } + } else { + return BicepEnv{ + BicepEnvType: BicepEnvTypeSecret, + Name: name, + SecretName: secretName(value), + SecretValue: bicepEnvValue, + } + } + } else { + if target.Type == binding.AzureContainerApp && target.Name != "" { + bicepEnvValue = strings.ReplaceAll(bicepEnvValue, "{{BackendName}}", BicepName(target.Name)) + } + return BicepEnv{ + BicepEnvType: BicepEnvTypePlainText, + Name: name, + PlainTextValue: bicepEnvValue, + } + } + } else { + return BicepEnv{ + BicepEnvType: BicepEnvTypePlainText, + Name: name, + PlainTextValue: toBicepEnvPlainTextValue(value), + } + } +} + +func IsPlaceholderOfSourceClientId(value string) bool { + return value == placeholderOfSourceClientId +} + +func ShouldAddToBicepFile(spec ServiceSpec, name string) bool { + return !willBeAddedByServiceConnector(spec, name) +} + +func willBeAddedByServiceConnector(spec ServiceSpec, name string) bool { + if (spec.DbPostgres != nil && spec.DbPostgres.AuthType == internal.AuthTypeUserAssignedManagedIdentity) || + (spec.DbMySql != nil && spec.DbMySql.AuthType == internal.AuthTypeUserAssignedManagedIdentity) { + return name == "spring.datasource.url" || + name == "spring.datasource.username" || + name == "spring.datasource.azure.passwordless-enabled" || + name == "spring.cloud.azure.credential.client-id" || + name == "spring.cloud.azure.credential.managed-identity-enabled" + } else { + return false + } +} + +// inputStringExample -> 'inputStringExample' +func addQuotation(input string) string { + return fmt.Sprintf("'%s'", input) +} + +// 'inputStringExample' -> 'inputStringExample' +// '${inputSingleVariableExample}' -> inputSingleVariableExample +// '${HOST}:${PORT}' -> '${HOST}:${PORT}' +func removeQuotationIfItIsASingleVariable(input string) string { + prefix := "'${" + suffix := "}'" + if strings.HasPrefix(input, prefix) && strings.HasSuffix(input, suffix) { + prefixTrimmed := strings.TrimPrefix(input, prefix) + trimmed := strings.TrimSuffix(prefixTrimmed, suffix) + if !strings.ContainsAny(trimmed, "}") { + return trimmed + } else { + return input + } + } else { + return input + } +} + +// The BicepEnv.PlainTextValue is handled as variable by default. +// If the value is string, it should contain ('). +// Here are some examples of input and output: +// inputStringExample -> 'inputStringExample' +// ${inputSingleVariableExample} -> inputSingleVariableExample +// ${HOST}:${PORT} -> '${HOST}:${PORT}' +func toBicepEnvPlainTextValue(input string) string { + return removeQuotationIfItIsASingleVariable(addQuotation(input)) +} + +// BicepEnv +// +// For Name and SecretName, they are handled as string by default. +// Which means quotation will be added before they are used in bicep file, because they are always string value. +// +// For PlainTextValue and SecretValue, they are handled as variable by default. +// When they are string value, quotation should be contained by themselves. 
+// Set variable as default is mainly to avoid this problem: +// https://learn.microsoft.com/en-us/azure/azure-resource-manager/bicep/linter-rule-simplify-interpolation +type BicepEnv struct { + BicepEnvType BicepEnvType + Name string + PlainTextValue string + SecretName string + SecretValue string +} + +type BicepEnvType string + +const ( + BicepEnvTypePlainText BicepEnvType = "plainText" + BicepEnvTypeSecret BicepEnvType = "secret" + BicepEnvTypeKeyVaultSecret BicepEnvType = "keyVaultSecret" +) + +// Note: The value is handled as variable. +// If the value is string, it should contain quotation inside itself. +var bicepEnv = map[binding.TargetType]map[binding.InfoType]string{ + binding.AzureDatabaseForPostgresql: { + binding.InfoTypeHost: "postgreServer.outputs.fqdn", + binding.InfoTypePort: "'5432'", + binding.InfoTypeDatabaseName: "postgreSqlDatabaseName", + binding.InfoTypeUsername: "postgreSqlDatabaseUser", + binding.InfoTypePassword: "postgreSqlDatabasePassword", + binding.InfoTypeUrl: "'postgresql://${postgreSqlDatabaseUser}:${postgreSqlDatabasePassword}@" + + "${postgreServer.outputs.fqdn}:5432/${postgreSqlDatabaseName}'", + binding.InfoTypeJdbcUrl: "'jdbc:postgresql://${postgreServer.outputs.fqdn}:5432/" + + "${postgreSqlDatabaseName}'", + }, + binding.AzureDatabaseForMysql: { + binding.InfoTypeHost: "mysqlServer.outputs.fqdn", + binding.InfoTypePort: "'3306'", + binding.InfoTypeDatabaseName: "mysqlDatabaseName", + binding.InfoTypeUsername: "mysqlDatabaseUser", + binding.InfoTypePassword: "mysqlDatabasePassword", + binding.InfoTypeUrl: "'mysql://${mysqlDatabaseUser}:${mysqlDatabasePassword}@" + + "${mysqlServer.outputs.fqdn}:3306/${mysqlDatabaseName}'", + binding.InfoTypeJdbcUrl: "'jdbc:mysql://${mysqlServer.outputs.fqdn}:3306/${mysqlDatabaseName}'", + }, + binding.AzureCacheForRedis: { + binding.InfoTypeHost: "redis.outputs.hostName", + binding.InfoTypePort: "string(redis.outputs.sslPort)", + binding.InfoTypeEndpoint: "'${redis.outputs.hostName}:${redis.outputs.sslPort}'", + binding.InfoTypePassword: wrapToKeyVaultSecretValue("redisConn.outputs.keyVaultUrlForPass"), + binding.InfoTypeUrl: wrapToKeyVaultSecretValue("redisConn.outputs.keyVaultUrlForUrl"), + }, + binding.AzureCosmosDBForMongoDB: { + binding.InfoTypeDatabaseName: "mongoDatabaseName", + binding.InfoTypeUrl: wrapToKeyVaultSecretValue( + "cosmos.outputs.exportedSecrets['MONGODB-URL'].secretUri", + ), + }, + binding.AzureCosmosDBForNoSQL: { + binding.InfoTypeEndpoint: "cosmos.outputs.endpoint", + binding.InfoTypeDatabaseName: "cosmosDatabaseName", + }, + binding.AzureServiceBus: { + binding.InfoTypeNamespace: "serviceBusNamespace.outputs.name", + binding.InfoTypeConnectionString: wrapToKeyVaultSecretValue( + "serviceBusConnectionString.outputs.keyVaultUrl", + ), + }, + binding.AzureEventHubs: { + binding.InfoTypeNamespace: "eventHubNamespace.outputs.name", + binding.InfoTypeEndpoint: "'${eventHubNamespace.outputs.name}.servicebus.windows.net:9093'", + binding.InfoTypeConnectionString: wrapToKeyVaultSecretValue( + "eventHubsConnectionString.outputs.keyVaultUrl", + ), + }, + binding.AzureStorageAccount: { + binding.InfoTypeAccountName: "storageAccountName", + binding.InfoTypeConnectionString: wrapToKeyVaultSecretValue( + "storageAccountConnectionString.outputs.keyVaultUrl", + ), + }, + binding.AzureOpenAiModel: { + binding.InfoTypeEndpoint: "account.outputs.endpoint", + }, + binding.AzureContainerApp: { + binding.InfoTypeHost: "'https://{{BackendName}}.${containerAppsEnvironment.outputs.defaultDomain}'", + }, +} + +func 
unsupportedType(targetType binding.TargetType, infoType binding.InfoType) string {
+ return fmt.Sprintf(
+ "unsupported connection info type for resource type. targetType = %s, infoType = %s",
+ targetType, infoType)
+}
+
+func isSecret(info binding.InfoType) bool {
+ return info == binding.InfoTypePassword || info == binding.InfoTypeUrl ||
+ info == binding.InfoTypeConnectionString
+}
+
+func secretName(envValue string) string {
+ target, infoType := binding.ToTargetAndInfoType(envValue)
+ name := fmt.Sprintf("%s-%s", target.Type, infoType)
+ lowerCaseName := strings.ToLower(name)
+ noDotName := strings.Replace(lowerCaseName, ".", "-", -1)
+ noUnderscoreName := strings.Replace(noDotName, "_", "-", -1)
+ return noUnderscoreName
+}
+
+var keyVaultSecretPrefix = "keyvault:"
+
+func isKeyVaultSecret(value string) bool {
+ return strings.HasPrefix(value, keyVaultSecretPrefix)
+}
+
+func wrapToKeyVaultSecretValue(value string) string {
+ return fmt.Sprintf("%s%s", keyVaultSecretPrefix, value)
+}
+
+func unwrapKeyVaultSecretValue(value string) string {
+ return strings.TrimPrefix(value, keyVaultSecretPrefix)
+}
diff --git a/cli/azd/internal/scaffold/bicep_env_test.go b/cli/azd/internal/scaffold/bicep_env_test.go
new file mode 100644
index 00000000000..91e3d662b83
--- /dev/null
+++ b/cli/azd/internal/scaffold/bicep_env_test.go
@@ -0,0 +1,206 @@
+package scaffold
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/azure/azure-dev/cli/azd/internal"
+ "github.com/azure/azure-dev/cli/azd/internal/binding"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestToBicepEnv(t *testing.T) {
+ tests := []struct {
+ name string
+ envName string
+ envValue string
+ want BicepEnv
+ }{
+ {
+ name: "Plain text",
+ envName: "enable-customer-related-feature",
+ envValue: "true",
+ want: BicepEnv{
+ BicepEnvType: BicepEnvTypePlainText,
+ Name: "enable-customer-related-feature",
+ PlainTextValue: "'true'", // Note: Quotation added automatically
+ },
+ },
+ {
+ name: "Plain text which is used for binding, but it's not a binding env",
+ envName: "spring.jms.servicebus.pricing-tier",
+ envValue: "premium",
+ want: BicepEnv{
+ BicepEnvType: BicepEnvTypePlainText,
+ Name: "spring.jms.servicebus.pricing-tier",
+ PlainTextValue: "'premium'", // Note: Quotation added automatically
+ },
+ },
+ {
+ name: "Plain text which is a binding env",
+ envName: "POSTGRES_PORT",
+ envValue: binding.ToBindingEnv(binding.Target{Type: binding.AzureDatabaseForPostgresql},
+ binding.InfoTypePort),
+ want: BicepEnv{
+ BicepEnvType: BicepEnvTypePlainText,
+ Name: "POSTGRES_PORT",
+ PlainTextValue: "'5432'",
+ },
+ },
+ {
+ name: "Plain text which is a binding env: SourceUserAssignedManagedIdentityClientId",
+ envName: "spring.cloud.azure.credential.client-id",
+ envValue: binding.SourceUserAssignedManagedIdentityClientId,
+ want: BicepEnv{
+ BicepEnvType: BicepEnvTypePlainText,
+ Name: "spring.cloud.azure.credential.client-id",
+ PlainTextValue: placeholderOfSourceClientId,
+ },
+ },
+ {
+ name: "Secret",
+ envName: "POSTGRES_PASSWORD",
+ envValue: binding.ToBindingEnv(binding.Target{Type: binding.AzureDatabaseForPostgresql},
+ binding.InfoTypePassword),
+ want: BicepEnv{
+ BicepEnvType: BicepEnvTypeSecret,
+ Name: "POSTGRES_PASSWORD",
+ SecretName: "azure-db-postgresql-password",
+ SecretValue: "postgreSqlDatabasePassword",
+ },
+ },
+ {
+ name: "KeyVault Secret",
+ envName: "REDIS_PASSWORD",
+ envValue: binding.ToBindingEnv(binding.Target{Type: binding.AzureCacheForRedis},
+ binding.InfoTypePassword),
+ want: BicepEnv{
+ BicepEnvType: 
BicepEnvTypeKeyVaultSecret, + Name: "REDIS_PASSWORD", + SecretName: "azure-db-redis-password", + SecretValue: "redisConn.outputs.keyVaultUrlForPass", + }, + }, + { + name: "Eureka server", + envName: "eureka.client.serviceUrl.defaultZone", + envValue: fmt.Sprintf("%s/eureka", binding.ToBindingEnv(binding.Target{ + Type: binding.AzureContainerApp, + Name: "eurekaServerName", + }, binding.InfoTypeHost)), + want: BicepEnv{ + BicepEnvType: BicepEnvTypePlainText, + Name: "eureka.client.serviceUrl.defaultZone", + PlainTextValue: "'https://eurekaServerName.${containerAppsEnvironment.outputs.defaultDomain}/eureka'", + }, + }, + { + name: "Config server", + envName: "spring.config.import", + envValue: fmt.Sprintf("optional:configserver:%s?fail-fast=true", binding.ToBindingEnv(binding.Target{ + Type: binding.AzureContainerApp, + Name: "config-server-name", + }, binding.InfoTypeHost)), + want: BicepEnv{ + BicepEnvType: BicepEnvTypePlainText, + Name: "spring.config.import", + PlainTextValue: "'optional:configserver:" + + "https://configServerName.${containerAppsEnvironment.outputs.defaultDomain}?fail-fast=true'", + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := ToBicepEnv(tt.envName, tt.envValue) + assert.Equal(t, tt.want, actual) + }) + } +} + +func TestToBicepEnvPlainTextValue(t *testing.T) { + tests := []struct { + name string + in string + want string + }{ + { + name: "string", + in: "inputStringExample", + want: "'inputStringExample'", + }, + { + name: "single variable", + in: "${inputSingleVariableExample}", + want: "inputSingleVariableExample", + }, + { + name: "multiple variable", + in: "${HOST}:${PORT}", + want: "'${HOST}:${PORT}'", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := toBicepEnvPlainTextValue(tt.in) + assert.Equal(t, tt.want, actual) + }) + } +} + +func TestShouldAddToBicepFile(t *testing.T) { + tests := []struct { + name string + infraSpec ServiceSpec + propertyName string + want bool + }{ + { + name: "not related property and not using mysql and postgres", + infraSpec: ServiceSpec{}, + propertyName: "test", + want: true, + }, + { + name: "not using mysql and postgres", + infraSpec: ServiceSpec{}, + propertyName: "spring.datasource.url", + want: true, + }, + { + name: "not using user assigned managed identity", + infraSpec: ServiceSpec{ + DbMySql: &DatabaseMySql{ + AuthType: internal.AuthTypePassword, + }, + }, + propertyName: "spring.datasource.url", + want: true, + }, + { + name: "not service connector added property", + infraSpec: ServiceSpec{ + DbMySql: &DatabaseMySql{ + AuthType: internal.AuthTypePassword, + }, + }, + propertyName: "test", + want: true, + }, + { + name: "should not added", + infraSpec: ServiceSpec{ + DbMySql: &DatabaseMySql{ + AuthType: internal.AuthTypePassword, + }, + }, + propertyName: "spring.datasource.url", + want: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := ShouldAddToBicepFile(tt.infraSpec, tt.propertyName) + assert.Equal(t, tt.want, actual) + }) + } +} diff --git a/cli/azd/internal/scaffold/scaffold.go b/cli/azd/internal/scaffold/scaffold.go index f9ce4752ea9..e2c11be9477 100644 --- a/cli/azd/internal/scaffold/scaffold.go +++ b/cli/azd/internal/scaffold/scaffold.go @@ -11,6 +11,7 @@ import ( "strings" "text/template" + "github.com/azure/azure-dev/cli/azd/internal" "github.com/azure/azure-dev/cli/azd/pkg/osutil" "github.com/azure/azure-dev/cli/azd/resources" "github.com/psanford/memfs" @@ -24,12 +25,16 @@ const 
templateRoot = "scaffold/templates" // To execute a named template, call Execute with the defined name. func Load() (*template.Template, error) { funcMap := template.FuncMap{ - "bicepName": BicepName, - "containerAppName": ContainerAppName, - "upper": strings.ToUpper, - "lower": strings.ToLower, - "alphaSnakeUpper": AlphaSnakeUpper, - "formatParam": FormatParameter, + "bicepName": BicepName, + "containerAppName": ContainerAppName, + "upper": strings.ToUpper, + "lower": strings.ToLower, + "alphaSnakeUpper": AlphaSnakeUpper, + "formatParam": FormatParameter, + "hasPrefix": strings.HasPrefix, + "toBicepEnv": ToBicepEnv, + "isPlaceholderOfSourceClientId": IsPlaceholderOfSourceClientId, + "shouldAddToBicepFile": ShouldAddToBicepFile, } t, err := template.New("templates"). @@ -76,6 +81,18 @@ func supportingFiles(spec InfraSpec) []string { files = append(files, "/modules/fetch-container-image.bicep") } + if spec.AzureServiceBus != nil && spec.AzureServiceBus.AuthType == internal.AuthTypeConnectionString { + files = append(files, "/modules/set-servicebus-namespace-connection-string.bicep") + } + + if spec.AzureEventHubs != nil && spec.AzureEventHubs.AuthType == internal.AuthTypeConnectionString { + files = append(files, "/modules/set-event-hubs-namespace-connection-string.bicep") + } + + if spec.AzureStorageAccount != nil && spec.AzureStorageAccount.AuthType == internal.AuthTypeConnectionString { + files = append(files, "/modules/set-storage-account-connection-string.bicep") + } + return files } @@ -201,12 +218,21 @@ func executeToFS(targetFS *memfs.FS, tmpl *template.Template, name string, path } func preExecExpand(spec *InfraSpec) { - // postgres requires specific password seeding parameters + // postgres and mysql requires specific password seeding parameters if spec.DbPostgres != nil { spec.Parameters = append(spec.Parameters, Parameter{ - Name: "databasePassword", - Value: "$(secretOrRandomPassword ${AZURE_KEY_VAULT_NAME} databasePassword)", + Name: "postgreSqlDatabasePassword", + Value: "$(secretOrRandomPassword ${AZURE_KEY_VAULT_NAME} postgreSqlDatabasePassword)", + Type: "string", + Secret: true, + }) + } + if spec.DbMySql != nil { + spec.Parameters = append(spec.Parameters, + Parameter{ + Name: "mysqlDatabasePassword", + Value: "$(secretOrRandomPassword ${AZURE_KEY_VAULT_NAME} mysqlDatabasePassword)", Type: "string", Secret: true, }) diff --git a/cli/azd/internal/scaffold/scaffold_test.go b/cli/azd/internal/scaffold/scaffold_test.go index 238043c3673..5fc518f8b2a 100644 --- a/cli/azd/internal/scaffold/scaffold_test.go +++ b/cli/azd/internal/scaffold/scaffold_test.go @@ -2,6 +2,7 @@ package scaffold import ( "context" + "github.com/azure/azure-dev/cli/azd/internal" "os" "path/filepath" "strings" @@ -98,13 +99,11 @@ func TestExecInfra(t *testing.T) { }, }, }, - DbCosmosMongo: &DatabaseReference{ + DbCosmosMongo: &DatabaseCosmosMongo{ DatabaseName: "appdb", }, - DbRedis: &DatabaseReference{ - DatabaseName: "redis", - }, - DbPostgres: &DatabaseReference{ + DbRedis: &DatabaseRedis{}, + DbPostgres: &DatabasePostgres{ DatabaseName: "appdb", }, }, @@ -133,7 +132,7 @@ func TestExecInfra(t *testing.T) { { Name: "api", Port: 3100, - DbPostgres: &DatabaseReference{ + DbPostgres: &DatabasePostgres{ DatabaseName: "appdb", }, }, @@ -150,7 +149,7 @@ func TestExecInfra(t *testing.T) { { Name: "api", Port: 3100, - DbCosmosMongo: &DatabaseReference{ + DbCosmosMongo: &DatabaseCosmosMongo{ DatabaseName: "appdb", }, }, @@ -161,17 +160,168 @@ func TestExecInfra(t *testing.T) { "API with Redis", InfraSpec{ 
DbRedis: &DatabaseRedis{}, + Services: []ServiceSpec{ + { + Name: "api", + Port: 3100, + DbRedis: &DatabaseRedis{}, + }, + }, + }, + }, + { + "API with Storage Account", + InfraSpec{ + AzureStorageAccount: &AzureDepStorageAccount{ + ContainerNames: []string{"container1"}, + }, + Services: []ServiceSpec{ + { + Name: "api", + Port: 3100, + AzureStorageAccount: &AzureDepStorageAccount{}, + }, + }, + }, + }, + { + "API with Service Bus", + InfraSpec{ + AzureServiceBus: &AzureDepServiceBus{ + Queues: []string{"queue1"}, + AuthType: internal.AuthTypeUserAssignedManagedIdentity, + IsJms: true, + }, Services: []ServiceSpec{ { Name: "api", Port: 3100, - DbRedis: &DatabaseReference{ - DatabaseName: "redis", + AzureServiceBus: &AzureDepServiceBus{ + Queues: []string{"queue1"}, + AuthType: internal.AuthTypeUserAssignedManagedIdentity, + IsJms: true, }, }, }, }, }, + { + "API with Event Hubs", + InfraSpec{ + AzureEventHubs: &AzureDepEventHubs{ + EventHubNames: []string{"eventhub1"}, + AuthType: internal.AuthTypeUserAssignedManagedIdentity, + UseKafka: true, + SpringBootVersion: "3.4.0", + }, + Services: []ServiceSpec{ + { + Name: "api", + Port: 3100, + AzureEventHubs: &AzureDepEventHubs{ + EventHubNames: []string{"eventhub1"}, + AuthType: internal.AuthTypeUserAssignedManagedIdentity, + UseKafka: true, + SpringBootVersion: "3.4.0", + }, + }, + }, + }, + }, + { + "API with Cosmos DB", + InfraSpec{ + DbCosmos: &DatabaseCosmosAccount{ + DatabaseName: "cosmos-db", + Containers: []CosmosSqlDatabaseContainer{ + { + ContainerName: "container1", + PartitionKeyPaths: []string{"/partitionKey"}, + }, + }, + }, + Services: []ServiceSpec{ + { + Name: "api", + Port: 3100, + DbCosmos: &DatabaseCosmosAccount{ + DatabaseName: "cosmos-db", + Containers: []CosmosSqlDatabaseContainer{ + { + ContainerName: "container1", + PartitionKeyPaths: []string{"/partitionKey"}, + }, + }, + }, + }, + }, + }, + }, + { + "API with MySQL password", + InfraSpec{ + DbMySql: &DatabaseMySql{ + DatabaseName: "appdb", + DatabaseUser: "appuser", + AuthType: internal.AuthTypePassword, + }, + Services: []ServiceSpec{ + { + Name: "api", + Port: 3100, + DbMySql: &DatabaseMySql{ + DatabaseName: "appdb", + DatabaseUser: "appuser", + AuthType: internal.AuthTypePassword, + }, + }, + }, + }, + }, + { + "API with MySQL umi", + InfraSpec{ + DbMySql: &DatabaseMySql{ + DatabaseName: "appdb", + DatabaseUser: "appuser", + AuthType: internal.AuthTypeUserAssignedManagedIdentity, + }, + Services: []ServiceSpec{ + { + Name: "api", + Port: 3100, + DbMySql: &DatabaseMySql{ + DatabaseName: "appdb", + DatabaseUser: "appuser", + AuthType: internal.AuthTypeUserAssignedManagedIdentity, + }, + }, + }, + }, + }, + // with azd add, users could add only mongo resource + { + "Only Mongo", + InfraSpec{ + DbCosmosMongo: &DatabaseCosmosMongo{}, + }, + }, + // with azd add, users could add only redis resource + { + "Only Redis", + InfraSpec{ + DbRedis: &DatabaseRedis{}, + }, + }, + // with azd add, users could add only postgresql resource + { + "Only Postgres", + InfraSpec{ + DbPostgres: &DatabasePostgres{ + AuthType: internal.AuthTypeUserAssignedManagedIdentity, + }, + }, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { diff --git a/cli/azd/internal/scaffold/spec.go b/cli/azd/internal/scaffold/spec.go index 763b83c322e..4ecc76a518c 100644 --- a/cli/azd/internal/scaffold/spec.go +++ b/cli/azd/internal/scaffold/spec.go @@ -3,6 +3,9 @@ package scaffold import ( "fmt" "strings" + + "github.com/azure/azure-dev/cli/azd/internal" + 
"github.com/azure/azure-dev/cli/azd/internal/binding" ) type InfraSpec struct { @@ -11,11 +14,17 @@ type InfraSpec struct { // Databases to create DbPostgres *DatabasePostgres - DbCosmosMongo *DatabaseCosmosMongo + DbMySql *DatabaseMySql DbRedis *DatabaseRedis + DbCosmosMongo *DatabaseCosmosMongo + DbCosmos *DatabaseCosmosAccount // ai models AIModels []AIModel + + AzureServiceBus *AzureDepServiceBus + AzureEventHubs *AzureDepEventHubs + AzureStorageAccount *AzureDepStorageAccount } type Parameter struct { @@ -28,6 +37,23 @@ type Parameter struct { type DatabasePostgres struct { DatabaseUser string DatabaseName string + AuthType internal.AuthType +} + +type DatabaseMySql struct { + DatabaseUser string + DatabaseName string + AuthType internal.AuthType +} + +type CosmosSqlDatabaseContainer struct { + ContainerName string + PartitionKeyPaths []string +} + +type DatabaseCosmosAccount struct { + DatabaseName string + Containers []CosmosSqlDatabaseContainer } type DatabaseCosmosMongo struct { @@ -51,11 +77,30 @@ type AIModelModel struct { Version string } +type AzureDepServiceBus struct { + Queues []string + TopicsAndSubscriptions map[string][]string + AuthType internal.AuthType + IsJms bool +} + +type AzureDepEventHubs struct { + EventHubNames []string + AuthType internal.AuthType + UseKafka bool + SpringBootVersion string +} + +type AzureDepStorageAccount struct { + ContainerNames []string + AuthType internal.AuthType +} + type ServiceSpec struct { Name string Port int - Env map[string]string + Envs map[string]string // Front-end properties. Frontend *Frontend @@ -64,12 +109,18 @@ type ServiceSpec struct { Backend *Backend // Connection to a database - DbPostgres *DatabaseReference - DbCosmosMongo *DatabaseReference - DbRedis *DatabaseReference + DbPostgres *DatabasePostgres + DbMySql *DatabaseMySql + DbRedis *DatabaseRedis + DbCosmosMongo *DatabaseCosmosMongo + DbCosmos *DatabaseCosmosAccount // AI model connections AIModels []AIModelReference + + AzureServiceBus *AzureDepServiceBus + AzureEventHubs *AzureDepEventHubs + AzureStorageAccount *AzureDepStorageAccount } type Frontend struct { @@ -84,10 +135,6 @@ type ServiceReference struct { Name string } -type DatabaseReference struct { - DatabaseName string -} - type AIModelReference struct { Name string } @@ -140,3 +187,16 @@ func serviceDefPlaceholder(serviceName string) Parameter { Secret: true, } } + +func AddNewEnvironmentVariable(serviceSpec *ServiceSpec, name string, value string) error { + merged, err := binding.MergeMapWithDuplicationCheck(serviceSpec.Envs, + map[string]string{ + name: value, + }, + ) + if err != nil { + return err + } + serviceSpec.Envs = merged + return nil +} diff --git a/cli/azd/internal/scaffold/spec_service_binding.go b/cli/azd/internal/scaffold/spec_service_binding.go new file mode 100644 index 00000000000..b2313708ed8 --- /dev/null +++ b/cli/azd/internal/scaffold/spec_service_binding.go @@ -0,0 +1,107 @@ +package scaffold + +import ( + "strconv" + + "github.com/azure/azure-dev/cli/azd/internal" + "github.com/azure/azure-dev/cli/azd/internal/binding" +) + +func BindToPostgres(sourceType binding.SourceType, serviceSpec *ServiceSpec, postgres *DatabasePostgres) error { + serviceSpec.DbPostgres = postgres + return addBindingEnvs(serviceSpec, + binding.Source{Type: sourceType}, + binding.Target{Type: binding.AzureDatabaseForPostgresql, AuthType: postgres.AuthType}) +} + +func BindToMySql(sourceType binding.SourceType, serviceSpec *ServiceSpec, mysql *DatabaseMySql) error { + serviceSpec.DbMySql = mysql + return 
addBindingEnvs(serviceSpec, + binding.Source{Type: sourceType}, + binding.Target{Type: binding.AzureDatabaseForMysql, AuthType: mysql.AuthType}) +} + +func BindToMongoDb(sourceType binding.SourceType, serviceSpec *ServiceSpec, mongo *DatabaseCosmosMongo) error { + serviceSpec.DbCosmosMongo = mongo + return addBindingEnvs(serviceSpec, + binding.Source{Type: sourceType}, + binding.Target{Type: binding.AzureCosmosDBForMongoDB, AuthType: internal.AuthTypeConnectionString}) +} + +func BindToCosmosDb(sourceType binding.SourceType, serviceSpec *ServiceSpec, cosmos *DatabaseCosmosAccount) error { + serviceSpec.DbCosmos = cosmos + return addBindingEnvs(serviceSpec, + binding.Source{Type: sourceType}, + binding.Target{Type: binding.AzureCosmosDBForNoSQL, AuthType: internal.AuthTypeUserAssignedManagedIdentity}) +} + +func BindToRedis(sourceType binding.SourceType, serviceSpec *ServiceSpec, redis *DatabaseRedis) error { + serviceSpec.DbRedis = redis + return addBindingEnvs(serviceSpec, + binding.Source{Type: sourceType}, + binding.Target{Type: binding.AzureCacheForRedis, AuthType: internal.AuthTypePassword}) +} + +func BindToServiceBus(sourceType binding.SourceType, serviceSpec *ServiceSpec, serviceBus *AzureDepServiceBus) error { + serviceSpec.AzureServiceBus = serviceBus + return addBindingEnvs(serviceSpec, + binding.Source{ + Type: sourceType, + Metadata: map[binding.MetadataType]string{binding.IsSpringBootJms: strconv.FormatBool(serviceBus.IsJms)}}, + binding.Target{Type: binding.AzureServiceBus, AuthType: serviceBus.AuthType}) +} + +func BindToEventHubs(sourceType binding.SourceType, serviceSpec *ServiceSpec, eventHubs *AzureDepEventHubs) error { + serviceSpec.AzureEventHubs = eventHubs + return addBindingEnvs(serviceSpec, + binding.Source{ + Type: sourceType, + Metadata: map[binding.MetadataType]string{ + binding.IsSpringBootKafka: strconv.FormatBool(eventHubs.UseKafka), + binding.SpringBootVersion: eventHubs.SpringBootVersion}}, + binding.Target{Type: binding.AzureEventHubs, AuthType: eventHubs.AuthType}) +} + +func BindToStorageAccount(sourceType binding.SourceType, serviceSpec *ServiceSpec, + account *AzureDepStorageAccount) error { + serviceSpec.AzureStorageAccount = account + return addBindingEnvs(serviceSpec, + binding.Source{Type: sourceType}, + binding.Target{Type: binding.AzureStorageAccount, AuthType: account.AuthType}) +} + +func BindToAIModels(sourceType binding.SourceType, serviceSpec *ServiceSpec, model string) error { + serviceSpec.AIModels = append(serviceSpec.AIModels, AIModelReference{Name: model}) + return addBindingEnvs(serviceSpec, binding.Source{Type: sourceType}, + binding.Target{Type: binding.AzureOpenAiModel, AuthType: internal.AuthTypeUnspecified}) +} + +func addBindingEnvs(serviceSpec *ServiceSpec, source binding.Source, target binding.Target) error { + envs, err := binding.GetBindingEnvs(source, target) + if err != nil { + return err + } + serviceSpec.Envs, err = binding.MergeMapWithDuplicationCheck(serviceSpec.Envs, envs) + if err != nil { + return err + } + return nil +} + +// BindToContainerApp a call b +// todo: +// 1. Add field in ServiceSpec to identify b's app type like Eureka server and Config server. +// 2. Create GetServiceBindingEnvsForContainerApp +// 3. Merge GetServiceBindingEnvsForEurekaServer and GetServiceBindingEnvsForConfigServer into +// GetServiceBindingEnvsForContainerApp. +// 4. 
Delete printHintsAboutUseHostContainerApp use GetServiceBindingEnvsForContainerApp instead +func BindToContainerApp(a *ServiceSpec, b *ServiceSpec) { + if a.Frontend == nil { + a.Frontend = &Frontend{} + } + a.Frontend.Backends = append(a.Frontend.Backends, ServiceReference{Name: b.Name}) + if b.Backend == nil { + b.Backend = &Backend{} + } + b.Backend.Frontends = append(b.Backend.Frontends, ServiceReference{Name: a.Name}) +} diff --git a/cli/azd/internal/tracing/fields/fields.go b/cli/azd/internal/tracing/fields/fields.go index 52562e181c6..6b3bf726609 100644 --- a/cli/azd/internal/tracing/fields/fields.go +++ b/cli/azd/internal/tracing/fields/fields.go @@ -240,8 +240,9 @@ const ( const ( InitMethod = attribute.Key("init.method") - AppInitDetectedDatabase = attribute.Key("appinit.detected.databases") - AppInitDetectedServices = attribute.Key("appinit.detected.services") + AppInitDetectedDatabase = attribute.Key("appinit.detected.databases") + AppInitDetectedServices = attribute.Key("appinit.detected.services") + AppInitDetectedAzureDeps = attribute.Key("appinit.detected.azuredeps") AppInitConfirmedDatabases = attribute.Key("appinit.confirmed.databases") AppInitConfirmedServices = attribute.Key("appinit.confirmed.services") @@ -249,6 +250,9 @@ const ( AppInitModifyAddCount = attribute.Key("appinit.modify_add.count") AppInitModifyRemoveCount = attribute.Key("appinit.modify_remove.count") + // AppInitJavaDetect indicates if java detector has started or finished + AppInitJavaDetect = attribute.Key("appinit.java.detect") + // The last step recorded during the app init process. AppInitLastStep = attribute.Key("appinit.lastStep") ) diff --git a/cli/azd/internal/useragent_test.go b/cli/azd/internal/useragent_test.go index a33a53cda15..beb986ca485 100644 --- a/cli/azd/internal/useragent_test.go +++ b/cli/azd/internal/useragent_test.go @@ -15,16 +15,20 @@ func TestUserAgentStringScenarios(t *testing.T) { azDevIdentifier := fmt.Sprintf("azdev/%s %s", version, runtimeInfo()) t.Run("default", func(t *testing.T) { + t.Setenv("GITHUB_ACTIONS", "false") + t.Setenv(AzdUserAgentEnvVar, "") require.Equal(t, azDevIdentifier, UserAgent()) }) t.Run("withUserAgent", func(t *testing.T) { + t.Setenv("GITHUB_ACTIONS", "false") t.Setenv(AzdUserAgentEnvVar, "dev_user_agent") require.Equal(t, fmt.Sprintf("%s dev_user_agent", azDevIdentifier), UserAgent()) }) t.Run("onGitHubActions", func(t *testing.T) { t.Setenv("GITHUB_ACTIONS", "true") + t.Setenv(AzdUserAgentEnvVar, "") require.Equal(t, fmt.Sprintf("%s GhActions", azDevIdentifier), UserAgent()) }) diff --git a/cli/azd/pkg/pipeline/pipeline_manager_test.go b/cli/azd/pkg/pipeline/pipeline_manager_test.go index 6396a37f925..945a99726e7 100644 --- a/cli/azd/pkg/pipeline/pipeline_manager_test.go +++ b/cli/azd/pkg/pipeline/pipeline_manager_test.go @@ -773,7 +773,8 @@ func createPipelineManager( mockContext.Console, args, mockContext.Container, - project.NewImportManager(project.NewDotNetImporter(nil, nil, nil, nil, mockContext.AlphaFeaturesManager)), + project.NewImportManager( + project.NewDotNetImporter(nil, nil, nil, nil, mockContext.AlphaFeaturesManager)), &mockUserConfigManager{}, ) } diff --git a/cli/azd/pkg/project/framework_service_docker.go b/cli/azd/pkg/project/framework_service_docker.go index 6ed24b5f7b0..eaa93bd3f67 100644 --- a/cli/azd/pkg/project/framework_service_docker.go +++ b/cli/azd/pkg/project/framework_service_docker.go @@ -14,6 +14,8 @@ import ( "path/filepath" "strings" + "go.opentelemetry.io/otel/trace" + 
"github.com/Azure/azure-sdk-for-go/sdk/azcore/to" "github.com/azure/azure-dev/cli/azd/internal" "github.com/azure/azure-dev/cli/azd/internal/appdetect" @@ -30,8 +32,8 @@ import ( "github.com/azure/azure-dev/cli/azd/pkg/output" "github.com/azure/azure-dev/cli/azd/pkg/tools" "github.com/azure/azure-dev/cli/azd/pkg/tools/docker" + "github.com/azure/azure-dev/cli/azd/pkg/tools/maven" "github.com/azure/azure-dev/cli/azd/pkg/tools/pack" - "go.opentelemetry.io/otel/trace" ) type DockerProjectOptions struct { @@ -200,6 +202,22 @@ func (p *dockerProject) Build( return &ServiceBuildResult{Restore: restoreOutput}, nil } + // if it's a java project without Dockerfile, we help to package jar and add a default Dockerfile for Docker build + if serviceConfig.Language == ServiceLanguageJava && serviceConfig.Docker.Path == "" { + mvnCli := maven.NewCli(exec.NewCommandRunner(nil)) + err := mvnCli.CleanPackage(ctx, serviceConfig.RelativePath, serviceConfig.Project.Path) + if err != nil { + return nil, err + } + defaultDockerfilePath, err := addDefaultDockerfileForJavaProject(serviceConfig.Name) + if err != nil { + return nil, err + } + serviceConfig.Docker = DockerProjectOptions{ + Path: defaultDockerfilePath, + } + } + dockerOptions := getDockerOptionsWithDefaults(serviceConfig.Docker) resolveParameters := func(source []string) ([]string, error) { @@ -447,6 +465,7 @@ func (p *dockerProject) packBuild( } builder := DefaultBuilderImage + buildContext := svc.Path() environ := []string{} userDefinedImage := false if os.Getenv("AZD_BUILDER_IMAGE") != "" { @@ -458,6 +477,16 @@ func (p *dockerProject) packBuild( // Always default to port 80 for consistency across languages environ = append(environ, "ORYX_RUNTIME_PORT=80") + // For multi-module project, specify parent directory and submodule for pack build + if svc.ParentPath != "" { + buildContext = svc.ParentPath + svcRelPath, err := filepath.Rel(buildContext, svc.Path()) + if err != nil { + return nil, err + } + environ = append(environ, fmt.Sprintf("BP_MAVEN_BUILT_MODULE=%s", filepath.ToSlash(svcRelPath))) + } + if svc.Language == ServiceLanguageJava { environ = append(environ, "ORYX_RUNTIME_PORT=8080") } @@ -518,7 +547,7 @@ func (p *dockerProject) packBuild( err = packCli.Build( ctx, - svc.Path(), + buildContext, builder, imageName, environ, @@ -608,3 +637,28 @@ func getDockerOptionsWithDefaults(options DockerProjectOptions) DockerProjectOpt return options } + +// todo: hardcode jdk-21 as base image here, may need more accurate java version detection. 
+const DefaultDockerfileForJavaProject = `FROM openjdk:21-jdk-slim +COPY ./target/*.jar /app.jar +ENTRYPOINT ["java", "-jar", "/app.jar"]` + +func addDefaultDockerfileForJavaProject(svcName string) (string, error) { + log.Printf("Dockerfile not found for java project %s, will provide a default one", svcName) + dockerfileDir, err := os.MkdirTemp("", svcName) + if err != nil { + return "", fmt.Errorf("error creating temp Dockerfile directory: %w", err) + } + + dockerfilePath := filepath.Join(dockerfileDir, "Dockerfile") + file, err := os.Create(dockerfilePath) + if err != nil { + return "", fmt.Errorf("error creating Dockerfile at %s: %w", dockerfilePath, err) + } + defer file.Close() + + if _, err = file.WriteString(DefaultDockerfileForJavaProject); err != nil { + return "", fmt.Errorf("error writing Dockerfile at %s: %w", dockerfilePath, err) + } + return dockerfilePath, nil +} diff --git a/cli/azd/pkg/project/importer.go b/cli/azd/pkg/project/importer.go index 26fbde3a07e..3494d76b81c 100644 --- a/cli/azd/pkg/project/importer.go +++ b/cli/azd/pkg/project/importer.go @@ -167,7 +167,7 @@ func (im *ImportManager) ProjectInfrastructure(ctx context.Context, projectConfi composeEnabled := im.dotNetImporter.alphaFeatureManager.IsEnabled(featureCompose) if composeEnabled && len(projectConfig.Resources) > 0 { - return tempInfra(ctx, projectConfig) + return tempInfra(ctx, projectConfig, im.dotNetImporter.console) } if !composeEnabled && len(projectConfig.Resources) > 0 { @@ -209,7 +209,7 @@ func (im *ImportManager) SynthAllInfrastructure(ctx context.Context, projectConf composeEnabled := im.dotNetImporter.alphaFeatureManager.IsEnabled(featureCompose) if composeEnabled && len(projectConfig.Resources) > 0 { - return infraFsForProject(ctx, projectConfig) + return infraFsForProject(ctx, projectConfig, im.dotNetImporter.console) } if !composeEnabled && len(projectConfig.Resources) > 0 { diff --git a/cli/azd/pkg/project/importer_test.go b/cli/azd/pkg/project/importer_test.go index 168e5c93261..c11062f6ba0 100644 --- a/cli/azd/pkg/project/importer_test.go +++ b/cli/azd/pkg/project/importer_test.go @@ -329,7 +329,7 @@ func TestImportManagerProjectInfrastructureAspire(t *testing.T) { require.NoError(t, err) defer os.Remove(path) - // Use an a dotnet project and use the mock to simulate an Aspire project + // Use a dotnet project and use the mock to simulate an Aspire project r, e := manager.ProjectInfrastructure(*mockContext.Context, &ProjectConfig{ Services: map[string]*ServiceConfig{ "test": { @@ -356,7 +356,7 @@ func TestImportManagerProjectInfrastructureAspire(t *testing.T) { // If we fetch the infrastructure again, we expect that the manifest is already cached and `dotnet run` on the apphost // will not be invoked again. 
- // Use an a dotnet project and use the mock to simulate an Aspire project + // Use a dotnet project and use the mock to simulate an Aspire project _, e = manager.ProjectInfrastructure(*mockContext.Context, &ProjectConfig{ Services: map[string]*ServiceConfig{ "test": { @@ -392,10 +392,13 @@ resources: - api postgresdb: type: db.postgres + authType: password mongodb: type: db.mongo + authType: userAssignedManagedIdentity redis: type: db.redis + authType: password ` func Test_ImportManager_ProjectInfrastructure_FromResources(t *testing.T) { @@ -405,11 +408,15 @@ func Test_ImportManager_ProjectInfrastructure_FromResources(t *testing.T) { im := &ImportManager{ dotNetImporter: &DotNetImporter{ alphaFeatureManager: alpha.NewFeaturesManagerWithConfig(config.NewEmptyConfig()), + console: mocks.NewMockContext(context.Background()).Console, }, } prjConfig := &ProjectConfig{} err := yaml.Unmarshal([]byte(prjWithResources), prjConfig) + for key, res := range prjConfig.Resources { + res.Name = key + } require.NoError(t, err) infra, err := im.ProjectInfrastructure(context.Background(), prjConfig) @@ -436,12 +443,16 @@ func TestImportManager_SynthAllInfrastructure_FromResources(t *testing.T) { im := &ImportManager{ dotNetImporter: &DotNetImporter{ alphaFeatureManager: alpha.NewFeaturesManagerWithConfig(config.NewEmptyConfig()), + console: mocks.NewMockContext(context.Background()).Console, }, } prjConfig := &ProjectConfig{} err := yaml.Unmarshal([]byte(prjWithResources), prjConfig) require.NoError(t, err) + for key, res := range prjConfig.Resources { + res.Name = key + } projectFs, err := im.SynthAllInfrastructure(context.Background(), prjConfig) require.NoError(t, err) diff --git a/cli/azd/pkg/project/project.go b/cli/azd/pkg/project/project.go index ab2b66fd37a..8cd4401086e 100644 --- a/cli/azd/pkg/project/project.go +++ b/cli/azd/pkg/project/project.go @@ -275,19 +275,25 @@ func Save(ctx context.Context, projectConfig *ProjectConfig, projectFilePath str copy.Services[name] = &svcCopy } + for name, resource := range projectConfig.Resources { + resourceCopy := *resource + resourceCopy.Project = © + + copy.Resources[name] = &resourceCopy + } + projectBytes, err := yaml.Marshal(copy) if err != nil { return fmt.Errorf("marshalling project yaml: %w", err) } - version := "v1.0" + version := "alpha" if projectConfig.MetaSchemaVersion != "" { version = projectConfig.MetaSchemaVersion } - annotation := fmt.Sprintf( - "# yaml-language-server: $schema=https://raw.githubusercontent.com/Azure/azure-dev/main/schemas/%s/azure.yaml.json", - version) + annotation := fmt.Sprintf("# yaml-language-server: $schema=https://raw.githubusercontent.com/azure-javaee/"+ + "azure-dev/feature/sjad/schemas/%s/azure.yaml.json", version) projectFileContents := bytes.NewBufferString(annotation + "\n\n") _, err = projectFileContents.Write(projectBytes) if err != nil { diff --git a/cli/azd/pkg/project/resources.go b/cli/azd/pkg/project/resources.go index 9c1494ec15e..1d52e07b2c8 100644 --- a/cli/azd/pkg/project/resources.go +++ b/cli/azd/pkg/project/resources.go @@ -5,6 +5,7 @@ package project import ( "fmt" + "github.com/azure/azure-dev/cli/azd/internal" "github.com/braydonk/yaml" ) @@ -22,11 +23,20 @@ func AllResourceTypes() []ResourceType { } const ( - ResourceTypeDbRedis ResourceType = "db.redis" - ResourceTypeDbPostgres ResourceType = "db.postgres" - ResourceTypeDbMongo ResourceType = "db.mongo" - ResourceTypeHostContainerApp ResourceType = "host.containerapp" - ResourceTypeOpenAiModel ResourceType = "ai.openai.model" + 
ResourceTypeDbRedis ResourceType = "db.redis" + ResourceTypeDbPostgres ResourceType = "db.postgres" + ResourceTypeDbMySQL ResourceType = "db.mysql" + ResourceTypeDbMongo ResourceType = "db.mongo" + ResourceTypeDbCosmos ResourceType = "db.cosmos" + ResourceTypeHostContainerApp ResourceType = "host.containerapp" + ResourceTypeOpenAiModel ResourceType = "ai.openai.model" + ResourceTypeMessagingServiceBus ResourceType = "messaging.servicebus" + ResourceTypeMessagingEventHubs ResourceType = "messaging.eventhubs" + ResourceTypeMessagingKafka ResourceType = "messaging.kafka" + ResourceTypeStorage ResourceType = "storage" + + ResourceTypeJavaEurekaServer ResourceType = "java.eureka.server" + ResourceTypeJavaConfigServer ResourceType = "java.config.server" ) func (r ResourceType) String() string { @@ -35,12 +45,28 @@ func (r ResourceType) String() string { return "Redis" case ResourceTypeDbPostgres: return "PostgreSQL" + case ResourceTypeDbMySQL: + return "MySQL" case ResourceTypeDbMongo: return "MongoDB" + case ResourceTypeDbCosmos: + return "CosmosDB" case ResourceTypeHostContainerApp: return "Container App" case ResourceTypeOpenAiModel: return "Open AI Model" + case ResourceTypeMessagingServiceBus: + return "Service Bus" + case ResourceTypeMessagingEventHubs: + return "Event Hubs" + case ResourceTypeMessagingKafka: + return "Kafka" + case ResourceTypeStorage: + return "Storage Account" + case ResourceTypeJavaEurekaServer: + return "Java Eureka Server" + case ResourceTypeJavaConfigServer: + return "Java Config Server" } return "" @@ -89,6 +115,46 @@ func (r *ResourceConfig) MarshalYAML() (interface{}, error) { if err != nil { return nil, err } + case ResourceTypeDbPostgres: + err := marshalRawProps(raw.Props.(PostgresProps)) + if err != nil { + return nil, err + } + case ResourceTypeDbMySQL: + err := marshalRawProps(raw.Props.(MySQLProps)) + if err != nil { + return nil, err + } + case ResourceTypeDbMongo: + err := marshalRawProps(raw.Props.(MongoDBProps)) + if err != nil { + return nil, err + } + case ResourceTypeDbCosmos: + err := marshalRawProps(raw.Props.(CosmosDBProps)) + if err != nil { + return nil, err + } + case ResourceTypeMessagingServiceBus: + err := marshalRawProps(raw.Props.(ServiceBusProps)) + if err != nil { + return nil, err + } + case ResourceTypeMessagingEventHubs: + err := marshalRawProps(raw.Props.(EventHubsProps)) + if err != nil { + return nil, err + } + case ResourceTypeMessagingKafka: + err := marshalRawProps(raw.Props.(KafkaProps)) + if err != nil { + return nil, err + } + case ResourceTypeStorage: + err := marshalRawProps(raw.Props.(StorageProps)) + if err != nil { + return nil, err + } } return raw, nil @@ -128,6 +194,54 @@ func (r *ResourceConfig) UnmarshalYAML(value *yaml.Node) error { return err } raw.Props = cap + case ResourceTypeDbMySQL: + mp := MySQLProps{} + if err := unmarshalProps(&mp); err != nil { + return err + } + raw.Props = mp + case ResourceTypeDbPostgres: + pp := PostgresProps{} + if err := unmarshalProps(&pp); err != nil { + return err + } + raw.Props = pp + case ResourceTypeDbMongo: + mp := MongoDBProps{} + if err := unmarshalProps(&mp); err != nil { + return err + } + raw.Props = mp + case ResourceTypeDbCosmos: + cp := CosmosDBProps{} + if err := unmarshalProps(&cp); err != nil { + return err + } + raw.Props = cp + case ResourceTypeMessagingServiceBus: + sb := ServiceBusProps{} + if err := unmarshalProps(&sb); err != nil { + return err + } + raw.Props = sb + case ResourceTypeMessagingEventHubs: + eh := EventHubsProps{} + if err := 
unmarshalProps(&eh); err != nil { + return err + } + raw.Props = eh + case ResourceTypeMessagingKafka: + kp := KafkaProps{} + if err := unmarshalProps(&kp); err != nil { + return err + } + raw.Props = kp + case ResourceTypeStorage: + sp := StorageProps{} + if err := unmarshalProps(&sp); err != nil { + return err + } + raw.Props = sp } *r = ResourceConfig(raw) @@ -155,3 +269,49 @@ type AIModelPropsModel struct { Name string `yaml:"name,omitempty"` Version string `yaml:"version,omitempty"` } + +type MySQLProps struct { + DatabaseName string `yaml:"databaseName,omitempty"` + AuthType internal.AuthType `yaml:"authType,omitempty"` +} + +type PostgresProps struct { + DatabaseName string `yaml:"databaseName,omitempty"` + AuthType internal.AuthType `yaml:"authType,omitempty"` +} + +type MongoDBProps struct { + DatabaseName string `yaml:"databaseName,omitempty"` +} + +type CosmosDBProps struct { + Containers []CosmosDBContainerProps `yaml:"containers,omitempty"` + DatabaseName string `yaml:"databaseName,omitempty"` +} + +type CosmosDBContainerProps struct { + ContainerName string `yaml:"containerName,omitempty"` + PartitionKeyPaths []string `yaml:"partitionKeyPaths,omitempty"` +} + +type ServiceBusProps struct { + Queues []string `yaml:"queues,omitempty"` + IsJms bool `yaml:"isJms,omitempty"` + AuthType internal.AuthType `yaml:"authType,omitempty"` +} + +type EventHubsProps struct { + EventHubNames []string `yaml:"eventHubNames,omitempty"` + AuthType internal.AuthType `yaml:"authType,omitempty"` +} + +type KafkaProps struct { + Topics []string `yaml:"topics,omitempty"` + AuthType internal.AuthType `yaml:"authType,omitempty"` + SpringBootVersion string `yaml:"springBootVersion,omitempty"` +} + +type StorageProps struct { + Containers []string `yaml:"containers,omitempty"` + AuthType internal.AuthType `yaml:"authType,omitempty"` +} diff --git a/cli/azd/pkg/project/scaffold_gen.go b/cli/azd/pkg/project/scaffold_gen.go index 120f1c63211..66cdb477428 100644 --- a/cli/azd/pkg/project/scaffold_gen.go +++ b/cli/azd/pkg/project/scaffold_gen.go @@ -10,8 +10,13 @@ import ( "os" "path/filepath" "slices" + "strconv" "strings" + "github.com/azure/azure-dev/cli/azd/internal" + "github.com/azure/azure-dev/cli/azd/internal/binding" + "github.com/azure/azure-dev/cli/azd/pkg/input" + "github.com/azure/azure-dev/cli/azd/internal/scaffold" "github.com/azure/azure-dev/cli/azd/pkg/infra/provisioning" "github.com/azure/azure-dev/cli/azd/pkg/osutil" @@ -19,13 +24,13 @@ import ( ) // Generates the in-memory contents of an `infra` directory. -func infraFs(_ context.Context, prjConfig *ProjectConfig) (fs.FS, error) { +func infraFs(cxt context.Context, prjConfig *ProjectConfig, console input.Console) (fs.FS, error) { t, err := scaffold.Load() if err != nil { return nil, fmt.Errorf("loading scaffold templates: %w", err) } - infraSpec, err := infraSpec(prjConfig) + infraSpec, err := infraSpec(prjConfig, console, cxt) if err != nil { return nil, fmt.Errorf("generating infrastructure spec: %w", err) } @@ -41,13 +46,14 @@ func infraFs(_ context.Context, prjConfig *ProjectConfig) (fs.FS, error) { // Returns the infrastructure configuration that points to a temporary, generated `infra` directory on the filesystem. 
func tempInfra( ctx context.Context, - prjConfig *ProjectConfig) (*Infra, error) { + prjConfig *ProjectConfig, + console input.Console) (*Infra, error) { tmpDir, err := os.MkdirTemp("", "azd-infra") if err != nil { return nil, fmt.Errorf("creating temporary directory: %w", err) } - files, err := infraFs(ctx, prjConfig) + files, err := infraFs(ctx, prjConfig, console) if err != nil { return nil, err } @@ -89,8 +95,9 @@ func tempInfra( // Generates the filesystem of all infrastructure files to be placed, rooted at the project directory. // The content only includes `./infra` currently. -func infraFsForProject(ctx context.Context, prjConfig *ProjectConfig) (fs.FS, error) { - infraFS, err := infraFs(ctx, prjConfig) +func infraFsForProject(ctx context.Context, prjConfig *ProjectConfig, + console input.Console) (fs.FS, error) { + infraFS, err := infraFs(ctx, prjConfig, console) if err != nil { return nil, err } @@ -111,7 +118,8 @@ func infraFsForProject(ctx context.Context, prjConfig *ProjectConfig) (fs.FS, er return nil } - err = generatedFS.MkdirAll(filepath.Join(infraPathPrefix, filepath.Dir(path)), osutil.PermissionDirectoryOwnerOnly) + err = generatedFS.MkdirAll(filepath.Join(infraPathPrefix, filepath.Dir(path)), + osutil.PermissionDirectoryOwnerOnly) if err != nil { return err } @@ -130,53 +138,98 @@ func infraFsForProject(ctx context.Context, prjConfig *ProjectConfig) (fs.FS, er return generatedFS, nil } -func infraSpec(projectConfig *ProjectConfig) (*scaffold.InfraSpec, error) { +func infraSpec(projectConfig *ProjectConfig, + console input.Console, ctx context.Context) (*scaffold.InfraSpec, error) { infraSpec := scaffold.InfraSpec{} - // backends -> frontends - backendMapping := map[string]string{} - - for _, res := range projectConfig.Resources { - switch res.Type { + for _, resource := range projectConfig.Resources { + switch resource.Type { case ResourceTypeDbRedis: infraSpec.DbRedis = &scaffold.DatabaseRedis{} case ResourceTypeDbMongo: infraSpec.DbCosmosMongo = &scaffold.DatabaseCosmosMongo{ - DatabaseName: res.Name, + DatabaseName: resource.Props.(MongoDBProps).DatabaseName, } case ResourceTypeDbPostgres: infraSpec.DbPostgres = &scaffold.DatabasePostgres{ - DatabaseName: res.Name, + DatabaseName: resource.Props.(PostgresProps).DatabaseName, DatabaseUser: "pgadmin", + AuthType: resource.Props.(PostgresProps).AuthType, + } + case ResourceTypeDbMySQL: + infraSpec.DbMySql = &scaffold.DatabaseMySql{ + DatabaseName: resource.Props.(MySQLProps).DatabaseName, + DatabaseUser: "mysqladmin", + AuthType: resource.Props.(MySQLProps).AuthType, + } + case ResourceTypeDbCosmos: + infraSpec.DbCosmos = &scaffold.DatabaseCosmosAccount{ + DatabaseName: resource.Props.(CosmosDBProps).DatabaseName, + } + containers := resource.Props.(CosmosDBProps).Containers + for _, container := range containers { + infraSpec.DbCosmos.Containers = append(infraSpec.DbCosmos.Containers, + scaffold.CosmosSqlDatabaseContainer{ + ContainerName: container.ContainerName, + PartitionKeyPaths: container.PartitionKeyPaths, + }) + } + case ResourceTypeMessagingServiceBus: + props := resource.Props.(ServiceBusProps) + infraSpec.AzureServiceBus = &scaffold.AzureDepServiceBus{ + Queues: props.Queues, + AuthType: props.AuthType, + IsJms: props.IsJms, + } + case ResourceTypeMessagingEventHubs: + props := resource.Props.(EventHubsProps) + infraSpec.AzureEventHubs = &scaffold.AzureDepEventHubs{ + EventHubNames: props.EventHubNames, + AuthType: props.AuthType, + UseKafka: false, + } + case ResourceTypeMessagingKafka: + props := 
resource.Props.(KafkaProps) + infraSpec.AzureEventHubs = &scaffold.AzureDepEventHubs{ + EventHubNames: props.Topics, + AuthType: props.AuthType, + UseKafka: true, + SpringBootVersion: props.SpringBootVersion, + } + case ResourceTypeStorage: + props := resource.Props.(StorageProps) + infraSpec.AzureStorageAccount = &scaffold.AzureDepStorageAccount{ + ContainerNames: props.Containers, + AuthType: props.AuthType, } case ResourceTypeHostContainerApp: - svcSpec := scaffold.ServiceSpec{ - Name: res.Name, + serviceSpec := scaffold.ServiceSpec{ + Name: resource.Name, Port: -1, } - - err := mapContainerApp(res, &svcSpec, &infraSpec) + err := handleContainerAppProps(resource, &serviceSpec, &infraSpec) if err != nil { return nil, err } - - err = mapHostUses(res, &svcSpec, backendMapping, projectConfig) - if err != nil { - return nil, err + if _, ok := projectConfig.Services[resource.Name]; ok { + serviceSpec.Envs, err = binding.MergeMapWithDuplicationCheck(serviceSpec.Envs, + projectConfig.Services[resource.Name].Env) + if err != nil { + return nil, err + } } - - infraSpec.Services = append(infraSpec.Services, svcSpec) + infraSpec.Services = append(infraSpec.Services, serviceSpec) case ResourceTypeOpenAiModel: - props := res.Props.(AIModelProps) + props := resource.Props.(AIModelProps) if len(props.Model.Name) == 0 { - return nil, fmt.Errorf("resources.%s.model is required", res.Name) + return nil, fmt.Errorf("resources.%s.model is required", resource.Name) } if len(props.Model.Version) == 0 { - return nil, fmt.Errorf("resources.%s.version is required", res.Name) + return nil, fmt.Errorf("resources.%s.version is required", resource.Name) } infraSpec.AIModels = append(infraSpec.AIModels, scaffold.AIModel{ - Name: res.Name, + Name: resource.Name, Model: scaffold.AIModelModel{ Name: props.Model.Name, Version: props.Model.Version, @@ -185,16 +238,14 @@ func infraSpec(projectConfig *ProjectConfig) (*scaffold.InfraSpec, error) { } } - // create reverse frontends -> backends mapping - for i := range infraSpec.Services { - svc := &infraSpec.Services[i] - if front, ok := backendMapping[svc.Name]; ok { - if svc.Backend == nil { - svc.Backend = &scaffold.Backend{} - } + err := mapUses(&infraSpec, projectConfig) + if err != nil { + return nil, err + } - svc.Backend.Frontends = append(svc.Backend.Frontends, scaffold.ServiceReference{Name: front}) - } + err = printEnvListAboutUses(&infraSpec, projectConfig, console, ctx) + if err != nil { + return nil, err } slices.SortFunc(infraSpec.Services, func(a, b scaffold.ServiceSpec) int { @@ -204,21 +255,187 @@ func infraSpec(projectConfig *ProjectConfig) (*scaffold.InfraSpec, error) { return &infraSpec, nil } -func mapContainerApp(res *ResourceConfig, svcSpec *scaffold.ServiceSpec, infraSpec *scaffold.InfraSpec) error { - props := res.Props.(ContainerAppProps) +func mapUses(infraSpec *scaffold.InfraSpec, projectConfig *ProjectConfig) error { + for i := range infraSpec.Services { + userSpec := &infraSpec.Services[i] + userResourceName := userSpec.Name + userResource, ok := projectConfig.Resources[userResourceName] + if !ok { + return fmt.Errorf("service (%s) exist, but there isn't a resource with that name", + userResourceName) + } + sourceType := sourceType(projectConfig, userResourceName) + for _, usedResourceName := range userResource.Uses { + usedResource, ok := projectConfig.Resources[usedResourceName] + if !ok { + return fmt.Errorf("in azure.yaml, (%s) uses (%s), but (%s) doesn't", + userResourceName, usedResourceName, usedResourceName) + } + var err error + switch 
usedResource.Type { + case ResourceTypeDbPostgres: + err = scaffold.BindToPostgres(sourceType, userSpec, infraSpec.DbPostgres) + case ResourceTypeDbMySQL: + err = scaffold.BindToMySql(sourceType, userSpec, infraSpec.DbMySql) + case ResourceTypeDbMongo: + err = scaffold.BindToMongoDb(sourceType, userSpec, infraSpec.DbCosmosMongo) + case ResourceTypeDbCosmos: + err = scaffold.BindToCosmosDb(sourceType, userSpec, infraSpec.DbCosmos) + case ResourceTypeDbRedis: + err = scaffold.BindToRedis(sourceType, userSpec, infraSpec.DbRedis) + case ResourceTypeMessagingServiceBus: + err = scaffold.BindToServiceBus(sourceType, userSpec, infraSpec.AzureServiceBus) + case ResourceTypeMessagingKafka, ResourceTypeMessagingEventHubs: + err = scaffold.BindToEventHubs(sourceType, userSpec, infraSpec.AzureEventHubs) + case ResourceTypeStorage: + err = scaffold.BindToStorageAccount(sourceType, userSpec, infraSpec.AzureStorageAccount) + case ResourceTypeOpenAiModel: + err = scaffold.BindToAIModels(sourceType, userSpec, usedResource.Name) + case ResourceTypeHostContainerApp: + usedSpec := getServiceSpecByName(infraSpec, usedResource.Name) + if usedSpec == nil { + return fmt.Errorf("'%s' uses '%s', but %s doesn't exist", userSpec.Name, usedResource.Name, + usedResource.Name) + } + scaffold.BindToContainerApp(userSpec, usedSpec) + default: + return fmt.Errorf("resource (%s) uses (%s), but the type of (%s) is (%s), which is unsupported", + userResource.Name, usedResource.Name, usedResource.Name, usedResource.Type) + } + if err != nil { + return err + } + } + } + return nil +} + +func sourceType(projectConfig *ProjectConfig, userResourceName string) binding.SourceType { + userService := projectConfig.Services[userResourceName] + if userService == nil { + return binding.Unknown + } + switch userService.Language { + case ServiceLanguageJava: + return binding.Java + default: + return binding.Unknown + } +} + +func printEnvListAboutUses(infraSpec *scaffold.InfraSpec, projectConfig *ProjectConfig, + console input.Console, ctx context.Context) error { + for i := range infraSpec.Services { + userSpec := &infraSpec.Services[i] + userResourceName := userSpec.Name + userResource, ok := projectConfig.Resources[userResourceName] + if !ok { + return fmt.Errorf("service (%s) exist, but there isn't a resource with that name", + userResourceName) + } + sourceType := sourceType(projectConfig, userResourceName) + for _, usedResourceName := range userResource.Uses { + usedResource, ok := projectConfig.Resources[usedResourceName] + if !ok { + return fmt.Errorf("in azure.yaml, (%s) uses (%s), but (%s) doesn't", + userResourceName, usedResourceName, usedResourceName) + } + console.Message(ctx, fmt.Sprintf("\nInformation about environment variables:\n"+ + "In azure.yaml, '%s' uses '%s'. \n"+ + "The 'uses' relationship is implemented by environment variables. \n"+ + "Please make sure your application used the right environment variable. 
\n"+ + "Here is the list of environment variables: ", + userResourceName, usedResourceName)) + var variables map[string]string + var err error + switch usedResource.Type { + case ResourceTypeDbPostgres: + variables, err = binding.GetBindingEnvs( + binding.Source{Type: sourceType}, + binding.Target{Type: binding.AzureDatabaseForPostgresql, AuthType: infraSpec.DbPostgres.AuthType}) + case ResourceTypeDbMySQL: + variables, err = binding.GetBindingEnvs( + binding.Source{Type: sourceType}, + binding.Target{Type: binding.AzureDatabaseForMysql, AuthType: infraSpec.DbMySql.AuthType}) + case ResourceTypeDbMongo: + variables, err = binding.GetBindingEnvs( + binding.Source{Type: sourceType}, + binding.Target{ + Type: binding.AzureCosmosDBForMongoDB, + AuthType: internal.AuthTypeConnectionString, + }) + case ResourceTypeDbCosmos: + variables, err = binding.GetBindingEnvs( + binding.Source{Type: sourceType}, + binding.Target{ + Type: binding.AzureCosmosDBForNoSQL, + AuthType: internal.AuthTypeUserAssignedManagedIdentity, + }) + case ResourceTypeDbRedis: + variables, err = binding.GetBindingEnvs( + binding.Source{Type: sourceType}, + binding.Target{Type: binding.AzureCacheForRedis, AuthType: internal.AuthTypePassword}) + case ResourceTypeMessagingServiceBus: + variables, err = binding.GetBindingEnvs( + binding.Source{ + Type: sourceType, + Metadata: map[binding.MetadataType]string{ + binding.IsSpringBootJms: strconv.FormatBool(infraSpec.AzureServiceBus.IsJms)}}, + binding.Target{Type: binding.AzureServiceBus, AuthType: infraSpec.AzureServiceBus.AuthType}) + case ResourceTypeMessagingKafka: + variables, err = binding.GetBindingEnvs( + binding.Source{ + Type: sourceType, + Metadata: map[binding.MetadataType]string{ + binding.IsSpringBootKafka: strconv.FormatBool(true), + binding.SpringBootVersion: infraSpec.AzureEventHubs.SpringBootVersion}}, + binding.Target{Type: binding.AzureEventHubs, AuthType: infraSpec.AzureEventHubs.AuthType}) + case ResourceTypeMessagingEventHubs: + variables, err = binding.GetBindingEnvs( + binding.Source{ + Type: sourceType, + Metadata: map[binding.MetadataType]string{ + binding.IsSpringBootKafka: "false"}}, + binding.Target{Type: binding.AzureEventHubs, AuthType: infraSpec.AzureEventHubs.AuthType}) + case ResourceTypeStorage: + variables, err = binding.GetBindingEnvs( + binding.Source{Type: sourceType}, + binding.Target{Type: binding.AzureStorageAccount, AuthType: infraSpec.AzureStorageAccount.AuthType}) + case ResourceTypeHostContainerApp: + printHintsAboutUseHostContainerApp(userResourceName, usedResourceName, console, ctx) + default: + return fmt.Errorf("resource (%s) uses (%s), but the type of (%s) is (%s), "+ + "which is doesn't add necessary environment variable", + userResource.Name, usedResource.Name, usedResource.Name, usedResource.Type) + } + if err != nil { + return err + } + for key := range variables { + console.Message(ctx, fmt.Sprintf(" %s=xxx", key)) + } + console.Message(ctx, "\n") + } + } + return nil +} + +func handleContainerAppProps( + resourceConfig *ResourceConfig, serviceSpec *scaffold.ServiceSpec, infraSpec *scaffold.InfraSpec) error { + props := resourceConfig.Props.(ContainerAppProps) for _, envVar := range props.Env { if len(envVar.Value) == 0 && len(envVar.Secret) == 0 { return fmt.Errorf( "environment variable %s for host %s is invalid: both value and secret are empty", envVar.Name, - res.Name) + resourceConfig.Name) } if len(envVar.Value) > 0 && len(envVar.Secret) > 0 { return fmt.Errorf( "environment variable %s for host %s is invalid: both value 
and secret are set", envVar.Name, - res.Name) + resourceConfig.Name) } isSecret := len(envVar.Secret) > 0 @@ -233,49 +450,19 @@ func mapContainerApp(res *ResourceConfig, svcSpec *scaffold.ServiceSpec, infraSp // Here, DB_HOST is not a secret, but DB_SECRET is. And yet, DB_HOST will be marked as a secret. // This is a limitation of the current implementation, but it's safer to mark both as secrets above. evaluatedValue := genBicepParamsFromEnvSubst(value, isSecret, infraSpec) - svcSpec.Env[envVar.Name] = evaluatedValue + err := scaffold.AddNewEnvironmentVariable(serviceSpec, envVar.Name, evaluatedValue) + if err != nil { + return err + } } port := props.Port - if port < 1 || port > 65535 { - return fmt.Errorf("port value %d for host %s must be between 1 and 65535", port, res.Name) - } - - svcSpec.Port = port - return nil -} - -func mapHostUses( - res *ResourceConfig, - svcSpec *scaffold.ServiceSpec, - backendMapping map[string]string, - prj *ProjectConfig) error { - for _, use := range res.Uses { - useRes, ok := prj.Resources[use] - if !ok { - return fmt.Errorf("resource %s uses %s, which does not exist", res.Name, use) - } - - switch useRes.Type { - case ResourceTypeDbMongo: - svcSpec.DbCosmosMongo = &scaffold.DatabaseReference{DatabaseName: useRes.Name} - case ResourceTypeDbPostgres: - svcSpec.DbPostgres = &scaffold.DatabaseReference{DatabaseName: useRes.Name} - case ResourceTypeDbRedis: - svcSpec.DbRedis = &scaffold.DatabaseReference{DatabaseName: useRes.Name} - case ResourceTypeHostContainerApp: - if svcSpec.Frontend == nil { - svcSpec.Frontend = &scaffold.Frontend{} - } - - svcSpec.Frontend.Backends = append(svcSpec.Frontend.Backends, - scaffold.ServiceReference{Name: use}) - backendMapping[use] = res.Name // record the backend -> frontend mapping - case ResourceTypeOpenAiModel: - svcSpec.AIModels = append(svcSpec.AIModels, scaffold.AIModelReference{Name: use}) - } + if port < 0 || port > 65535 { + return fmt.Errorf("port value for '%s' must be between 0 and 65535 (port = 0 means ingress disabled), "+ + "but it's %d ", resourceConfig.Name, port) } + serviceSpec.Port = port return nil } @@ -288,10 +475,11 @@ func setParameter(spec *scaffold.InfraSpec, name string, value string, isSecret } // prevent auto-generated parameters from being overwritten with different values - if valStr, ok := parameters.Value.(string); !ok || ok && valStr != value { + if valStr, ok := parameters.Value.(string); !ok || valStr != value { // if you are a maintainer and run into this error, consider using a different, unique name panic(fmt.Sprintf( - "parameter collision: parameter %s already set to %s, cannot set to %s", name, parameters.Value, value)) + "parameter collision: parameter %s already set to %s, cannot set to %s", name, parameters.Value, + value)) } return @@ -311,6 +499,7 @@ func setParameter(spec *scaffold.InfraSpec, name string, value string, isSecret // // If the string is a literal, it is returned as is. // If isSecret is true, the parameter is marked as a secret. +// The returned value is string, all expression inside are wrapped by "${}". 
func genBicepParamsFromEnvSubst( s string, isSecret bool, @@ -325,16 +514,16 @@ func genBicepParamsFromEnvSubst( var result string if len(names) == 0 { - // literal string with no expressions, quote the value as a Bicep string - result = "'" + s + "'" + // literal string with no expressions + result = s } else if len(names) == 1 { // single expression, return the bicep parameter name to reference the expression - result = scaffold.BicepName(names[0]) + result = "${" + scaffold.BicepName(names[0]) + "}" } else { // multiple expressions // construct the string with all expressions replaced by parameter references as a Bicep interpolated string previous := 0 - result = "'" + result = "" for i, loc := range locations { // replace each expression with references by variable name result += s[previous:loc.start] @@ -343,8 +532,28 @@ func genBicepParamsFromEnvSubst( result += "}" previous = loc.stop + 1 } - result += "'" } return result } + +func getServiceSpecByName(infraSpec *scaffold.InfraSpec, name string) *scaffold.ServiceSpec { + for i := range infraSpec.Services { + if infraSpec.Services[i].Name == name { + return &infraSpec.Services[i] + } + } + return nil +} + +// todo: merge it into scaffold.BindToContainerApp +func printHintsAboutUseHostContainerApp(userResourceName string, usedResourceName string, + console input.Console, ctx context.Context) { + if console == nil { + return + } + console.Message(ctx, fmt.Sprintf("Environment variables in %s:", userResourceName)) + console.Message(ctx, fmt.Sprintf("%s_BASE_URL=xxx", strings.ToUpper(usedResourceName))) + console.Message(ctx, fmt.Sprintf("Environment variables in %s:", usedResourceName)) + console.Message(ctx, fmt.Sprintf("%s_BASE_URL=xxx", strings.ToUpper(userResourceName))) +} diff --git a/cli/azd/pkg/project/scaffold_gen_test.go b/cli/azd/pkg/project/scaffold_gen_test.go index 85cf4125075..a3c11a38119 100644 --- a/cli/azd/pkg/project/scaffold_gen_test.go +++ b/cli/azd/pkg/project/scaffold_gen_test.go @@ -18,23 +18,23 @@ func Test_genBicepParamsFromEnvSubst(t *testing.T) { want string wantParams []scaffold.Parameter }{ - {"foo", false, "'foo'", nil}, - {"${MY_VAR}", false, "myVar", []scaffold.Parameter{{Name: "myVar", Value: "${MY_VAR}", Type: "string"}}}, + {"foo", false, "foo", nil}, + {"${MY_VAR}", false, "${myVar}", []scaffold.Parameter{{Name: "myVar", Value: "${MY_VAR}", Type: "string"}}}, - {"${MY_SECRET}", true, "mySecret", + {"${MY_SECRET}", true, "${mySecret}", []scaffold.Parameter{ {Name: "mySecret", Value: "${MY_SECRET}", Type: "string", Secret: true}}}, - {"Hello, ${world:=okay}!", false, "world", + {"Hello, ${world:=okay}!", false, "${world}", []scaffold.Parameter{ {Name: "world", Value: "${world:=okay}", Type: "string"}}}, - {"${CAT} and ${DOG}", false, "'${cat} and ${dog}'", + {"${CAT} and ${DOG}", false, "${cat} and ${dog}", []scaffold.Parameter{ {Name: "cat", Value: "${CAT}", Type: "string"}, {Name: "dog", Value: "${DOG}", Type: "string"}}}, - {"${DB_HOST:='local'}:${DB_USERNAME:='okay'}", true, "'${dbHost}:${dbUsername}'", + {"${DB_HOST:='local'}:${DB_USERNAME:='okay'}", true, "${dbHost}:${dbUsername}", []scaffold.Parameter{ {Name: "dbHost", Value: "${DB_HOST:='local'}", Type: "string", Secret: true}, {Name: "dbUsername", Value: "${DB_USERNAME:='okay'}", Type: "string", Secret: true}}}, diff --git a/cli/azd/pkg/project/service_config.go b/cli/azd/pkg/project/service_config.go index aa3cf7bf640..387c7288fd3 100644 --- a/cli/azd/pkg/project/service_config.go +++ b/cli/azd/pkg/project/service_config.go @@ -20,6 +20,8 @@ 
type ServiceConfig struct { ResourceName osutil.ExpandableString `yaml:"resourceName,omitempty"` // The ARM api version to use for the service. If not specified, the latest version is used. ApiVersion string `yaml:"apiVersion,omitempty"` + // The path to the parent directory of the project + ParentPath string `yaml:"parentPath,omitempty"` // The relative path to the project folder from the project root RelativePath string `yaml:"project"` // The azure hosting model to use, ex) appservice, function, containerapp @@ -45,6 +47,8 @@ type ServiceConfig struct { DotNetContainerApp *DotNetContainerAppOptions `yaml:"-,omitempty"` // Custom configuration for the service target Config map[string]any `yaml:"config,omitempty"` + // Environment variables for service + Env map[string]string `yaml:"env,omitempty"` // Computed lazily by useDotnetPublishForDockerBuild and cached. This is true when the project // is a dotnet project and there is not an explicit Dockerfile in the project directory. useDotNetPublishForDockerBuild *bool diff --git a/cli/azd/pkg/tools/maven/maven.go b/cli/azd/pkg/tools/maven/maven.go index 43d8d91db59..15fbd5d73b6 100644 --- a/cli/azd/pkg/tools/maven/maven.go +++ b/cli/azd/pkg/tools/maven/maven.go @@ -1,9 +1,12 @@ package maven import ( + "archive/zip" + "bufio" "context" "errors" "fmt" + "io" "log" "os" "path/filepath" @@ -13,6 +16,7 @@ import ( osexec "os/exec" + "github.com/azure/azure-dev/cli/azd/internal" "github.com/azure/azure-dev/cli/azd/pkg/exec" "github.com/azure/azure-dev/cli/azd/pkg/tools" ) @@ -73,6 +77,8 @@ func (m *Cli) mvnCmd() (string, error) { return m.mvnCmdStr, nil } +const downloadedMavenVersion = "3.9.9" + func getMavenPath(projectPath string, rootProjectPath string) (string, error) { mvnw, err := getMavenWrapperPath(projectPath, rootProjectPath) if mvnw != "" { @@ -92,10 +98,7 @@ func getMavenPath(projectPath string, rootProjectPath string) (string, error) { return "", fmt.Errorf("failed looking up mvn in PATH: %w", err) } - return "", errors.New( - "maven could not be found. Install either Maven or Maven Wrapper by " + - "visiting https://maven.apache.org/ or https://maven.apache.org/wrapper/", - ) + return getDownloadedMvnCommand(downloadedMavenVersion) } // getMavenWrapperPath finds the path to mvnw in the project directory, up to the root project directory. @@ -242,3 +245,203 @@ func NewCli(commandRunner exec.CommandRunner) *Cli { commandRunner: commandRunner, } } + +func (cli *Cli) CleanPackage(ctx context.Context, relativePath string, projectPath string) error { + mvnCmd, err := cli.mvnCmd() + if err != nil { + return err + } + runArgs := exec.NewRunArgs(mvnCmd, "clean", "package", "-DskipTests", "-am", "-pl", relativePath).WithCwd(projectPath) + _, err = cli.commandRunner.Run(ctx, runArgs) + if err != nil { + return fmt.Errorf("error running mvn clean package for module: %s. error = %w", relativePath, err) + } + return nil +} + +func (cli *Cli) EffectivePom(ctx context.Context, pomPath string) (string, error) { + mvnCmd, err := cli.mvnCmd() + if err != nil { + return "", err + } + pomDir := filepath.Dir(pomPath) + runArgs := exec.NewRunArgs(mvnCmd, "help:effective-pom", "-f", pomPath).WithCwd(pomDir) + result, err := cli.commandRunner.Run(ctx, runArgs) + if err != nil { + return "", fmt.Errorf("failed to run mvn help:effective-pom for pom file: %s. 
error = %w", pomPath, err) + } + + return getEffectivePomFromConsoleOutput(result.Stdout) +} + +func getEffectivePomFromConsoleOutput(consoleOutput string) (string, error) { + var effectivePom strings.Builder + scanner := bufio.NewScanner(strings.NewReader(consoleOutput)) + inProject := false + for scanner.Scan() { + line := scanner.Text() + if strings.HasPrefix(strings.TrimSpace(line), "") { + effectivePom.WriteString(line) + break + } + if inProject { + effectivePom.WriteString(line) + } + } + if err := scanner.Err(); err != nil { + return "", fmt.Errorf("failed to scan console output. %w", err) + } + return effectivePom.String(), nil +} + +func getDownloadedMvnCommand(mavenVersion string) (string, error) { + mavenCommand, err := getAzdMvnCommand(mavenVersion) + if err != nil { + return "", err + } + if fileExists(mavenCommand) { + log.Println("Skip downloading maven because it already exists.") + return mavenCommand, nil + } + log.Println("Downloading maven") + mavenDir, err := getAzdMvnDir() + if err != nil { + return "", err + } + if _, err := os.Stat(mavenDir); os.IsNotExist(err) { + err = os.MkdirAll(mavenDir, os.ModePerm) + if err != nil { + return "", fmt.Errorf("unable to create directory: %w", err) + } + } + + mavenZipFilePath := filepath.Join(mavenDir, mavenZipFileName(mavenVersion)) + err = downloadMaven(mavenVersion, mavenZipFilePath) + if err != nil { + return "", err + } + err = unzip(mavenZipFilePath, mavenDir) + if err != nil { + return "", fmt.Errorf("failed to unzip maven bin.zip: %w", err) + } + return mavenCommand, nil +} + +func getAzdMvnDir() (string, error) { + userHome, err := os.UserHomeDir() + if err != nil { + return "", fmt.Errorf("unable to get user home directory: %w", err) + } + return filepath.Join(userHome, ".azd", "java", "maven"), nil +} + +func getAzdMvnCommand(mavenVersion string) (string, error) { + mavenDir, err := getAzdMvnDir() + if err != nil { + return "", err + } + azdMvnCommand := filepath.Join(mavenDir, "apache-maven-"+mavenVersion, "bin", "mvn") + return azdMvnCommand, nil +} + +func mavenZipFileName(mavenVersion string) string { + return "apache-maven-" + mavenVersion + "-bin.zip" +} + +func mavenUrl(mavenVersion string) string { + return "https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/" + + mavenVersion + "/" + mavenZipFileName(mavenVersion) +} + +func downloadMaven(mavenVersion string, filePath string) error { + requestUrl := mavenUrl(mavenVersion) + data, err := internal.Download(requestUrl) + if err != nil { + return err + } + return os.WriteFile(filePath, data, 0600) +} + +func unzip(src string, destinationFolder string) error { + reader, err := zip.OpenReader(src) + if err != nil { + return err + } + defer func(reader *zip.ReadCloser) { + err := reader.Close() + if err != nil { + log.Println("failed to close ReadCloser. %w", err) + } + }(reader) + + for _, file := range reader.File { + destinationPath, err := getValidDestPath(destinationFolder, file.Name) + if err != nil { + return err + } + if file.FileInfo().IsDir() { + err := os.MkdirAll(destinationPath, os.ModePerm) + if err != nil { + return err + } + } else { + if err = os.MkdirAll(filepath.Dir(destinationPath), os.ModePerm); err != nil { + return err + } + + outFile, err := os.OpenFile(destinationPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, file.Mode()) + if err != nil { + return err + } + defer func(outFile *os.File) { + err := outFile.Close() + if err != nil { + log.Println("failed to close file. 
%w", err) + } + }(outFile) + + rc, err := file.Open() + if err != nil { + return err + } + defer func(rc io.ReadCloser) { + err := rc.Close() + if err != nil { + log.Println("failed to close file. %w", err) + } + }(rc) + + for { + _, err = io.CopyN(outFile, rc, 1_000_000) + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + } + } + } + return nil +} + +func getValidDestPath(destinationFolder string, fileName string) (string, error) { + destinationPath := filepath.Clean(filepath.Join(destinationFolder, fileName)) + if !strings.HasPrefix(destinationPath, destinationFolder+string(os.PathSeparator)) { + return "", fmt.Errorf("%s: illegal file path", fileName) + } + return destinationPath, nil +} + +func fileExists(path string) bool { + if path == "" { + return false + } + if _, err := os.Stat(path); err == nil { + return true + } else { + return false + } +} diff --git a/cli/azd/pkg/tools/maven/maven_test.go b/cli/azd/pkg/tools/maven/maven_test.go index 4a906a1b4db..8a4096c6995 100644 --- a/cli/azd/pkg/tools/maven/maven_test.go +++ b/cli/azd/pkg/tools/maven/maven_test.go @@ -20,6 +20,7 @@ func Test_getMavenPath(t *testing.T) { rootPath := os.TempDir() sourcePath := filepath.Join(rootPath, "src") projectPath := filepath.Join(sourcePath, "api") + azdMvn, _ := getAzdMvnCommand(downloadedMavenVersion) pathDir := os.TempDir() @@ -43,8 +44,10 @@ func Test_getMavenPath(t *testing.T) { {name: "MvnwProjectPath", mvnwPath: []string{projectPath}, want: filepath.Join(projectPath, mvnwWithExt())}, {name: "MvnwSrcPath", mvnwPath: []string{sourcePath}, want: filepath.Join(sourcePath, mvnwWithExt())}, {name: "MvnwRootPath", mvnwPath: []string{rootPath}, want: filepath.Join(rootPath, mvnwWithExt())}, - {name: "MvnwFirst", mvnwPath: []string{rootPath}, want: filepath.Join(rootPath, mvnwWithExt()), - mvnPath: []string{pathDir}, envVar: map[string]string{"PATH": pathDir}}, + { + name: "MvnwFirst", mvnwPath: []string{rootPath}, want: filepath.Join(rootPath, mvnwWithExt()), + mvnPath: []string{pathDir}, envVar: map[string]string{"PATH": pathDir}, + }, { name: "MvnwProjectPathRelative", mvnwPath: []string{projectPath}, @@ -69,7 +72,10 @@ func Test_getMavenPath(t *testing.T) { envVar: map[string]string{"PATH": pathDir}, want: filepath.Join(pathDir, mvnWithExt()), }, - {name: "NotFound", want: "", wantErr: true}, + { + name: "Use azd downloaded maven", + want: azdMvn, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -94,7 +100,8 @@ func Test_getMavenPath(t *testing.T) { wd, err := os.Getwd() require.NoError(t, err) - log.Printf("rootPath: %s, cwd: %s, getMavenPath(%s, %s)\n", rootPath, wd, args.projectPath, args.rootProjectPath) + log.Printf("rootPath: %s, cwd: %s, getMavenPath(%s, %s)\n", rootPath, wd, args.projectPath, + args.rootProjectPath) actual, err := getMavenPath(args.projectPath, args.rootProjectPath) if tt.wantErr { diff --git a/cli/azd/resources/scaffold/base/abbreviations.json b/cli/azd/resources/scaffold/base/abbreviations.json index dc62141f9da..4d4a4c62d6c 100644 --- a/cli/azd/resources/scaffold/base/abbreviations.json +++ b/cli/azd/resources/scaffold/base/abbreviations.json @@ -33,6 +33,7 @@ "dataMigrationServices": "dms-", "dBforMySQLServers": "mysql-", "dBforPostgreSQLServers": "psql-", + "deploymentScript": "dc-", "devicesIotHubs": "iot-", "devicesProvisioningServices": "provs-", "devicesProvisioningServicesCertificates": "pcert-", diff --git a/cli/azd/resources/scaffold/base/modules/set-event-hubs-namespace-connection-string.bicep 
b/cli/azd/resources/scaffold/base/modules/set-event-hubs-namespace-connection-string.bicep new file mode 100644 index 00000000000..64245640096 --- /dev/null +++ b/cli/azd/resources/scaffold/base/modules/set-event-hubs-namespace-connection-string.bicep @@ -0,0 +1,21 @@ +param eventHubsNamespaceName string +param connectionStringSecretName string +param keyVaultName string + +resource eventHubsNamespace 'Microsoft.EventHub/namespaces@2024-01-01' existing = { + name: eventHubsNamespaceName +} + +resource keyVault 'Microsoft.KeyVault/vaults@2022-07-01' existing = { + name: keyVaultName +} + +resource connectionStringSecret 'Microsoft.KeyVault/vaults/secrets@2022-07-01' = { + name: connectionStringSecretName + parent: keyVault + properties: { + value: listKeys(concat(resourceId('Microsoft.EventHub/namespaces', eventHubsNamespaceName), '/AuthorizationRules/RootManageSharedAccessKey'), eventHubsNamespace.apiVersion).primaryConnectionString + } +} + +output keyVaultUrl string = 'https://${keyVaultName}${environment().suffixes.keyvaultDns}/secrets/${connectionStringSecretName}' diff --git a/cli/azd/resources/scaffold/base/modules/set-redis-conn.bicep b/cli/azd/resources/scaffold/base/modules/set-redis-conn.bicep index 813f96fbcbf..fbe41132a20 100644 --- a/cli/azd/resources/scaffold/base/modules/set-redis-conn.bicep +++ b/cli/azd/resources/scaffold/base/modules/set-redis-conn.bicep @@ -27,3 +27,6 @@ resource urlSecret 'Microsoft.KeyVault/vaults/secrets@2022-07-01' = { } } +output keyVaultUrlForPass string = 'https://${keyVaultName}${environment().suffixes.keyvaultDns}/secrets/${passwordSecretName}' +output keyVaultUrlForUrl string = 'https://${keyVaultName}${environment().suffixes.keyvaultDns}/secrets/${urlSecretName}' + diff --git a/cli/azd/resources/scaffold/base/modules/set-servicebus-namespace-connection-string.bicep b/cli/azd/resources/scaffold/base/modules/set-servicebus-namespace-connection-string.bicep new file mode 100644 index 00000000000..b58a707370d --- /dev/null +++ b/cli/azd/resources/scaffold/base/modules/set-servicebus-namespace-connection-string.bicep @@ -0,0 +1,21 @@ +param serviceBusNamespaceName string +param connectionStringSecretName string +param keyVaultName string + +resource serviceBusNamespace 'Microsoft.ServiceBus/namespaces@2022-10-01-preview' existing = { + name: serviceBusNamespaceName +} + +resource keyVault 'Microsoft.KeyVault/vaults@2022-07-01' existing = { + name: keyVaultName +} + +resource serviceBusConnectionStringSecret 'Microsoft.KeyVault/vaults/secrets@2022-07-01' = { + name: connectionStringSecretName + parent: keyVault + properties: { + value: listKeys(concat(resourceId('Microsoft.ServiceBus/namespaces', serviceBusNamespaceName), '/AuthorizationRules/RootManageSharedAccessKey'), serviceBusNamespace.apiVersion).primaryConnectionString + } +} + +output keyVaultUrl string = 'https://${keyVaultName}${environment().suffixes.keyvaultDns}/secrets/${connectionStringSecretName}' diff --git a/cli/azd/resources/scaffold/base/modules/set-storage-account-connection-string.bicep b/cli/azd/resources/scaffold/base/modules/set-storage-account-connection-string.bicep new file mode 100644 index 00000000000..6e0a7da7912 --- /dev/null +++ b/cli/azd/resources/scaffold/base/modules/set-storage-account-connection-string.bicep @@ -0,0 +1,21 @@ +param storageAccountName string +param connectionStringSecretName string +param keyVaultName string + +resource storageAccount 'Microsoft.Storage/storageAccounts@2023-05-01' existing = { + name: storageAccountName +} + +resource keyVault 
'Microsoft.KeyVault/vaults@2022-07-01' existing = { + name: keyVaultName +} + +resource connectionStringSecret 'Microsoft.KeyVault/vaults/secrets@2022-07-01' = { + name: connectionStringSecretName + parent: keyVault + properties: { + value: 'DefaultEndpointsProtocol=https;AccountName=${storageAccount.name};AccountKey=${storageAccount.listKeys().keys[0].value};EndpointSuffix=${environment().suffixes.storage}' + } +} + +output keyVaultUrl string = 'https://${keyVaultName}${environment().suffixes.keyvaultDns}/secrets/${connectionStringSecretName}' diff --git a/cli/azd/resources/scaffold/templates/main.bicept b/cli/azd/resources/scaffold/templates/main.bicept index 8ec2feb9f17..c46fa0355b1 100644 --- a/cli/azd/resources/scaffold/templates/main.bicept +++ b/cli/azd/resources/scaffold/templates/main.bicept @@ -67,4 +67,10 @@ output AZURE_RESOURCE_REDIS_ID string = resources.outputs.AZURE_RESOURCE_REDIS_I {{- if .DbPostgres}} output AZURE_RESOURCE_{{alphaSnakeUpper .DbPostgres.DatabaseName}}_ID string = resources.outputs.AZURE_RESOURCE_{{alphaSnakeUpper .DbPostgres.DatabaseName}}_ID {{- end}} +{{- if .DbMySql}} +output AZURE_MYSQL_FLEXIBLE_SERVER_ID string = resources.outputs.AZURE_MYSQL_FLEXIBLE_SERVER_ID +{{- end}} +{{- if .AzureEventHubs }} +output AZURE_EVENT_HUBS_ID string = resources.outputs.AZURE_EVENT_HUBS_ID +{{- end}} {{ end}} diff --git a/cli/azd/resources/scaffold/templates/next-steps.mdt b/cli/azd/resources/scaffold/templates/next-steps.mdt index 7fe72dec118..932f0c80f20 100644 --- a/cli/azd/resources/scaffold/templates/next-steps.mdt +++ b/cli/azd/resources/scaffold/templates/next-steps.mdt @@ -21,7 +21,7 @@ To troubleshoot any issues, see [troubleshooting](#troubleshooting). Configure environment variables for running services by updating `settings` in [main.parameters.json](./infra/main.parameters.json). {{- range .Services}} -{{- if or .DbPostgres .DbCosmosMongo .DbRedis }} +{{- if or .DbPostgres .DbMySql .DbCosmosMongo .DbRedis }} #### Database connections for `{{.Name}}` @@ -32,6 +32,9 @@ They allow connection to the database instances, and can be modified or adapted - `POSTGRES_URL` - The URL of the Azure Postgres Flexible Server database instance. Individual components are also available as: `POSTGRES_HOST`, `POSTGRES_PORT`, `POSTGRES_DATABASE`, `POSTGRES_USERNAME`, `POSTGRES_PASSWORD`. {{- end}} +{{- if .DbMySql }} +- `MYSQL_*` environment variables are configured in [{{.Name}}.bicep](./infra/app/{{.Name}}.bicep) to connect to the Mysql database. Modify these variables to match your application's needs. +{{- end}} {{- if .DbCosmosMongo }} - `MONGODB_URL` - The URL of the Azure Cosmos DB (MongoDB) instance. {{- end}} @@ -71,6 +74,9 @@ This includes: {{- if .DbPostgres}} - Azure Postgres Flexible Server to host the '{{.DbPostgres.DatabaseName}}' database. {{- end}} +{{- if .DbMySql}} +- [app/db-mysql.bicep](./infra/app/db-mysql.bicep) - Azure MySQL Flexible Server to host the '{{.DbMySql.DatabaseName}}' database. +{{- end}} {{- if .DbCosmosMongo}} - Azure Cosmos DB (MongoDB) to host the '{{.DbCosmosMongo.DatabaseName}}' database. 
{{- end}} diff --git a/cli/azd/resources/scaffold/templates/resources.bicept b/cli/azd/resources/scaffold/templates/resources.bicept index 26180abdc28..23dc34b63ff 100644 --- a/cli/azd/resources/scaffold/templates/resources.bicept +++ b/cli/azd/resources/scaffold/templates/resources.bicept @@ -61,11 +61,21 @@ module containerAppsEnvironment 'br/public:avm/res/app/managed-environment:0.4.5 name: '${abbrs.appManagedEnvironments}${resourceToken}' location: location zoneRedundant: false + {{- if (or (and .DbPostgres (eq .DbPostgres.AuthType "userAssignedManagedIdentity")) (and .DbMySql (eq .DbMySql.AuthType "userAssignedManagedIdentity")))}} + roleAssignments: [ + { + principalId: connectionCreatorIdentity.outputs.principalId + principalType: 'ServicePrincipal' + roleDefinitionIdOrName: 'b24988ac-6180-42a0-ab88-20f7382dd24c' + } + ] + {{- end}} } } {{- end}} {{- if .DbCosmosMongo}} +var mongoDatabaseName = '{{ .DbCosmosMongo.DatabaseName }}' module cosmos 'br/public:avm/res/document-db/database-account:0.8.1' = { name: 'cosmos' params: { @@ -84,13 +94,11 @@ module cosmos 'br/public:avm/res/document-db/database-account:0.8.1' = { virtualNetworkRules: [] publicNetworkAccess: 'Enabled' } - {{- if .DbCosmosMongo.DatabaseName}} mongodbDatabases: [ { - name: '{{ .DbCosmosMongo.DatabaseName }}' + name: mongoDatabaseName } ] - {{- end}} secretsExportConfiguration: { keyVaultResourceId: keyVault.outputs.resourceId primaryWriteConnectionStringSecretName: 'MONGODB-URL' @@ -99,10 +107,62 @@ module cosmos 'br/public:avm/res/document-db/database-account:0.8.1' = { } } {{- end}} - +{{- if .DbCosmos }} +var cosmosDatabaseName = '{{ .DbCosmos.DatabaseName }}' +module cosmos 'br/public:avm/res/document-db/database-account:0.8.1' = { + name: 'cosmos' + params: { + name: '${abbrs.documentDBDatabaseAccounts}${resourceToken}' + tags: tags + location: location + locations: [ + { + failoverPriority: 0 + isZoneRedundant: false + locationName: location + } + ] + networkRestrictions: { + ipRules: [] + virtualNetworkRules: [] + publicNetworkAccess: 'Enabled' + } + sqlDatabases: [ + { + name: cosmosDatabaseName + containers: [ + {{- range .DbCosmos.Containers}} + { + name: '{{ .ContainerName }}' + paths: [ + {{- range $path := .PartitionKeyPaths}} + '{{ $path }}' + {{- end}} + ] + } + {{- end}} + ] + } + ] + sqlRoleAssignmentsPrincipalIds: [ + {{- range .Services}} + {{- if .DbCosmos }} + {{bicepName .Name}}Identity.outputs.principalId + {{- end}} + {{- end}} + ] + sqlRoleDefinitions: [ + { + name: 'service-access-cosmos-sql-role' + } + ] + } +} +{{- end}} {{- if .DbPostgres}} -var databaseName = '{{ .DbPostgres.DatabaseName }}' -var databaseUser = 'psqladmin' + +var postgreSqlDatabaseName = '{{ .DbPostgres.DatabaseName }}' +var postgreSqlDatabaseUser = '{{ .DbPostgres.DatabaseUser }}' module postgreServer 'br/public:avm/res/db-for-postgre-sql/flexible-server:0.1.4' = { name: 'postgreServer' params: { @@ -111,8 +171,9 @@ module postgreServer 'br/public:avm/res/db-for-postgre-sql/flexible-server:0.1.4 skuName: 'Standard_B1ms' tier: 'Burstable' // Non-required parameters - administratorLogin: databaseUser - administratorLoginPassword: databasePassword + tags: tags + administratorLogin: postgreSqlDatabaseUser + administratorLoginPassword: postgreSqlDatabasePassword geoRedundantBackup: 'Disabled' passwordAuth:'Enabled' firewallRules: [ @@ -124,13 +185,274 @@ module postgreServer 'br/public:avm/res/db-for-postgre-sql/flexible-server:0.1.4 ] databases: [ { - name: databaseName + name: postgreSqlDatabaseName } ] location: 
location + {{- if (and .DbPostgres (eq .DbPostgres.AuthType "userAssignedManagedIdentity")) }} + roleAssignments: [ + { + principalId: connectionCreatorIdentity.outputs.principalId + principalType: 'ServicePrincipal' + roleDefinitionIdOrName: 'b24988ac-6180-42a0-ab88-20f7382dd24c' + } + { + principalId: principalId + roleDefinitionIdOrName: 'b24988ac-6180-42a0-ab88-20f7382dd24c' + } + ] + {{- end}} } } {{- end}} +{{- if .DbMySql}} + +var mysqlDatabaseName = '{{ .DbMySql.DatabaseName }}' +var mysqlDatabaseUser = '{{ .DbMySql.DatabaseUser }}' +{{- if (and .DbMySql (eq .DbMySql.AuthType "userAssignedManagedIdentity")) }} +module mysqlIdentity 'br/public:avm/res/managed-identity/user-assigned-identity:0.2.1' = { + name: 'mysqlIdentity' + params: { + name: '${abbrs.managedIdentityUserAssignedIdentities}mysql-${resourceToken}' + location: location + roleAssignments: [ + { + principalId: connectionCreatorIdentity.outputs.principalId + principalType: 'ServicePrincipal' + roleDefinitionIdOrName: 'b24988ac-6180-42a0-ab88-20f7382dd24c' + } + ] + } +} +{{- end}} +module mysqlServer 'br/public:avm/res/db-for-my-sql/flexible-server:0.4.1' = { + name: 'mysqlServer' + params: { + // Required parameters + name: '${abbrs.dBforMySQLServers}${resourceToken}' + skuName: 'Standard_B1ms' + tier: 'Burstable' + // Non-required parameters + administratorLogin: mysqlDatabaseUser + administratorLoginPassword: mysqlDatabasePassword + geoRedundantBackup: 'Disabled' + firewallRules: [ + { + name: 'AllowAllIps' + startIpAddress: '0.0.0.0' + endIpAddress: '255.255.255.255' + } + ] + databases: [ + { + name: mysqlDatabaseName + } + ] + location: location + highAvailability: 'Disabled' + {{- if (and .DbMySql (eq .DbMySql.AuthType "userAssignedManagedIdentity")) }} + managedIdentities: { + userAssignedResourceIds: [ + mysqlIdentity.outputs.resourceId + ] + } + roleAssignments: [ + { + principalId: connectionCreatorIdentity.outputs.principalId + principalType: 'ServicePrincipal' + roleDefinitionIdOrName: 'b24988ac-6180-42a0-ab88-20f7382dd24c' + } + ] + {{- end}} + } +} +{{- end}} +{{- if (or (and .DbPostgres (eq .DbPostgres.AuthType "userAssignedManagedIdentity")) (and .DbMySql (eq .DbMySql.AuthType "userAssignedManagedIdentity")))}} + +module connectionCreatorIdentity 'br/public:avm/res/managed-identity/user-assigned-identity:0.2.1' = { + name: 'connectionCreatorIdentity' + params: { + name: '${abbrs.managedIdentityUserAssignedIdentities}cci-${resourceToken}' + location: location + } +} +{{- end}} +{{- range .Services}} +{{- if (and .DbPostgres (eq .DbPostgres.AuthType "userAssignedManagedIdentity")) }} +var {{bicepName .Name}}PostgresConnectionName = 'connection_${uniqueString(subscription().id, resourceGroup().id, location, '{{bicepName .Name}}', 'Postgres')}' +module {{bicepName .Name}}CreateConnectionToPostgreSql 'br/public:avm/res/resources/deployment-script:0.4.0' = { + name: '{{bicepName .Name}}CreateConnectionToPostgreSql' + params: { + kind: 'AzureCLI' + name: '${abbrs.deploymentScript}{{bicepName .Name}}-connection-to-pg-${resourceToken}' + azCliVersion: '2.63.0' + location: location + managedIdentities: { + userAssignedResourcesIds: [ + connectionCreatorIdentity.outputs.resourceId + ] + } + runOnce: false + retentionInterval: 'P1D' + scriptContent: 'apk update; apk add g++; apk add unixodbc-dev; az extension add --name containerapp; az extension add --name serviceconnector-passwordless --upgrade; az containerapp connection create postgres-flexible --connection ${ {{bicepName .Name}}PostgresConnectionName } 
--source-id ${ {{bicepName .Name}}.outputs.resourceId} --target-id ${postgreServer.outputs.resourceId}/databases/${postgreSqlDatabaseName} --client-type springBoot --user-identity client-id=${ {{bicepName .Name}}Identity.outputs.clientId} subs-id=${subscription().subscriptionId} user-object-id=${connectionCreatorIdentity.outputs.principalId} -c main --yes;' + } +} +{{- end}} +{{- end}} +{{- range .Services}} +{{- if (and .DbMySql (eq .DbMySql.AuthType "userAssignedManagedIdentity")) }} +var {{bicepName .Name}}MysqlConnectionName = 'connection_${uniqueString(subscription().id, resourceGroup().id, location, '{{bicepName .Name}}', 'MySql')}' +module {{bicepName .Name}}CreateConnectionToMysql 'br/public:avm/res/resources/deployment-script:0.4.0' = { + name: '{{bicepName .Name}}CreateConnectionToMysql' + params: { + kind: 'AzureCLI' + name: '${abbrs.deploymentScript}{{bicepName .Name}}-connection-to-mysql-${resourceToken}' + azCliVersion: '2.63.0' + location: location + managedIdentities: { + userAssignedResourcesIds: [ + connectionCreatorIdentity.outputs.resourceId + ] + } + runOnce: false + retentionInterval: 'P1D' + scriptContent: 'apk update; apk add g++; apk add unixodbc-dev; az extension add --name containerapp; az extension add --name serviceconnector-passwordless --upgrade; az containerapp connection create mysql-flexible --connection ${ {{bicepName .Name}}MysqlConnectionName } --source-id ${ {{bicepName .Name}}.outputs.resourceId} --target-id ${mysqlServer.outputs.resourceId}/databases/${mysqlDatabaseName} --client-type springBoot --user-identity client-id=${ {{bicepName .Name}}Identity.outputs.clientId} subs-id=${subscription().subscriptionId} user-object-id=${connectionCreatorIdentity.outputs.principalId} mysql-identity-id=${mysqlIdentity.outputs.resourceId} -c main --yes;' + } +} +{{- end}} +{{- end}} +{{- if .AzureEventHubs }} + +module eventHubNamespace 'br/public:avm/res/event-hub/namespace:0.7.1' = { + name: 'eventHubNamespace' + params: { + name: '${abbrs.eventHubNamespaces}${resourceToken}' + location: location + roleAssignments: [ + {{- range .Services}} + {{- if (and .AzureEventHubs (eq .AzureEventHubs.AuthType "userAssignedManagedIdentity")) }} + { + principalId: {{bicepName .Name}}Identity.outputs.principalId + principalType: 'ServicePrincipal' + roleDefinitionIdOrName: subscriptionResourceId('Microsoft.Authorization/roleDefinitions', 'f526a384-b230-433a-b45c-95f59c4a2dec') + } + {{- end}} + {{- end}} + ] + {{- if (and .AzureEventHubs (eq .AzureEventHubs.AuthType "connectionString")) }} + disableLocalAuth: false + {{- end}} + eventhubs: [ + {{- range $eventHubName := .AzureEventHubs.EventHubNames}} + { + name: '{{ $eventHubName }}' + } + {{- end}} + ] + } +} +{{- if (and .AzureEventHubs (eq .AzureEventHubs.AuthType "connectionString")) }} +module eventHubsConnectionString './modules/set-event-hubs-namespace-connection-string.bicep' = { + name: 'eventHubsConnectionString' + params: { + eventHubsNamespaceName: eventHubNamespace.outputs.name + connectionStringSecretName: 'EVENT-HUBS-CONNECTION-STRING' + keyVaultName: keyVault.outputs.name + } +} +{{end}} +{{end}} +{{- if .AzureStorageAccount }} +var storageAccountName = '${abbrs.storageStorageAccounts}${resourceToken}' +module storageAccount 'br/public:avm/res/storage/storage-account:0.14.3' = { + name: 'storageAccount' + params: { + name: storageAccountName + publicNetworkAccess: 'Enabled' + blobServices: { + containers: [ + {{- range $index, $element := .AzureStorageAccount.ContainerNames}} + { + name: '{{ $element }}' + } 
+ {{- end}} + ] + } + location: location + roleAssignments: [ + {{- range .Services}} + {{- if (and .AzureStorageAccount (eq .AzureStorageAccount.AuthType "userAssignedManagedIdentity")) }} + { + principalId: {{bicepName .Name}}Identity.outputs.principalId + principalType: 'ServicePrincipal' + roleDefinitionIdOrName: subscriptionResourceId('Microsoft.Authorization/roleDefinitions', 'b7e6dc6d-f1e8-4753-8033-0f276bb0955b') + } + {{- end}} + {{- end}} + ] + networkAcls: { + defaultAction: 'Allow' + } + tags: tags + } +} + +{{- if (and .AzureStorageAccount (eq .AzureStorageAccount.AuthType "connectionString")) }} +module storageAccountConnectionString './modules/set-storage-account-connection-string.bicep' = { + name: 'storageAccountConnectionString' + params: { + storageAccountName: storageAccountName + connectionStringSecretName: 'STORAGE-ACCOUNT-CONNECTION-STRING' + keyVaultName: keyVault.outputs.name + } +} +{{end}} +{{end}} + +{{- if .AzureServiceBus }} + +module serviceBusNamespace 'br/public:avm/res/service-bus/namespace:0.10.0' = { + name: 'serviceBusNamespace' + params: { + // Required parameters + name: '${abbrs.serviceBusNamespaces}${resourceToken}' + // Non-required parameters + location: location + roleAssignments: [ + {{- range .Services}} + {{- if (and .AzureServiceBus (eq .AzureServiceBus.AuthType "userAssignedManagedIdentity")) }} + { + principalId: {{bicepName .Name}}Identity.outputs.principalId + principalType: 'ServicePrincipal' + roleDefinitionIdOrName: subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '090c5cfd-751d-490a-894a-3ce6f1109419') + } + {{- end}} + {{- end}} + ] + {{- if (and .AzureServiceBus (eq .AzureServiceBus.AuthType "connectionString")) }} + disableLocalAuth: false + {{- end}} + queues: [ + {{- range $queue := .AzureServiceBus.Queues}} + { + name: '{{ $queue }}' + } + {{- end}} + ] + } +} + +{{- if (and .AzureServiceBus (eq .AzureServiceBus.AuthType "connectionString")) }} +module serviceBusConnectionString './modules/set-servicebus-namespace-connection-string.bicep' = { + name: 'serviceBusConnectionString' + params: { + serviceBusNamespaceName: serviceBusNamespace.outputs.name + connectionStringSecretName: 'SERVICEBUS-CONNECTION-STRING' + keyVaultName: keyVault.outputs.name + } +} +{{end}} +{{end}} {{- if .AIModels}} var accountName = '${abbrs.cognitiveServicesAccounts}${resourceToken}' @@ -172,13 +494,22 @@ resource localUserOpenAIIdentity 'Microsoft.Authorization/roleAssignments@2022-0 } {{- end}} -{{- range .Services}} +{{- range $service := .Services}} module {{bicepName .Name}}Identity 'br/public:avm/res/managed-identity/user-assigned-identity:0.2.1' = { name: '{{bicepName .Name}}identity' params: { name: '${abbrs.managedIdentityUserAssignedIdentities}{{bicepName .Name}}-${resourceToken}' location: location + {{- if (or (and .DbPostgres (eq .DbPostgres.AuthType "userAssignedManagedIdentity")) (and .DbMySql (eq .DbMySql.AuthType "userAssignedManagedIdentity")))}} + roleAssignments: [ + { + principalId: connectionCreatorIdentity.outputs.principalId + principalType: 'ServicePrincipal' + roleDefinitionIdOrName: 'b24988ac-6180-42a0-ab88-20f7382dd24c' + } + ] + {{- end}} } } @@ -197,7 +528,7 @@ module {{bicepName .Name}}FetchLatestImage './modules/fetch-container-image.bice name: '{{bicepName .Name}}-fetch-image' params: { exists: {{bicepName .Name}}Exists - name: '{{.Name}}' + name: '{{containerAppName .Name}}' } } @@ -215,10 +546,9 @@ var {{bicepName .Name}}Env = map(filter({{bicepName .Name}}AppSettingsArray, i = module {{bicepName 
.Name}} 'br/public:avm/res/app/container-app:0.8.0' = { name: '{{bicepName .Name}}' params: { - name: '{{.Name}}' + name: '{{containerAppName .Name}}' {{- if ne .Port 0}} ingressTargetPort: {{.Port}} - {{- end}} {{- if (and .Backend .Backend.Frontends)}} corsPolicy: { allowedOrigins: [ @@ -231,38 +561,29 @@ module {{bicepName .Name}} 'br/public:avm/res/app/container-app:0.8.0' = { ] } {{- end}} + {{- else}} + disableIngress: true + {{- end}} scaleMinReplicas: 1 scaleMaxReplicas: 10 secrets: { secureList: union([ - {{- if .DbCosmosMongo}} + {{- range $name, $value := .Envs}} + {{- if (shouldAddToBicepFile $service $name) }} + {{- if (eq (toBicepEnv $name $value).BicepEnvType "keyVaultSecret") }} { - name: 'mongodb-url' - identity:{{bicepName .Name}}Identity.outputs.resourceId - keyVaultUrl: cosmos.outputs.exportedSecrets['MONGODB-URL'].secretUri + name: '{{ (toBicepEnv $name $value).SecretName }}' + identity:{{bicepName $service.Name}}Identity.outputs.resourceId + keyVaultUrl: {{ (toBicepEnv $name $value).SecretValue }} } {{- end}} - {{- if .DbPostgres}} - { - name: 'db-pass' - value: databasePassword - } + {{- if (eq (toBicepEnv $name $value).BicepEnvType "secret") }} { - name: 'db-url' - value: 'postgresql://${databaseUser}:${databasePassword}@${postgreServer.outputs.fqdn}:5432/${databaseName}' + name: '{{ (toBicepEnv $name $value).SecretName }}' + value: {{ (toBicepEnv $name $value).SecretValue }} } {{- end}} - {{- if .DbRedis}} - { - name: 'redis-pass' - identity:{{bicepName .Name}}Identity.outputs.resourceId - keyVaultUrl: '${keyVault.outputs.uri}secrets/REDIS-PASSWORD' - } - { - name: 'redis-url' - identity:{{bicepName .Name}}Identity.outputs.resourceId - keyVaultUrl: '${keyVault.outputs.uri}secrets/REDIS-URL' - } + {{- end}} {{- end}} ], map({{bicepName .Name}}Secrets, secret => { @@ -279,83 +600,55 @@ module {{bicepName .Name}} 'br/public:avm/res/app/container-app:0.8.0' = { memory: '1.0Gi' } env: union([ + {{- range $name, $value := .Envs }} + {{- if (shouldAddToBicepFile $service $name) }} + {{- if (or (eq (toBicepEnv $name $value).BicepEnvType "keyVaultSecret") (eq (toBicepEnv $name $value).BicepEnvType "secret")) }} { - name: 'APPLICATIONINSIGHTS_CONNECTION_STRING' - value: monitoring.outputs.applicationInsightsConnectionString - } - { - name: 'AZURE_CLIENT_ID' - value: {{bicepName .Name}}Identity.outputs.clientId - } - {{- if .DbCosmosMongo}} - { - name: 'MONGODB_URL' - secretRef: 'mongodb-url' + name: '{{ (toBicepEnv $name $value).Name }}' + secretRef: '{{ (toBicepEnv $name $value).SecretName }}' } {{- end}} - {{- if .DbPostgres}} - { - name: 'POSTGRES_HOST' - value: postgreServer.outputs.fqdn - } - { - name: 'POSTGRES_USERNAME' - value: databaseUser - } + {{- if (eq (toBicepEnv $name $value).BicepEnvType "plainText") }} { - name: 'POSTGRES_DATABASE' - value: databaseName - } - { - name: 'POSTGRES_PASSWORD' - secretRef: 'db-pass' - } - { - name: 'POSTGRES_URL' - secretRef: 'db-url' - } - { - name: 'POSTGRES_PORT' - value: '5432' + name: '{{ (toBicepEnv $name $value).Name }}' + {{- if (isPlaceholderOfSourceClientId (toBicepEnv $name $value).PlainTextValue)}} + value: {{bicepName $service.Name}}Identity.outputs.clientId + {{- else}} + value: {{ (toBicepEnv $name $value).PlainTextValue }} + {{- end}} } {{- end}} - {{- if .DbRedis}} - { - name: 'REDIS_HOST' - value: redis.outputs.hostName - } - { - name: 'REDIS_PORT' - value: string(redis.outputs.sslPort) - } + {{- end}} + {{- end}} { - name: 'REDIS_URL' - secretRef: 'redis-url' + name: 'APPLICATIONINSIGHTS_CONNECTION_STRING' + 
value: monitoring.outputs.applicationInsightsConnectionString } { - name: 'REDIS_ENDPOINT' - value: '${redis.outputs.hostName}:${string(redis.outputs.sslPort)}' + name: 'AZURE_CLIENT_ID' + value: {{bicepName .Name}}Identity.outputs.clientId } + {{- if .Frontend}} + {{- range $i, $e := .Frontend.Backends}} { - name: 'REDIS_PASSWORD' - secretRef: 'redis-pass' + name: '{{upper .Name}}_BASE_URL' + value: 'https://${ {{bicepName .Name}}.outputs.name}.${containerAppsEnvironment.outputs.defaultDomain}' } {{- end}} - {{- if .AIModels}} - { - name: 'AZURE_OPENAI_ENDPOINT' - value: account.outputs.endpoint - } {{- end}} - {{- if .Frontend}} - {{- range $i, $e := .Frontend.Backends}} + {{- if .Backend}} + {{- range $i, $e := .Backend.Frontends}} { name: '{{upper .Name}}_BASE_URL' - value: 'https://{{.Name}}.${containerAppsEnvironment.outputs.defaultDomain}' + value: 'https://{{containerAppName .Name}}.${containerAppsEnvironment.outputs.defaultDomain}' } {{- end}} {{- end}} {{- if ne .Port 0}} + { + name: 'server.port' + value: '{{ .Port }}' + } { name: 'PORT' value: '{{ .Port }}' @@ -382,6 +675,15 @@ module {{bicepName .Name}} 'br/public:avm/res/app/container-app:0.8.0' = { environmentResourceId: containerAppsEnvironment.outputs.resourceId location: location tags: union(tags, { 'azd-service-name': '{{.Name}}' }) + {{- if (or (and .DbPostgres (eq .DbPostgres.AuthType "userAssignedManagedIdentity")) (and .DbMySql (eq .DbMySql.AuthType "userAssignedManagedIdentity")))}} + roleAssignments: [ + { + principalId: connectionCreatorIdentity.outputs.principalId + principalType: 'ServicePrincipal' + roleDefinitionIdOrName: 'b24988ac-6180-42a0-ab88-20f7382dd24c' + } + ] + {{- end}} } } {{- end}} @@ -409,7 +711,7 @@ module redisConn './modules/set-redis-conn.bicep' = { } {{- end}} -{{- if .Services}} +{{- if (or .Services .DbCosmosMongo .DbRedis)}} // Create a keyvault to store secrets module keyVault 'br/public:avm/res/key-vault/vault:0.6.1' = { name: 'keyvault' @@ -435,10 +737,16 @@ module keyVault 'br/public:avm/res/key-vault/vault:0.6.1' = { {{- end}} ] secrets: [ - {{- if .DbPostgres}} + {{- if (and .DbPostgres (eq .DbPostgres.AuthType "password")) }} + { + name: 'postgresql-password' + value: postgreSqlDatabasePassword + } + {{- end}} + {{- if (and .DbMySql (eq .DbMySql.AuthType "password")) }} { - name: 'db-pass' - value: databasePassword + name: 'mysql-password' + value: mysqlDatabasePassword } {{- end}} ] @@ -465,4 +773,13 @@ output AZURE_RESOURCE_REDIS_ID string = redis.outputs.resourceId {{- if .DbPostgres}} output AZURE_RESOURCE_{{alphaSnakeUpper .DbPostgres.DatabaseName}}_ID string = '${postgreServer.outputs.resourceId}/databases/{{.DbPostgres.DatabaseName}}' {{- end}} +{{- if .DbMySql}} +output AZURE_MYSQL_FLEXIBLE_SERVER_ID string = mysqlServer.outputs.resourceId +{{- end}} +{{- if .AzureEventHubs }} +output AZURE_EVENT_HUBS_ID string = eventHubNamespace.outputs.resourceId +{{- end}} +{{- if .AzureServiceBus }} +output AZURE_SERVICE_BUS_ID string = serviceBusNamespace.outputs.resourceId +{{- end}} {{ end}} diff --git a/cli/azd/test/functional/init_test.go b/cli/azd/test/functional/init_test.go index da748fa3e2b..3e4809947a5 100644 --- a/cli/azd/test/functional/init_test.go +++ b/cli/azd/test/functional/init_test.go @@ -203,6 +203,7 @@ func Test_CLI_Init_From_App_With_Infra(t *testing.T) { "Use code in the current directory\n"+ "Confirm and continue initializing my app\n"+ "appdb\n"+ + "User assigned managed identity\n"+ "TESTENV\n", "init", ) diff --git a/ext/vscode/package.json 
b/ext/vscode/package.json
index f9f06a3f6f2..83261a28432 100644
--- a/ext/vscode/package.json
+++ b/ext/vscode/package.json
@@ -185,11 +185,16 @@
       "explorer/context": [
         {
           "submenu": "azure-dev.explorer.submenu",
-          "when": "resourceFilename =~ /azure.yaml/i",
+          "when": "resourceFilename =~ /(azure.yaml|pom.xml)/i",
           "group": "azure-dev"
         }
       ],
       "azure-dev.explorer.submenu": [
+        {
+          "when": "resourceFilename =~ /pom.xml/i",
+          "command": "azure-dev.commands.cli.init",
+          "group": "10provision@10"
+        },
         {
           "when": "resourceFilename =~ /azure.yaml/i",
           "command": "azure-dev.commands.cli.provision",
diff --git a/ext/vscode/package.nls.json b/ext/vscode/package.nls.json
index 3b633c1f39f..f0b358ad186 100644
--- a/ext/vscode/package.nls.json
+++ b/ext/vscode/package.nls.json
@@ -1,7 +1,7 @@
 {
   "azure-dev.commands_category": "Azure Developer CLI (azd)",
-  "azure-dev.commands.cli.init.title": "Initialize App (init)",
+  "azure-dev.commands.cli.init.title": "Generate Azure Deployment Script (init)",
   "azure-dev.commands.cli.provision.title": "Provision Azure Resources (provision)",
   "azure-dev.commands.cli.deploy.title": "Deploy to Azure (deploy)",
   "azure-dev.commands.cli.restore.title": "Restore App Dependencies (restore)",
diff --git a/schemas/alpha/azure.yaml.json b/schemas/alpha/azure.yaml.json
index f1d3016c742..02e9fa72880 100644
--- a/schemas/alpha/azure.yaml.json
+++ b/schemas/alpha/azure.yaml.json
@@ -107,6 +107,10 @@
           "type": "string",
           "title": "Path to the service source code directory"
         },
+        "parentPath": {
+          "type": "string",
+          "title": "Path to the parent directory of the service"
+        },
         "image": {
           "type": "string",
           "title": "Optional. The source image to be used for the container image instead of building from source. Supports environment variable substitution.",
@@ -159,6 +163,14 @@
           "type": "object",
           "additionalProperties": true
         },
+        "env": {
+          "type": "object",
+          "title": "A map of key-value pairs to set as environment variables for the service.",
+          "description": "Optional. Supports environment variable substitution.",
+          "additionalProperties": {
+            "type": "string"
+          }
+        },
         "hooks": {
           "type": "object",
           "title": "Service level hooks",
@@ -354,6 +366,9 @@
     },
     "resources": {
       "type": "object",
+      "title": "Definition of resources that the application depends on",
+      "description": "Optional. Provides additional configuration for Azure resources that the application depends on.",
+      "minProperties": 1,
       "additionalProperties": {
         "type": "object",
         "required": [
           "type"
         ],
         "properties": {
           "type": {
             "type": "string",
             "title": "Type of resource",
             "description": "The type of resource to be created. (Example: db.postgres)",
             "enum": [
+              "db.mysql",
               "db.postgres",
               "db.redis",
               "db.mongo",
+              "db.cosmos",
+              "messaging.servicebus",
+              "messaging.eventhubs",
+              "messaging.kafka",
+              "storage",
               "ai.openai.model",
               "host.containerapp"
             ]
           }
@@ -384,9 +405,15 @@
         "allOf": [
           { "if": { "properties": { "type": { "const": "host.containerapp" }}}, "then": { "$ref": "#/definitions/containerAppResource" } },
           { "if": { "properties": { "type": { "const": "ai.openai.model" }}}, "then": { "$ref": "#/definitions/aiModelResource" } },
-          { "if": { "properties": { "type": { "const": "db.postgres" }}}, "then": { "$ref": "#/definitions/resource"} },
+          { "if": { "properties": { "type": { "const": "db.mysql" }}}, "then": { "$ref": "#/definitions/mySqlDbResource"} },
+          { "if": { "properties": { "type": { "const": "db.postgres" }}}, "then": { "$ref": "#/definitions/postgreSqlDbResource"} },
           { "if": { "properties": { "type": { "const": "db.redis" }}}, "then": { "$ref": "#/definitions/resource"} },
-          { "if": { "properties": { "type": { "const": "db.mongo" }}}, "then": { "$ref": "#/definitions/resource"} }
+          { "if": { "properties": { "type": { "const": "db.mongo" }}}, "then": { "$ref": "#/definitions/mongoDbResource"} },
+          { "if": { "properties": { "type": { "const": "db.cosmos" }}}, "then": { "$ref": "#/definitions/cosmosDbResource"} },
+          { "if": { "properties": { "type": { "const": "messaging.servicebus" }}}, "then": { "$ref": "#/definitions/serviceBusResource"} },
+          { "if": { "properties": { "type": { "const": "messaging.eventhubs" }}}, "then": { "$ref": "#/definitions/eventHubsResource"} },
+          { "if": { "properties": { "type": { "const": "messaging.kafka" }}}, "then": { "$ref": "#/definitions/kafkaResource"} },
+          { "if": { "properties": { "type": { "const": "storage" }}}, "then": { "$ref": "#/definitions/storageAccountResource"} }
         ]
       }
     },
@@ -1205,11 +1232,10 @@ "type": {
           "type": "string",
           "title": "Type of resource",
-          "description": "The type of resource to be created. (Example: db.postgres)",
+          "description": "The type of resource to be created. (Example: db.redis)",
           "enum": [
-            "db.postgres",
             "db.redis",
-            "db.mongo",
+            "storage",
             "host.containerapp",
             "ai.openai.model"
           ]
         }
@@ -1228,9 +1254,6 @@
       "type": "object",
       "description": "A Docker-based container app.",
       "additionalProperties": false,
-      "required": [
-        "port"
-      ],
       "properties": {
         "type": true,
         "uses": true,
@@ -1298,6 +1321,220 @@
           }
         }
       }
+    },
+    "mySqlDbResource": {
+      "type": "object",
+      "description": "A deployed, ready-to-use Azure Database for MySQL flexible server.",
+      "additionalProperties": false,
+      "properties": {
+        "type": true,
+        "uses": true,
+        "authType": {
+          "type": "string",
+          "title": "Authentication Type",
+          "description": "The type of authentication used for the Azure MySQL database.",
+          "enum": [
+            "userAssignedManagedIdentity",
+            "password"
+          ]
+        },
+        "databaseName": {
+          "type": "string",
+          "title": "The Azure MySQL Database Name",
+          "description": "The name of the Azure MySQL database."
+        }
+      }
+    },
+    "postgreSqlDbResource": {
+      "type": "object",
+      "description": "A deployed, ready-to-use Azure Database for PostgreSQL flexible server.",
+      "additionalProperties": false,
+      "properties": {
+        "type": true,
+        "uses": true,
+        "authType": {
+          "type": "string",
+          "title": "Authentication Type",
+          "description": "The type of authentication used for the Azure PostgreSQL database.",
+          "enum": [
+            "userAssignedManagedIdentity",
+            "password"
+          ]
+        },
+        "databaseName": {
+          "type": "string",
+          "title": "The Azure PostgreSQL Database Name",
+          "description": "The name of the Azure PostgreSQL database."
+        }
+      }
+    },
+    "mongoDbResource": {
+      "type": "object",
+      "description": "A deployed, ready-to-use Azure Cosmos DB API for MongoDB.",
+      "additionalProperties": false,
+      "properties": {
+        "type": true,
+        "uses": true,
+        "databaseName": {
+          "type": "string",
+          "title": "The Azure MongoDB Name",
+          "description": "The name of the Azure Cosmos DB API for MongoDB database."
+        }
+      }
+    },
+    "storageAccountResource": {
+      "type": "object",
+      "description": "A deployed, ready-to-use Azure Storage Account.",
+      "additionalProperties": false,
+      "properties": {
+        "type": true,
+        "uses": true,
+        "authType": {
+          "type": "string",
+          "title": "Authentication Type",
+          "description": "The type of authentication used for the Azure Storage Account.",
+          "enum": [
+            "userAssignedManagedIdentity",
+            "connectionString"
+          ]
+        },
+        "containers": {
+          "type": "array",
+          "title": "Azure Storage Account container names.",
+          "description": "The container names of the Azure Storage Account.",
+          "items": {
+            "type": "string",
+            "title": "Azure Storage Account container name",
+            "description": "The container name of the Azure Storage Account."
+          }
+        }
+      }
+    },
+    "cosmosDbResource": {
+      "type": "object",
+      "description": "A deployed, ready-to-use Azure Cosmos DB for NoSQL.",
+      "additionalProperties": false,
+      "properties": {
+        "type": true,
+        "uses": true,
+        "databaseName": {
+          "type": "string",
+          "title": "The Azure Cosmos DB Name",
+          "description": "The name of the Azure Cosmos DB database."
+        },
+        "containers": {
+          "type": "array",
+          "title": "Azure Cosmos DB Containers",
+          "description": "A list of containers in the Azure Cosmos DB database.",
+          "items": {
+            "type": "object",
+            "additionalProperties": false,
+            "properties": {
+              "containerName": {
+                "type": "string",
+                "title": "Container Name",
+                "description": "The name of the container."
+              },
+              "partitionKeyPaths": {
+                "type": "array",
+                "title": "Partition Key Paths",
+                "description": "A list of partition key paths for the container.",
+                "items": {
+                  "type": "string"
+                }
+              }
+            }
+          }
+        }
+      }
+    },
+    "serviceBusResource": {
+      "type": "object",
+      "description": "A deployed, ready-to-use Azure Service Bus.",
+      "additionalProperties": false,
+      "properties": {
+        "type": true,
+        "uses": true,
+        "queues": {
+          "type": "array",
+          "title": "Service Bus Queues",
+          "description": "A list of Service Bus queues.",
+          "items": {
+            "type": "string"
+          }
+        },
+        "isJms": {
+          "type": "boolean",
+          "title": "Is JMS",
+          "description": "Indicates if JMS is enabled for the Service Bus."
+        },
+        "authType": {
+          "type": "string",
+          "title": "Authentication Type",
+          "description": "The type of authentication used for the Service Bus.",
+          "enum": [
+            "userAssignedManagedIdentity",
+            "connectionString"
+          ]
+        }
+      }
+    },
+    "eventHubsResource": {
+      "type": "object",
+      "description": "A deployed, ready-to-use Azure Event Hubs.",
+      "additionalProperties": false,
+      "properties": {
+        "type": true,
+        "uses": true,
+        "eventHubNames": {
+          "type": "array",
+          "title": "Event Hub Names",
+          "description": "A list of Event Hub names.",
+          "items": {
+            "type": "string"
+          }
+        },
+        "authType": {
+          "type": "string",
+          "title": "Authentication Type",
+          "description": "The type of authentication used for Event Hubs.",
+          "enum": [
+            "userAssignedManagedIdentity",
+            "connectionString"
+          ]
+        }
+      }
+    },
+    "kafkaResource": {
+      "type": "object",
+      "description": "A deployed, ready-to-use Azure Event Hubs for Apache Kafka.",
+      "additionalProperties": false,
+      "properties": {
+        "type": true,
+        "uses": true,
+        "topics": {
+          "type": "array",
+          "title": "Topics",
+          "description": "A list of Kafka topics.",
+          "items": {
+            "type": "string"
+          }
+        },
+        "authType": {
+          "type": "string",
+          "title": "Authentication Type",
+          "description": "The type of authentication used for Kafka.",
+          "enum": [
+            "userAssignedManagedIdentity",
+            "connectionString"
+          ]
+        },
+        "springBootVersion": {
+          "type": "string",
+          "title": "Spring Boot Version",
+          "description": "The Spring Boot version used in the project."
+        }
+      }
     }
   }
 }
\ No newline at end of file