From 0500c57e33f2d2319de5d17a821feab01c2fc4e7 Mon Sep 17 00:00:00 2001 From: Dragutin Vujovic <66312566+dragutin-nav@users.noreply.github.com> Date: Tue, 3 Sep 2024 15:53:16 +0200 Subject: [PATCH] Flytte til gcp (#1686) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add gcp workflows and adjust PDL format (#1691) * Add new workflow for gcp (#1687) * Byttet namespace fra pto til obo Co-authored-by: Julie Hill Roa Co-authored-by: Dragutin Vujovic Co-authored-by: Ingrid Fosså * Fjernet ingressen til dev-fss * Fjerner vault og pto-config fra nais.yaml * Oppdaterer unleash workflow til gcp i dev * Legg til sql-instans i gcp Co-authored-by: Julie Hill Roa Co-authored-by: Dragutin Vujovic Co-authored-by: Sneha Desai * Update db version * Connect til ny db urlen på gcp Co-authored-by: Julie Hill Roa Co-authored-by: Dragutin Vujovic Co-authored-by: Ingrid Fosså * Dependencies update * Fix huskelapp test * Legg til prefiks for miljøvariablar Co-authored-by: Julie Hill Roa Co-authored-by: Dragutin Vujovic Co-authored-by: Sneha Desai * Remove veilarbPep * Fjernet abac urlen og referanse til pep Co-authored-by: Julie Hill Roa Co-authored-by: Klara Margrethe Helgemo Co-authored-by: Dragutin Vujovic Co-authored-by: Ingrid Fosså * Remove kafka schema url * Update kafka schema registry url * Revert "Fjernet abac urlen og referanse til pep" This reverts commit b9ee6b00078f8715251edf7ab3b66fa916c1dc0a. 
* Remove duplicate datasource * Stop kafka consumers * Some small fixes related to db * Change access rules (outbound rules) * Change kodeverk dev address * Use another address for kodeverk * Update endpoint urls * Add poao-tilgang to access rules * Change kodeverk url * Connect til ny db urlen på gcp Co-authored-by: Julie Hill Roa Co-authored-by: Dragutin Vujovic Co-authored-by: Ingrid Fosså * Dependencies update * Remove veilarbPep * Fjernet abac urlen og referanse til pep Co-authored-by: Julie Hill Roa Co-authored-by: Klara Margrethe Helgemo Co-authored-by: Dragutin Vujovic Co-authored-by: Ingrid Fosså * Prepare app for prod * Apply suggestions from code review * Cleanup code * Rebase on master * Rebase on master * Small fixes for unleash * Trigger redeploy * Trigger redeploy * Rebase on dev * Add opensearch prod config * Fix deploy unleash * Update dependencies * Dependency updates * Dependency update * Debug db connection * Update hikari config * Small fixes for db connection * Add debug log * Debug logs * Remove debug logs * Adjust hikari config * Small changes for db migration * Remove frontendlogger * Remove influx metrics * Update access rules * Update README * Update ingress * Update kodeverk url * Change kodeverk url * Enable kafka consumers * Fiks poao-tilgang scope * Disable kafka consumers * Return event logger for frontend - it needs refactoring * Begin to consume from Kafka * Change arbeidssoekerregisteret address * paw-arbeidssoekerregisteret-api-oppslag url og tilgang * Increase pool size * Øker kapasitet Co-authored-by: Julie Hill Roa * Add index on brukertiltak_v2 * Adjust little bit enhet tiltak cache * Increase db cpu and memory * Change size of ident in bruker_identer table * Remove bruker_identer table updates * Pdl foedeland can have several rows, usually with same data but different sources * Decrease reporting interval for few metrics * kjøre metrikker kun 1 gang hvert 10 min * oppdaterer metrikker til å gå 1 gang hvert 10 min * Report 
metrics only from leader pod * Update github workflow * Apply suggestions from code review * Update .github/workflows/build-deploy-feature-branch-q1.yaml --------- Co-authored-by: Sneha Desai Co-authored-by: JulieHillRoa Co-authored-by: Ingrid Fosså Co-authored-by: Klara M Helgemo --- .../build-deploy-feature-branch-q1.yaml | 6 +- .../workflows/deploy-unleash-api-token.yaml | 12 +- .github/workflows/main-gcp.yml | 8 +- .github/workflows/main.yml | 103 ------------------ .../application/application-config-prod.yaml | 4 +- .../gcp/application-config-prod.yaml | 26 +++-- .nais/application/gcp/opensearch-dev.yaml | 25 ----- README.md | 33 ++++-- pom.xml | 21 +--- .../arbeidsliste/ArbeidslisteService.java | 5 - .../aktiviteter/TiltakService.java | 2 +- .../config/ClientConfig.java | 9 +- .../config/DbConfigPostgres.java | 38 +++---- .../controller/Frontendlogger.java | 9 +- .../internal/BrukerMappingAlarm.java | 13 ++- .../veilarbportefolje/kafka/KafkaStats.java | 2 +- .../deserializers/AivenAvroDeserializer.java | 2 +- .../deserializers/OnpremAvroDeserializer.java | 18 --- .../opensearch/OpensearchCountService.java | 20 +--- .../persononinfo/domene/PDLPerson.java | 4 +- .../pto/veilarbportefolje/util/DbUtils.java | 47 ++------ src/main/resources/application.properties | 2 +- .../db/postgres/V1_90__brukertiltakv2.sql | 1 + .../config/ApplicationConfigTest.java | 7 -- .../mock/MetricsClientMock.java | 20 ---- 25 files changed, 101 insertions(+), 336 deletions(-) delete mode 100644 .github/workflows/main.yml delete mode 100644 .nais/application/gcp/opensearch-dev.yaml delete mode 100644 src/main/java/no/nav/pto/veilarbportefolje/kafka/deserializers/OnpremAvroDeserializer.java create mode 100644 src/main/resources/db/postgres/V1_90__brukertiltakv2.sql delete mode 100644 src/test/java/no/nav/pto/veilarbportefolje/mock/MetricsClientMock.java diff --git a/.github/workflows/build-deploy-feature-branch-q1.yaml b/.github/workflows/build-deploy-feature-branch-q1.yaml index 
30501dc309..b5ee84e278 100644 --- a/.github/workflows/build-deploy-feature-branch-q1.yaml +++ b/.github/workflows/build-deploy-feature-branch-q1.yaml @@ -35,7 +35,7 @@ jobs: uses: nais/docker-build-push@v0 id: docker-build-push with: - team: pto + team: obo identity_provider: ${{ secrets.NAIS_WORKLOAD_IDENTITY_PROVIDER }} project_id: ${{ vars.NAIS_MANAGEMENT_PROJECT_ID }} @@ -50,6 +50,6 @@ jobs: - name: Deploy application uses: nais/deploy/actions/deploy@v2 env: - CLUSTER: dev-fss - RESOURCE: .nais/application/application-config-dev.yaml + CLUSTER: dev-gcp + RESOURCE: .nais/application/gcp/application-config-dev.yaml VAR: image=${{ needs.build-and-push.outputs.image }} diff --git a/.github/workflows/deploy-unleash-api-token.yaml b/.github/workflows/deploy-unleash-api-token.yaml index c80536f3b2..7e4b12e3e7 100644 --- a/.github/workflows/deploy-unleash-api-token.yaml +++ b/.github/workflows/deploy-unleash-api-token.yaml @@ -7,8 +7,8 @@ on: - master paths: - '.github/workflows/deploy-unleash-api-token.yaml' - - '.nais/application/unleash-apitoken-dev.yaml' - - '.nais/application/unleash-apitoken-prod.yaml' + - '.nais/application/gcp/unleash-apitoken-dev.yaml' + - '.nais/application/gcp/unleash-apitoken-prod.yaml' permissions: id-token: write @@ -23,8 +23,8 @@ jobs: - name: Deploy unleash-apitoken uses: nais/deploy/actions/deploy@v2 env: - CLUSTER: dev-fss - RESOURCE: .nais/application/unleash-apitoken-dev.yaml + CLUSTER: dev-gcp + RESOURCE: .nais/application/gcp/unleash-apitoken-dev.yaml PRINT_PAYLOAD: true deploy-prod: @@ -38,7 +38,7 @@ jobs: uses: nais/deploy/actions/deploy@v2 if: github.ref == 'refs/heads/master' env: - CLUSTER: prod-fss - RESOURCE: .nais/application/unleash-apitoken-prod.yaml + CLUSTER: prod-gcp + RESOURCE: .nais/application/gcp/unleash-apitoken-prod.yaml PRINT_PAYLOAD: true diff --git a/.github/workflows/main-gcp.yml b/.github/workflows/main-gcp.yml index 5a6815cccb..563ede5f61 100644 --- a/.github/workflows/main-gcp.yml +++ 
b/.github/workflows/main-gcp.yml @@ -11,7 +11,7 @@ jobs: test: name: Run tests runs-on: ubuntu-latest - if: github.ref != 'refs/heads/flytte-til-gcp' + if: github.ref != 'refs/heads/master' steps: - name: Checkout uses: actions/checkout@v4 @@ -32,7 +32,7 @@ jobs: build-and-push: name: Build and push runs-on: ubuntu-latest - if: github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/flytte-til-gcp' + if: github.ref == 'refs/heads/dev' outputs: image: ${{ steps.docker-build-push.outputs.image }} steps: @@ -59,7 +59,7 @@ jobs: deploy-dev: name: Deploy application to dev gcp - if: github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/flytte-til-gcp' + if: github.ref == 'refs/heads/dev' needs: build-and-push runs-on: ubuntu-latest steps: @@ -75,7 +75,7 @@ jobs: deploy-prod: name: Deploy application to prod gcp - if: github.ref == 'refs/heads/flytte-til-gcp' + if: github.ref == 'refs/heads/master' needs: build-and-push runs-on: ubuntu-latest steps: diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml deleted file mode 100644 index fbd5f2a2e0..0000000000 --- a/.github/workflows/main.yml +++ /dev/null @@ -1,103 +0,0 @@ -name: Build, push and deploy -on: push -env: - IMAGE_TAG: ${{ github.sha }} - PRINT_PAYLOAD: true -permissions: - packages: write - contents: write - id-token: write -jobs: - test: - name: Run tests - runs-on: ubuntu-latest - if: github.ref != 'refs/heads/master' - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Set up Java - uses: actions/setup-java@v4 - with: - java-version: 21 - distribution: 'temurin' - cache: 'maven' - - - name: Run maven tests - env: - MAVEN_OPTS: -Xss1024M -Xmx2048M - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: mvn -B verify - - build-and-push: - name: Build and push - runs-on: ubuntu-latest - if: github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/master' - outputs: - image: ${{ steps.docker-build-push.outputs.image }} - steps: - - name: Checkout - uses: actions/checkout@v4 - 
- - name: Set up Java - uses: actions/setup-java@v4 - with: - java-version: 21 - distribution: 'temurin' - cache: 'maven' - - - name: Build maven artifacts - run: mvn -Dgithub.token=${{ secrets.GITHUB_TOKEN }} -B package -D skipTests - - - name: Build and push Docker image - uses: nais/docker-build-push@v0 - id: docker-build-push - with: - team: pto - identity_provider: ${{ secrets.NAIS_WORKLOAD_IDENTITY_PROVIDER }} - project_id: ${{ vars.NAIS_MANAGEMENT_PROJECT_ID }} - - deploy-dev: - name: Deploy application to dev - if: github.ref == 'refs/heads/dev' - needs: build-and-push - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Deploy application - uses: nais/deploy/actions/deploy@v2 - env: - CLUSTER: dev-fss - RESOURCE: .nais/application/application-config-dev.yaml - VAR: image=${{ needs.build-and-push.outputs.image }} - - deploy-prod: - name: Deploy application to prod - if: github.ref == 'refs/heads/master' - needs: build-and-push - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Deploy application - uses: nais/deploy/actions/deploy@v2 - env: - CLUSTER: prod-fss - RESOURCE: .nais/application/application-config-prod.yaml - VAR: image=${{ needs.build-and-push.outputs.image }} - - release-prod: - name: Create prod release - needs: deploy-prod - runs-on: ubuntu-latest - steps: - - name: Create release - uses: softprops/action-gh-release@v2 - with: - name: Release to prod - target_commitish: master - tag_name: release/prod@${{ env.IMAGE_TAG }} - prerelease: false diff --git a/.nais/application/application-config-prod.yaml b/.nais/application/application-config-prod.yaml index 8c629474db..bea3ac3dc3 100644 --- a/.nais/application/application-config-prod.yaml +++ b/.nais/application/application-config-prod.yaml @@ -9,8 +9,8 @@ metadata: spec: image: {{image}} replicas: - min: 3 - max: 3 + min: 0 + max: 0 cpuThresholdPercentage: 50 port: 8080 ingresses: diff --git 
a/.nais/application/gcp/application-config-prod.yaml b/.nais/application/gcp/application-config-prod.yaml index 90ace28e7a..16fb02a8e3 100644 --- a/.nais/application/gcp/application-config-prod.yaml +++ b/.nais/application/gcp/application-config-prod.yaml @@ -40,7 +40,7 @@ spec: gcp: sqlInstances: - type: POSTGRES_15 - tier: db-custom-1-4096 + tier: db-custom-8-53248 databases: - name: veilarbportefolje envVarPrefix: DB @@ -68,25 +68,27 @@ spec: namespace: team-rocket - application: pdl-api namespace: pdl - cluster: dev-fss + cluster: prod-fss - application: veilarbvedtaksstotte namespace: pto - cluster: dev-fss + cluster: prod-fss - application: veilarbarena namespace: pto - cluster: dev-fss + cluster: prod-fss - application: veilarbveileder namespace: pto - cluster: dev-fss + cluster: prod-fss - application: poao-tilgang namespace: poao + - application: paw-arbeidssoekerregisteret-api-oppslag + namespace: paw external: - host: team-obo-unleash-api.nav.cloud.nais.io - - host: pdl-api.dev-fss-pub.nais.io - - host: veilarboppfolging.dev-fss-pub.nais.io - - host: veilarbvedtaksstotte.dev-fss-pub.nais.io - - host: veilarbveileder.dev-fss-pub.nais.io - - host: veilarbarena.dev-fss-pub.nais.io + - host: pdl-api.prod-fss-pub.nais.io + - host: veilarboppfolging.prod-fss-pub.nais.io + - host: veilarbvedtaksstotte.prod-fss-pub.nais.io + - host: veilarbveileder.prod-fss-pub.nais.io + - host: veilarbarena.prod-fss-pub.nais.io resources: limits: cpu: "4" @@ -120,9 +122,9 @@ spec: - name: POAO_TILGANG_URL value: "http://poao-tilgang.poao" - name: POAO_TILGANG_TOKEN_SCOPE - value: "api://prod-fss.poao.poao-tilgang/.default" + value: "api://prod-gcp.poao.poao-tilgang/.default" - name: KODEVERK_URL - value: "https://kodeverk-api.intern.nav.no" + value: "http://kodeverk-api.team-rocket" - name: KODEVERK_SCOPE value: "api://prod-gcp.team-rocket.kodeverk-api/.default" - name: OPPSLAG_ARBEIDSSOEKERREGISTERET_URL diff --git a/.nais/application/gcp/opensearch-dev.yaml 
b/.nais/application/gcp/opensearch-dev.yaml deleted file mode 100644 index d609ab0b49..0000000000 --- a/.nais/application/gcp/opensearch-dev.yaml +++ /dev/null @@ -1,25 +0,0 @@ -apiVersion: aiven.io/v1alpha1 -kind: OpenSearch -metadata: - labels: - team: obo - name: opensearch-obo-veilarbportefolje - namespace: obo -spec: - plan: startup-4 - project: nav-dev - ---- - -apiVersion: aiven.io/v1alpha1 -kind: ServiceIntegration -metadata: - labels: - team: obo - name: opensearch-obo-veilarbportefolje - namespace: obo -spec: - project: nav-dev - integrationType: prometheus - destinationEndpointId: f20f5b48-18f4-4e2a-8e5f-4ab3edb19733 - sourceServiceName: opensearch-obo-veilarbportefolje \ No newline at end of file diff --git a/README.md b/README.md index 574d4981c8..330a6949fc 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,8 @@ Følgende data aggregeres: ### Swagger-dokumentasjon -Endepunktene er dokumentert vha. Swagger/OpenAPI. Dokumentasjonen er tilgjengelig på egen sti, som i skrivende stund (08.05.24) er `https://veilarbportefoljeflate.intern.dev.nav.no/veilarbportefolje/internal/swagger-ui/index.html`. +Endepunktene er dokumentert vha. Swagger/OpenAPI. Dokumentasjonen er tilgjengelig på egen sti, som i skrivende stund ( +08.05.24) er `https://veilarbportefoljeflate.intern.dev.nav.no/veilarbportefolje/internal/swagger-ui/index.html`. ## Hvordan bygge @@ -36,37 +37,49 @@ Mere info om: https://aiven.io/opensearch Settings for index er definert i filen: + ``` src/main/resources/opensearch_settings.json ``` ### Fremgangsmåte ved endringer i OpenSearch settings + Når det gjøres endringer i `src/main/resources/opensearch_settings.json`, så er oppsettet slik at det må lages en ny indeks. Dvs. man oppdaterer ikke den eksisterende indeksen. Den nye indeksen må populeres med data før den kan erstatte den gamle. #### Fremgangsmåte + 1. Gjør endringer i `src/main/resources/opensearch_settings.json`, commit, push og deploy 2. 
Gå til pto-admin i riktig miljø (dev/prod) og velg "Veilarbportefolje" i dropdown 3. Utfør en "Hovedindeksering". Bruk referansen i response til å følge med i loggene. Denne jobben gjør oppdatering av -alle brukere i eksisterende indeks. Formålet er å se hvor lang tid det tar å indeksere alle brukerne. + alle brukere i eksisterende indeks. Formålet er å se hvor lang tid det tar å indeksere alle brukerne. 4. Når man oppretter ny indeks (neste steg), så vil ikke endringer som kommer underveis -oppdatere den gamle indeksen som fortsatt er i bruk. Endringene vil først bli synlige i Oversikten når den nye indeksen -er ferdig indeksert, og den gamle indeksen er slettet. Se derfor hvor lang tid indekseringen i steg 3 tok, og vurder -tidspunktet på dagen neste steg bør gjøres. Normalt sett tar indekseringen 10-15min. Dersom det tar mye lenger tid -bør det undersøkes om nye endringer har ført til dette, f.eks. manglende databaseindeks. + oppdatere den gamle indeksen som fortsatt er i bruk. Endringene vil først bli synlige i Oversikten når den nye + indeksen + er ferdig indeksert, og den gamle indeksen er slettet. Se derfor hvor lang tid indekseringen i steg 3 tok, og vurder + tidspunktet på dagen neste steg bør gjøres. Normalt sett tar indekseringen 10-15min. Dersom det tar mye lenger tid + bør det undersøkes om nye endringer har ført til dette, f.eks. manglende databaseindeks. 5. Utfør "Hovedindeksering: Nytt alias", som oppretter ny indeks og indekserer alle brukere på den. Samtidig blir -gjeldende indeks satt til read-only. Når indeksering er ferdig tas den nye indeksen i bruk og den gamle slettes. -Bruk referansen i response til å følge med i loggene. Dersom jobben feiler, så skal den nye indeksen bli slettet, og -den gamle brukes videre (read-only modus fjernes). Skulle jobben feile, så bør man kjøre en vanlig hovedindeksering -igjen (steg 3), siden endringer som kom inn mens jobben kjørte, før den feilet, kun blir skrivet til den nye indeksen. 
+ gjeldende indeks satt til read-only. Når indeksering er ferdig tas den nye indeksen i bruk og den gamle slettes. + Bruk referansen i response til å følge med i loggene. Dersom jobben feiler, så skal den nye indeksen bli slettet, og + den gamle brukes videre (read-only modus fjernes). Skulle jobben feile, så bør man kjøre en vanlig hovedindeksering + igjen (steg 3), siden endringer som kom inn mens jobben kjørte, før den feilet, kun blir skrivet til den nye + indeksen. ## PostgreSQL Innloggingsinformasjon til databasen: https://vault.adeo.no/ + * Dev: `vault read postgresql/preprod-fss/creds/veilarbportefolje-dev-admin` * Prod: `vault read postgresql/prod-fss/creds/veilarbportefolje-prod-readonly` +* + +## PostgreSQL GCP + +Innloggingsinformasjon til databasen: +.... ## Plugin til IntelliJ diff --git a/pom.xml b/pom.xml index a02920a453..34aff5d3a9 100644 --- a/pom.xml +++ b/pom.xml @@ -166,22 +166,7 @@ com.zaxxer HikariCP - - - - no.nav - vault-jdbc - 1.3.10 - - - org.slf4j - slf4j-simple - - - org.slf4j - slf4j-api - - + 5.1.0 @@ -317,12 +302,12 @@ org.flywaydb flyway-database-postgresql - 10.17.0 + 10.17.2 org.postgresql postgresql - 42.7.3 + 42.7.4 net.javacrumbs.shedlock diff --git a/src/main/java/no/nav/pto/veilarbportefolje/arbeidsliste/ArbeidslisteService.java b/src/main/java/no/nav/pto/veilarbportefolje/arbeidsliste/ArbeidslisteService.java index 1fe84326f5..1fffb2a1ab 100644 --- a/src/main/java/no/nav/pto/veilarbportefolje/arbeidsliste/ArbeidslisteService.java +++ b/src/main/java/no/nav/pto/veilarbportefolje/arbeidsliste/ArbeidslisteService.java @@ -3,8 +3,6 @@ import io.vavr.control.Try; import io.vavr.control.Validation; import lombok.RequiredArgsConstructor; -import no.nav.common.metrics.Event; -import no.nav.common.metrics.MetricsClient; import no.nav.common.types.identer.AktorId; import no.nav.common.types.identer.EnhetId; import no.nav.common.types.identer.Fnr; @@ -34,7 +32,6 @@ public class ArbeidslisteService { private final 
ArbeidslisteRepositoryV2 arbeidslisteRepositoryV2; private final BrukerServiceV2 brukerServiceV2; private final OpensearchIndexerV2 opensearchIndexerV2; - private final MetricsClient metricsClient; public Try getArbeidsliste(Fnr fnr) { return arbeidslisteRepositoryV2.retrieveArbeidsliste(fnr); @@ -46,8 +43,6 @@ public List getArbeidslisteForVeilederPaEnhet(EnhetId enhet, Veile public Try createArbeidsliste(ArbeidslisteDTO dto) { - metricsClient.report((new Event("arbeidsliste.opprettet"))); - Try aktoerId = hentAktorId(dto.getFnr()); if (aktoerId.isFailure()) { return Try.failure(aktoerId.getCause()); diff --git a/src/main/java/no/nav/pto/veilarbportefolje/arenapakafka/aktiviteter/TiltakService.java b/src/main/java/no/nav/pto/veilarbportefolje/arenapakafka/aktiviteter/TiltakService.java index b37bad25c3..ab5a6eabbf 100644 --- a/src/main/java/no/nav/pto/veilarbportefolje/arenapakafka/aktiviteter/TiltakService.java +++ b/src/main/java/no/nav/pto/veilarbportefolje/arenapakafka/aktiviteter/TiltakService.java @@ -37,7 +37,7 @@ public class TiltakService { private final OpensearchIndexer opensearchIndexer; private final Cache enhetTiltakCachePostgres = Caffeine.newBuilder() - .expireAfterWrite(10, TimeUnit.MINUTES) + .expireAfterWrite(30, TimeUnit.MINUTES) .maximumSize(1000) .build(); diff --git a/src/main/java/no/nav/pto/veilarbportefolje/config/ClientConfig.java b/src/main/java/no/nav/pto/veilarbportefolje/config/ClientConfig.java index 5021a9155c..5497aa3940 100644 --- a/src/main/java/no/nav/pto/veilarbportefolje/config/ClientConfig.java +++ b/src/main/java/no/nav/pto/veilarbportefolje/config/ClientConfig.java @@ -6,8 +6,6 @@ import no.nav.common.client.aktoroppslag.PdlAktorOppslagClient; import no.nav.common.client.pdl.PdlClient; import no.nav.common.client.pdl.PdlClientImpl; -import no.nav.common.metrics.InfluxClient; -import no.nav.common.metrics.MetricsClient; import no.nav.common.rest.client.RestClient; import 
no.nav.common.token_client.client.AzureAdMachineToMachineTokenClient; import no.nav.pto.veilarbportefolje.arbeidssoeker.v2.OppslagArbeidssoekerregisteretClient; @@ -19,6 +17,7 @@ import no.nav.pto.veilarbportefolje.vedtakstotte.VedtaksstotteClient; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; + import java.net.http.HttpClient; import java.util.function.Supplier; @@ -26,16 +25,12 @@ public class ClientConfig { static final String APPLICATION_NAME = "veilarbportefolje"; + @Bean public PoaoTilgangWrapper poaoTilgangWrapper(AuthContextHolder authContextHolder, AzureAdMachineToMachineTokenClient tokenClient, EnvironmentProperties environmentProperties) { return new PoaoTilgangWrapper(authContextHolder, tokenClient, environmentProperties); } - @Bean - public MetricsClient metricsClient() { - return new InfluxClient(); - } - @Bean public VeilarbVeilederClient veilarbVeilederClient(AuthService authService, EnvironmentProperties environmentProperties) { return new VeilarbVeilederClient(authService, environmentProperties); diff --git a/src/main/java/no/nav/pto/veilarbportefolje/config/DbConfigPostgres.java b/src/main/java/no/nav/pto/veilarbportefolje/config/DbConfigPostgres.java index cbc9a1e24a..4e6075b95d 100644 --- a/src/main/java/no/nav/pto/veilarbportefolje/config/DbConfigPostgres.java +++ b/src/main/java/no/nav/pto/veilarbportefolje/config/DbConfigPostgres.java @@ -1,10 +1,10 @@ package no.nav.pto.veilarbportefolje.config; +import jakarta.annotation.PostConstruct; import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.flywaydb.core.Flyway; -import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; @@ -14,11 +14,9 @@ import 
org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.annotation.EnableTransactionManagement; -import jakarta.annotation.PostConstruct; import javax.sql.DataSource; import static no.nav.pto.veilarbportefolje.util.DbUtils.createDataSource; -import static no.nav.pto.veilarbportefolje.util.DbUtils.getSqlAdminRole; @Slf4j @Configuration @@ -30,12 +28,7 @@ public class DbConfigPostgres { @Bean @Primary public DataSource dataSource() { - return createDataSource(environmentProperties.getDbUrl(), true); - } - - @Bean("PostgresReadOnly") - public DataSource dataSourceRead() { - return createDataSource(environmentProperties.getDbUrl(), false); + return createDataSource(environmentProperties.getDbUrl()); } @Bean @@ -45,12 +38,12 @@ public JdbcTemplate db(DataSource dataSource) { } @Bean(name = "PostgresJdbcReadOnly") - public JdbcTemplate dbRead(@Qualifier("PostgresReadOnly") DataSource dataSource) { + public JdbcTemplate dbRead(DataSource dataSource) { return new JdbcTemplate(dataSource); } @Bean(name = "PostgresNamedJdbcReadOnly") - public NamedParameterJdbcTemplate namedParameterJdbcTemplate(@Qualifier("PostgresReadOnly") DataSource dataSource) { + public NamedParameterJdbcTemplate namedParameterJdbcTemplate(DataSource dataSource) { return new NamedParameterJdbcTemplate(dataSource); } @@ -62,18 +55,19 @@ public PlatformTransactionManager transactionManager(DataSource dataSource) { @PostConstruct @SneakyThrows public void migrateDb() { - log.info("Starting database migration..."); - DataSource dataSource = createDataSource(environmentProperties.getDbUrl(), true); + DataSource dataSource = createDataSource(environmentProperties.getDbUrl()); - Flyway.configure() - .validateMigrationNaming(true) - .dataSource(dataSource) - .locations("db/postgres") - .initSql("SET ROLE '" + getSqlAdminRole() + "';") - .baselineOnMigrate(true) - .load() - .migrate(); + if (dataSource != null) { + log.info("Starting database migration..."); + 
Flyway.configure() + .validateMigrationNaming(true) + .dataSource(dataSource) + .locations("db/postgres") + .baselineOnMigrate(true) + .load() + .migrate(); - dataSource.getConnection().close(); + dataSource.getConnection().close(); + } } } diff --git a/src/main/java/no/nav/pto/veilarbportefolje/controller/Frontendlogger.java b/src/main/java/no/nav/pto/veilarbportefolje/controller/Frontendlogger.java index 599f141372..a3451f4c76 100644 --- a/src/main/java/no/nav/pto/veilarbportefolje/controller/Frontendlogger.java +++ b/src/main/java/no/nav/pto/veilarbportefolje/controller/Frontendlogger.java @@ -7,7 +7,6 @@ import lombok.experimental.Accessors; import lombok.extern.slf4j.Slf4j; import no.nav.common.metrics.Event; -import no.nav.common.metrics.MetricsClient; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; @@ -15,21 +14,17 @@ import java.util.Map; -import static no.nav.common.utils.EnvironmentUtils.isProduction; -import static no.nav.pto.veilarbportefolje.util.SecureLog.secureLog; - @Slf4j @RestController @RequiredArgsConstructor @RequestMapping("/api/logger") @Tag(name = "Frontendlogger", description = "Frontendlogger-funksjonalitet") public class Frontendlogger { - private final MetricsClient metricsClient; @PostMapping("/event") @Operation(summary = "Skriv event til Influx", description = "Registrerer en frontend-hendelse og sender til InfluxDB.") public void skrivEventTilInflux(@RequestBody FrontendEvent event) { - Event toInflux = new Event(event.name + ".event"); + /*Event toInflux = new Event(event.name + ".event"); if (event.getTags() != null) { event.getTags().forEach(toInflux::addTagToReport); } @@ -41,7 +36,7 @@ public void skrivEventTilInflux(@RequestBody FrontendEvent event) { if (!isProduction().orElse(false)) { secureLog.info("Skriver event til influx: " + eventToString(event.name, toInflux)); } - 
metricsClient.report(toInflux); + metricsClient.report(toInflux);*/ } @Data diff --git a/src/main/java/no/nav/pto/veilarbportefolje/internal/BrukerMappingAlarm.java b/src/main/java/no/nav/pto/veilarbportefolje/internal/BrukerMappingAlarm.java index 4b56f4a6f5..e411b74d82 100644 --- a/src/main/java/no/nav/pto/veilarbportefolje/internal/BrukerMappingAlarm.java +++ b/src/main/java/no/nav/pto/veilarbportefolje/internal/BrukerMappingAlarm.java @@ -5,6 +5,7 @@ import io.micrometer.core.instrument.binder.MeterBinder; import lombok.NonNull; import lombok.RequiredArgsConstructor; +import no.nav.common.job.leader_election.LeaderElectionClient; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.scheduling.annotation.Scheduled; @@ -20,6 +21,7 @@ public class BrukerMappingAlarm implements MeterBinder { @Qualifier("PostgresJdbcReadOnly") private final JdbcTemplate db; + private final LeaderElectionClient leaderElectionClient; private final AtomicInteger antallBrukereSomIkkeHarIdentIPDL = new AtomicInteger(0); private final AtomicInteger antallAktiveBrukereSomIkkeHarBrukerDataFraPDL = new AtomicInteger(0); @@ -38,12 +40,15 @@ public void bindTo(@NonNull MeterRegistry meterRegistry) { .register(meterRegistry); } - @Scheduled(cron = "* */10 * * * ?") + @Scheduled(cron = "0 */10 * * * ?") public void oppdaterMetrikk() { - antallBrukereSomIkkeHarIdentIPDL.set(antallBrukereSomIkkeHarIdentIPDL()); - antallAktiveBrukereSomIkkeHarBrukerDataFraPDL.set(antallAktiveBrukereSomIkkeHarBrukerDataFraPDL()); - antallBrukereSomIkkeLiggerIDatabaseLenkenFraArena.set(antallBrukereSomIkkeLiggerIDatabaseLenkenFraArena()); + if (leaderElectionClient.isLeader()) { + antallBrukereSomIkkeHarIdentIPDL.set(antallBrukereSomIkkeHarIdentIPDL()); + antallAktiveBrukereSomIkkeHarBrukerDataFraPDL.set(antallAktiveBrukereSomIkkeHarBrukerDataFraPDL()); + 
antallBrukereSomIkkeLiggerIDatabaseLenkenFraArena.set(antallBrukereSomIkkeLiggerIDatabaseLenkenFraArena()); + } } + private int antallBrukereSomIkkeHarIdentIPDL() { String sql = """ select count(*) from oppfolging_data od diff --git a/src/main/java/no/nav/pto/veilarbportefolje/kafka/KafkaStats.java b/src/main/java/no/nav/pto/veilarbportefolje/kafka/KafkaStats.java index 0d06c38faf..698fa6267c 100644 --- a/src/main/java/no/nav/pto/veilarbportefolje/kafka/KafkaStats.java +++ b/src/main/java/no/nav/pto/veilarbportefolje/kafka/KafkaStats.java @@ -31,7 +31,7 @@ public void bindTo(@NonNull MeterRegistry meterRegistry) { Gauge.builder("veilarbportefolje_kafka_retries_avg", kafkaRetriesAvg, AtomicDouble::get).description("Average number of retries for failed messages").register(meterRegistry); } - @Scheduled(cron = "* */10 * * * ?") + @Scheduled(cron = "0 */10 * * * ?") public void oppdaterMetrikk() { try { List retries = this.jdbcTemplate.query("SELECT retries FROM KAFKA_CONSUMER_RECORD WHERE retries > 0", (rs, rowNum) -> rs.getInt("retries")); diff --git a/src/main/java/no/nav/pto/veilarbportefolje/kafka/deserializers/AivenAvroDeserializer.java b/src/main/java/no/nav/pto/veilarbportefolje/kafka/deserializers/AivenAvroDeserializer.java index 7a0bb74bee..6b63c362b9 100644 --- a/src/main/java/no/nav/pto/veilarbportefolje/kafka/deserializers/AivenAvroDeserializer.java +++ b/src/main/java/no/nav/pto/veilarbportefolje/kafka/deserializers/AivenAvroDeserializer.java @@ -8,7 +8,7 @@ import java.util.Map; public class AivenAvroDeserializer { - private static final String KAFKA_SCHEMAS_URL = EnvironmentUtils.getRequiredProperty("KAFKA_SCHEMAS_URL"); + private static final String KAFKA_SCHEMAS_URL = EnvironmentUtils.getRequiredProperty("KAFKA_SCHEMA_REGISTRY"); public Deserializer getDeserializer() { Deserializer avroDeserializer = Deserializers.aivenAvroDeserializer(); diff --git a/src/main/java/no/nav/pto/veilarbportefolje/kafka/deserializers/OnpremAvroDeserializer.java 
b/src/main/java/no/nav/pto/veilarbportefolje/kafka/deserializers/OnpremAvroDeserializer.java deleted file mode 100644 index be316b8e6d..0000000000 --- a/src/main/java/no/nav/pto/veilarbportefolje/kafka/deserializers/OnpremAvroDeserializer.java +++ /dev/null @@ -1,18 +0,0 @@ -package no.nav.pto.veilarbportefolje.kafka.deserializers; - -import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig; -import no.nav.common.kafka.consumer.util.deserializer.Deserializers; -import no.nav.common.utils.EnvironmentUtils; -import org.apache.kafka.common.serialization.Deserializer; - -import java.util.Map; - -public class OnpremAvroDeserializer { - private static final String KAFKA_SCHEMAS_URL = EnvironmentUtils.getRequiredProperty("KAFKA_SCHEMAS_URL"); - - public Deserializer getDeserializer() { - return Deserializers.onPremAvroDeserializer(KAFKA_SCHEMAS_URL, - Map.of(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true, - KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, KAFKA_SCHEMAS_URL)); - } -} diff --git a/src/main/java/no/nav/pto/veilarbportefolje/opensearch/OpensearchCountService.java b/src/main/java/no/nav/pto/veilarbportefolje/opensearch/OpensearchCountService.java index 513779be56..8dae906714 100644 --- a/src/main/java/no/nav/pto/veilarbportefolje/opensearch/OpensearchCountService.java +++ b/src/main/java/no/nav/pto/veilarbportefolje/opensearch/OpensearchCountService.java @@ -4,8 +4,6 @@ import lombok.Data; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import no.nav.common.metrics.Event; -import no.nav.common.metrics.MetricsClient; import no.nav.common.rest.client.RestUtils; import no.nav.pto.veilarbportefolje.opensearch.domene.OpensearchClientConfig; import okhttp3.OkHttpClient; @@ -23,17 +21,14 @@ public class OpensearchCountService { private final OpensearchClientConfig opensearchClientConfig; private final String indexName; - private final MetricsClient metricsClient; private final OkHttpClient client; @Autowired public 
OpensearchCountService( OpensearchClientConfig opensearchClientConfig, - IndexName opensearchIndex, - MetricsClient metricsClient + IndexName opensearchIndex ) { this.opensearchClientConfig = opensearchClientConfig; - this.metricsClient = metricsClient; this.indexName = opensearchIndex.getValue(); client = baseClient(); } @@ -49,22 +44,13 @@ public long getCount() { try (Response response = client.newCall(request).execute()) { RestUtils.throwIfNotSuccessful(response); - long count = RestUtils.parseJsonResponse(response, CountResponse.class) + + return RestUtils.parseJsonResponse(response, CountResponse.class) .map(CountResponse::getCount) .orElse(0L); - - reportDocCountToInfluxdb(count); - return count; } } - private void reportDocCountToInfluxdb(long count) { - Event event = new Event("portefolje.antall.brukere"); - event.addFieldToReport("antall_brukere", count); - - metricsClient.report(event); - } - public static String createAbsoluteUrl(OpensearchClientConfig config, String indexName) { return String.format("%s%s/", createAbsoluteUrl(config), diff --git a/src/main/java/no/nav/pto/veilarbportefolje/persononinfo/domene/PDLPerson.java b/src/main/java/no/nav/pto/veilarbportefolje/persononinfo/domene/PDLPerson.java index 595a40247e..d02e04af21 100644 --- a/src/main/java/no/nav/pto/veilarbportefolje/persononinfo/domene/PDLPerson.java +++ b/src/main/java/no/nav/pto/veilarbportefolje/persononinfo/domene/PDLPerson.java @@ -116,9 +116,7 @@ private static String hentFoedselLand(List !foedsel.getMetadata().isHistorisk()).toList(); - if (fodselsListe.size() > 1) { - throw new PdlPersonValideringException("Støtte for flere registrerte foedselLand er ikke implentert"); - } + return fodselsListe.stream().findFirst() .map(PdlPersonResponse.PdlPersonResponseData.Foedested::getFoedeland) .orElse(""); diff --git a/src/main/java/no/nav/pto/veilarbportefolje/util/DbUtils.java b/src/main/java/no/nav/pto/veilarbportefolje/util/DbUtils.java index 81763c59d1..5b55f7f4be 100644 --- 
a/src/main/java/no/nav/pto/veilarbportefolje/util/DbUtils.java +++ b/src/main/java/no/nav/pto/veilarbportefolje/util/DbUtils.java @@ -1,30 +1,22 @@ package no.nav.pto.veilarbportefolje.util; import com.zaxxer.hikari.HikariConfig; -import lombok.SneakyThrows; +import com.zaxxer.hikari.HikariDataSource; import lombok.extern.slf4j.Slf4j; -import no.nav.vault.jdbc.hikaricp.HikariCPVaultUtil; import javax.sql.DataSource; -import static no.nav.common.utils.EnvironmentUtils.isProduction; -import static org.postgresql.PGProperty.SOCKET_TIMEOUT; - @Slf4j public class DbUtils { - private enum DbRole { - ADMIN, - READONLY, - } - public static DataSource createDataSource(String dbUrl, boolean admin) { - if (admin) { - HikariConfig config = createDataSourceConfig(dbUrl, 2); - return createVaultRefreshDataSource(config, DbRole.ADMIN); + public static DataSource createDataSource(String dbUrl) { + try { + HikariConfig config = createDataSourceConfig(dbUrl, 15); + return new HikariDataSource(config); + } catch (Exception e) { + log.info("Can't connect to db, error: " + e, e); + return null; } - HikariConfig config = createDataSourceConfig(dbUrl, 3); - config.addDataSourceProperty(SOCKET_TIMEOUT.getName(), "600"); // 10min - return createVaultRefreshDataSource(config, DbRole.READONLY); } public static HikariConfig createDataSourceConfig(String dbUrl, int maximumPoolSize) { @@ -36,29 +28,6 @@ public static HikariConfig createDataSourceConfig(String dbUrl, int maximumPoolS return config; } - public static String getSqlAdminRole() { - boolean isProd = isProduction().orElse(false); - return (isProd ? "veilarbportefolje-prod-admin" : "veilarbportefolje-dev-admin"); - } - - public static String getSqlReadOnlyRole() { - boolean isProd = isProduction().orElse(false); - return (isProd ? 
"veilarbportefolje-prod-readonly" : "veilarbportefolje-dev-readonly"); - } - - @SneakyThrows - private static DataSource createVaultRefreshDataSource(HikariConfig config, DbRole role) { - if (role.equals(DbRole.READONLY)) { - return HikariCPVaultUtil.createHikariDataSourceWithVaultIntegration(config, getMountPath(), getSqlReadOnlyRole()); - } - return HikariCPVaultUtil.createHikariDataSourceWithVaultIntegration(config, getMountPath(), getSqlAdminRole()); - } - - private static String getMountPath() { - boolean isProd = isProduction().orElse(false); - return "postgresql/" + (isProd ? "prod-fss" : "preprod-fss"); - } - public static String boolToJaNei(boolean bool) { return bool ? "J" : "N"; } diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 9a4d8833fd..63441dd1b2 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -21,7 +21,7 @@ app.env.opensearchUsername=${OPEN_SEARCH_USERNAME} app.env.opensearchPassword=${OPEN_SEARCH_PASSWORD} app.env.unleashUrl=${UNLEASH_SERVER_API_URL}/api app.env.unleashApiToken=${UNLEASH_SERVER_API_TOKEN} -app.env.dbUrl=${VEILARBPORTEFOLJE_POSTGRES_DB_URL} +app.env.dbUrl=${DB_JDBC_URL} app.env.naisAadDiscoveryUrl=${AZURE_APP_WELL_KNOWN_URL:null} app.env.naisAadClientId=${AZURE_APP_CLIENT_ID:null} app.env.kodeverkUrl=${KODEVERK_URL} diff --git a/src/main/resources/db/postgres/V1_90__brukertiltakv2.sql b/src/main/resources/db/postgres/V1_90__brukertiltakv2.sql new file mode 100644 index 0000000000..48f198836c --- /dev/null +++ b/src/main/resources/db/postgres/V1_90__brukertiltakv2.sql @@ -0,0 +1 @@ +CREATE INDEX IDX_brukertiltakv2_status on brukertiltak_v2 (status); \ No newline at end of file diff --git a/src/test/java/no/nav/pto/veilarbportefolje/config/ApplicationConfigTest.java b/src/test/java/no/nav/pto/veilarbportefolje/config/ApplicationConfigTest.java index a4c72d3fbf..c969f541d2 100644 --- 
a/src/test/java/no/nav/pto/veilarbportefolje/config/ApplicationConfigTest.java +++ b/src/test/java/no/nav/pto/veilarbportefolje/config/ApplicationConfigTest.java @@ -6,7 +6,6 @@ import no.nav.common.auth.context.AuthContextHolderThreadLocal; import no.nav.common.auth.context.UserRole; import no.nav.common.job.leader_election.LeaderElectionClient; -import no.nav.common.metrics.MetricsClient; import no.nav.common.token_client.client.AzureAdMachineToMachineTokenClient; import no.nav.common.token_client.client.AzureAdOnBehalfOfTokenClient; import no.nav.common.utils.Credentials; @@ -38,7 +37,6 @@ import no.nav.pto.veilarbportefolje.kodeverk.KodeverkClient; import no.nav.pto.veilarbportefolje.kodeverk.KodeverkService; import no.nav.pto.veilarbportefolje.mal.MalService; -import no.nav.pto.veilarbportefolje.mock.MetricsClientMock; import no.nav.pto.veilarbportefolje.opensearch.*; import no.nav.pto.veilarbportefolje.opensearch.domene.OpensearchClientConfig; import no.nav.pto.veilarbportefolje.oppfolging.*; @@ -234,11 +232,6 @@ public DefaultUnleash defaultUnleash() { return mock; } - @Bean - public MetricsClient metricsClient() { - return new MetricsClientMock(); - } - @Bean public RestHighLevelClient restHighLevelClient() { return createClient(opensearchClientConfig()); diff --git a/src/test/java/no/nav/pto/veilarbportefolje/mock/MetricsClientMock.java b/src/test/java/no/nav/pto/veilarbportefolje/mock/MetricsClientMock.java deleted file mode 100644 index a2e58fd627..0000000000 --- a/src/test/java/no/nav/pto/veilarbportefolje/mock/MetricsClientMock.java +++ /dev/null @@ -1,20 +0,0 @@ -package no.nav.pto.veilarbportefolje.mock; - -import lombok.extern.slf4j.Slf4j; -import no.nav.common.metrics.Event; -import no.nav.common.metrics.MetricsClient; - -import java.util.Map; - -@Slf4j -public class MetricsClientMock implements MetricsClient { - - @Override - public void report(Event event) {} - - @Override - public void report(String name, Map fields, Map tags, long l) { - 
log.info(String.format("sender event %s Fields: %s Tags: %s", name, fields.toString(), tags.toString())); - } - -}