diff --git a/.github/actions/push_image/action.yml b/.github/actions/push_image/action.yml new file mode 100644 index 000000000..e4075f213 --- /dev/null +++ b/.github/actions/push_image/action.yml @@ -0,0 +1,41 @@ +name: Push image +description: Upload built image to AWS ECR +inputs: + username: + description: ghcr.io username + required: true + password: + description: ghcr.io password + required: true + aws-arn: + description: aws util arn + required: true + image-tag: + description: image tag for container image + required: true + + + +runs: + using: composite + steps: + - name: Login to GitHub Container Registry + uses: docker/login-action@v3.3.0 + with: + registry: ghcr.io + username: ${{ inputs.username }} + password: ${{ inputs.password }} + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ inputs.aws-arn }} + role-session-name: aoe-runner-ecr-push + aws-region: eu-west-1 + + - name: 03-push-image.sh + working-directory: ./ + shell: bash + env: + IMAGE_TAG: ${{ inputs.image-tag }} + run: ./deploy-scripts/03-push-image.sh \ No newline at end of file diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 9feaed73c..596db3902 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,14 +1,71 @@ name: Build on: + workflow_dispatch: pull_request: branches: - '**' - workflow_dispatch: + +env: + DOCKER_BUILDKIT: '1' + +defaults: + run: + shell: bash + +permissions: + id-token: write + contents: read + actions: read + packages: write jobs: build: runs-on: ubuntu-latest + name: 01-build.sh + steps: + - uses: actions/checkout@v4 + + - name: Login to GitHub Container Registry + uses: docker/login-action@v3.3.0 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Run 01-build.sh + run: ./deploy-scripts/01-build.sh + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + IMAGE_TAG: ga-${{ github.run_number }} 
+ TRUST_STORE_PASSWORD: ${{ secrets.TRUST_STORE_PASSWORD }} + + lint: + name: 02-lint.sh + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v4.2.1 + - name: Run 02-lint.sh + run: ./deploy-scripts/02-lint.sh + + push_image: + name: 03-push-image.sh + if: github.event.pull_request.draft == false + needs: [ lint, build ] + permissions: + packages: write + id-token: write + contents: read + runs-on: 'ubuntu-24.04' steps: - - name: Output - run: echo "===============> Would now start building AOE" \ No newline at end of file + - name: Fetch history for all branches and tags + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Push image + uses: ./.github/actions/push_image + with: + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + aws-arn: ${{ secrets.AWS_AOE_UTILITY_ROLE_ARN }} + image-tag: ga-${{ github.run_number }} diff --git a/.github/workflows/deploy_dev.yml b/.github/workflows/deploy_dev.yml index 5ae91d399..b69df1ea0 100644 --- a/.github/workflows/deploy_dev.yml +++ b/.github/workflows/deploy_dev.yml @@ -6,10 +6,51 @@ on: branch: description: Branch to deploy required: true + proceedDeploy: + description: Set to true to skip CDK diff and directly proceed with deployment. Default is false. 
+ required: true + default: 'false' + +concurrency: deploy-dev + +env: + DOCKER_BUILDKIT: '1' + +defaults: + run: + shell: bash jobs: deploy_dev: - runs-on: ubuntu-latest + name: 04-deploy-dev.sh + permissions: + packages: read + id-token: write + contents: write + runs-on: 'ubuntu-24.04' steps: - - name: Output - run: echo "===============> Would now start deploying AOE to DEV" \ No newline at end of file + - uses: actions/checkout@v4.2.1 + - name: Checkout all branches and tags with full history + uses: actions/checkout@v4 + with: + ref: ${{ inputs.branch }} + fetch-depth: 0 + - name: Configure dev AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ secrets.AWS_AOE_DEV_ROLE_ARN }} + role-session-name: aoe-runner-deploy-dev + aws-region: eu-west-1 + + - name: Run CDK Diff + if: ${{ github.event.inputs.proceedDeploy == 'false' }} + run: ./deploy-scripts/04-deploy-dev.sh diff + env: + UTILITY_ACCOUNT_ID: ${{ secrets.UTILITY_ACCOUNT_ID }} + + - name: Run CDK Deploy + if: ${{ github.event.inputs.proceedDeploy == 'true' }} + run: ./deploy-scripts/04-deploy-dev.sh deploy + env: + UTILITY_ACCOUNT_ID: ${{ secrets.UTILITY_ACCOUNT_ID }} + diff --git a/.github/workflows/deploy_prod.yml b/.github/workflows/deploy_prod.yml index 0376279dd..0703b33d4 100644 --- a/.github/workflows/deploy_prod.yml +++ b/.github/workflows/deploy_prod.yml @@ -6,10 +6,51 @@ on: branch: description: Branch to deploy required: true + proceedDeploy: + description: Set to true to skip CDK diff and directly proceed with deployment. Default is false. 
+ required: true + default: 'false' + +concurrency: deploy-prod + +env: + DOCKER_BUILDKIT: '1' + +defaults: + run: + shell: bash jobs: deploy_prod: - runs-on: ubuntu-latest + name: 06-deploy-prod.sh + permissions: + packages: read + id-token: write + contents: write + runs-on: 'ubuntu-24.04' steps: - - name: Output - run: echo "===============> Would now start deploying AOE to PROD" \ No newline at end of file + - uses: actions/checkout@v4.2.1 + - name: Checkout all branches and tags with full history + uses: actions/checkout@v4 + with: + ref: ${{ inputs.branch }} + fetch-depth: 0 + - name: Configure prod AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ secrets.AWS_AOE_PROD_ROLE_ARN }} + role-session-name: aoe-runner-deploy-prod + aws-region: eu-west-1 + + - name: Run CDK Diff + if: ${{ github.event.inputs.proceedDeploy == 'false' }} + run: ./deploy-scripts/06-deploy-prod.sh diff + env: + UTILITY_ACCOUNT_ID: ${{ secrets.UTILITY_ACCOUNT_ID }} + + - name: Run CDK Deploy + if: ${{ github.event.inputs.proceedDeploy == 'true' }} + run: ./deploy-scripts/06-deploy-prod.sh deploy + env: + UTILITY_ACCOUNT_ID: ${{ secrets.UTILITY_ACCOUNT_ID }} + diff --git a/.github/workflows/deploy_qa.yml b/.github/workflows/deploy_qa.yml index 809e9403b..b2b3ce039 100644 --- a/.github/workflows/deploy_qa.yml +++ b/.github/workflows/deploy_qa.yml @@ -6,10 +6,51 @@ on: branch: description: Branch to deploy required: true + proceedDeploy: + description: Set to true to skip CDK diff and directly proceed with deployment. Default is false. 
+ required: true + default: 'false' + +concurrency: deploy-qa + +env: + DOCKER_BUILDKIT: '1' + +defaults: + run: + shell: bash jobs: deploy_qa: - runs-on: ubuntu-latest + name: 05-deploy-qa.sh + permissions: + packages: read + id-token: write + contents: write + runs-on: 'ubuntu-24.04' steps: - - name: Output - run: echo "===============> Would now start deploying AOE to QA" \ No newline at end of file + - uses: actions/checkout@v4.2.1 + - name: Checkout all branches and tags with full history + uses: actions/checkout@v4 + with: + ref: ${{ inputs.branch }} + fetch-depth: 0 + - name: Configure qa AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ secrets.AWS_AOE_QA_ROLE_ARN }} + role-session-name: aoe-runner-deploy-qa + aws-region: eu-west-1 + + - name: Run CDK Diff + if: ${{ github.event.inputs.proceedDeploy == 'false' }} + run: ./deploy-scripts/05-deploy-qa.sh diff + env: + UTILITY_ACCOUNT_ID: ${{ secrets.UTILITY_ACCOUNT_ID }} + + - name: Run CDK Deploy + if: ${{ github.event.inputs.proceedDeploy == 'true' }} + run: ./deploy-scripts/05-deploy-qa.sh deploy + env: + UTILITY_ACCOUNT_ID: ${{ secrets.UTILITY_ACCOUNT_ID }} + diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..93ed88cab --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +.idea + +# exclude container mounted folders +/docker/dev \ No newline at end of file diff --git a/.nvmrc b/.nvmrc new file mode 100644 index 000000000..6f2835a64 --- /dev/null +++ b/.nvmrc @@ -0,0 +1 @@ +v23.1.0 diff --git a/README.md b/README.md new file mode 100644 index 000000000..66b2bbdfd --- /dev/null +++ b/README.md @@ -0,0 +1,56 @@ +# AOE + +## Teknologiat +Nämä ovat keskeiset AOE järjestelmän käytettävät teknologiat + +- PostgreSQL +- MongoDB +- Redis +- ElasticSearch +- Apache Kafka +- AWS S3 +- Palvelinteknologiat + - Spring boot + - NodeJs + - Maven +- Web-sovelluksen frontend-teknologiat + - Angular 14.3.0 + - npm-työkalu riippuvuuksien hakuun + - TypeScript + 
+## Kehitysympäristö + +### .env tiedostojen hakeminen +AOE:n service projektit vaatii .env tiedostot jotka voi ladata `./scripts/fetch_secrets.sh` skriptin avulla AWS:stä. +Skripti kopioi AWS S3:sta .env tiedostot projekteihin. Huom skripti tarvitsee toimiakseen aws sso sisäänkirjautumisen. + +### S3 .env datan päivittäminen +AWS S3 .env datan voi päivitää käyttäen `./scripts/update_secrets.sh` skriptiä +1) Päivitä .env tiedosto(t) projektien juuressa. +- `/aoe-web-backend/.env` +- `/aoe-streaming-app/.env` +- `/aoe-data-analytics/.env` +- `/aoe-semantic-apis/.env` +- `/aoe-data-services/.env` +2) Aja `./scripts/update_secrets.sh` + +#### Ajaminen Dockerilla +Koko AOE:n saa paikallisesti ajoon `./start-local-env.sh` skiptin avulla. +Skripti käynnistää docker compose:n avulla kaikki AOE palvelut, frontin ja riippuvuudet docker kontteihin. + +Ympäristössä on myös NGINX, jonka kautta kaikki web sovelluksen https-pyynnöt menevät. + +Lokaali AOE käyttää mock OIDC palvelua, jota vasten AOE tekee autentikaation. OIDC mock service:ssä on konfiguroitu yksi käyttäjä: aoeuser/password123. + +1) Lisää oman koneesi host tiedostoon seuraavat rivit +- 127.0.0.1 aoe-oidc-server +- 127.0.0.1 demo.aoe.fi + +2) Aja projektin juuressa `./start-local-env.sh` +- Skripti lataa tarvittaessa .env tiedostot AWS S3:sta +- Skipti luo itseallekirjoitetun varmenteen hakemistoon `docker/dev/nginx/nginx-selfsigned.crt` +- Lisää varmenne `docker/dev/nginx/nginx-selfsigned.crt` oman koneesi luotettuihin varmenteisiin, selaimat sallivat itseallekirjoitetun varmenteen käytön. + +4. 
Selaimella AOE web sovellukseen pääsee url:lla https://demo.aoe.fi/ + + diff --git a/aoe-data-analytics/.env.template b/aoe-data-analytics/.env.template new file mode 100644 index 000000000..982453354 --- /dev/null +++ b/aoe-data-analytics/.env.template @@ -0,0 +1,21 @@ +spring.datasource.primary.username= +spring.datasource.primary.password= + +kafka.bootstrap-servers= +spring.kafka.bootstrap-servers= +spring.kafka.consumer.bootstrap-servers= +spring.kafka.producer.bootstrap-servers= + +mongodb.primary.host= +mongodb.primary.port= +mongodb.primary.database= +mongodb.primary.username= +mongodb.primary.password= + +mongodb.primary.enable.ssl=false + +kafka.enabled=true +kafka.sasl.enable=false +kafka.consumer.auto.startup=true +trust.store.pass= +trust.store.location= \ No newline at end of file diff --git a/aoe-data-analytics/.gitignore b/aoe-data-analytics/.gitignore index c665ddf8f..7f2d3c2b0 100644 --- a/aoe-data-analytics/.gitignore +++ b/aoe-data-analytics/.gitignore @@ -1,7 +1,7 @@ ### Build Directory target/ -### Environment Variables +# env file .env ### Markup files diff --git a/aoe-data-analytics/.gitlab-ci.yml b/aoe-data-analytics/.gitlab-ci.yml deleted file mode 100644 index 98d7ad400..000000000 --- a/aoe-data-analytics/.gitlab-ci.yml +++ /dev/null @@ -1,40 +0,0 @@ -variables: - DOCKER_HOST: unix:///var/run/docker.sock - DOCKER_DRIVER: overlay2 - -image: docker:24.0.6 - -stages: - - build - - deploy - - cleanup - -build_analytics: - stage: build - script: - - echo "PROD build" - - docker-compose -f docker-compose.prod.yml build - only: - - main - tags: - - prod-oaipmh - -deploy_analytics: - stage: deploy - script: - - echo "PROD deploy" - - docker-compose -f docker-compose.prod.yml up -d - only: - - main - tags: - - prod-oaipmh - -cleanup_analytics: - stage: cleanup - script: - - echo "PROD system clean up" - - docker system prune -a -f --volumes - only: - - main - tags: - - prod-oaipmh diff --git a/aoe-data-analytics/README.md 
b/aoe-data-analytics/README.md index ba371a3a6..334471012 100644 --- a/aoe-data-analytics/README.md +++ b/aoe-data-analytics/README.md @@ -1,13 +1,3 @@ -# [AOE - Library of Open Educational Resources](https://github.com/CSCfi/aoe) - -## Service Component Links in GitHub (mirrored) -- **aoe-data-analytics** -- [aoe-data-services](https://github.com/CSCfi/aoe-data-services) -- [aoe-semantic-apis](https://github.com/CSCfi/aoe-semantic-apis) -- [aoe-streaming-app](https://github.com/CSCfi/aoe-streaming-app) -- [aoe-web-backend](https://github.com/CSCfi/aoe-web-backend) -- [aoe-web-frontend](https://github.com/CSCfi/aoe-web-frontend) - # AOE Data Analytics Microservices for querying analytics and statistics from databases. diff --git a/aoe-data-analytics/deploy-scripts/01-build.sh b/aoe-data-analytics/deploy-scripts/01-build.sh new file mode 100755 index 000000000..2efcbe2aa --- /dev/null +++ b/aoe-data-analytics/deploy-scripts/01-build.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +# shellcheck source=../scripts/common-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../scripts/common-functions.sh" + +# shellcheck source=./deploy-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../deploy-scripts/deploy-functions.sh" +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../scripts/build-functions.sh" + + +function main { + local aoe_service_name="aoe-data-analytics" + local service_image_tag="AOE_DATA_ANALYTICS_TAG" + + cd "$repo" + + buildService "$aoe_service_name" "$service_image_tag" +} + +main + + diff --git a/aoe-data-analytics/deploy-scripts/02-push-image.sh b/aoe-data-analytics/deploy-scripts/02-push-image.sh new file mode 100755 index 000000000..e51d4993d --- /dev/null +++ b/aoe-data-analytics/deploy-scripts/02-push-image.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +# shellcheck source=../scripts/common-functions.sh +source "$( 
cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../scripts/common-functions.sh" + +# shellcheck source=./deploy-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../deploy-scripts/deploy-functions.sh" + +function main() { + setup + + local aoe_service_name="aoe-data-analytics" + local github_image_tag="$github_registry${aoe_service_name}:${IMAGE_TAG}" + + local ecr_registry="${REGISTRY}/$aoe_service_name" + local ecr_image_tag="${ecr_registry}:${IMAGE_TAG}" + upload_image_to_ecr "$github_image_tag" "$ecr_image_tag" +} + +function setup() { + cd "${repo}" + require_command docker + require_docker_compose + configure_aws_credentials + get_ecr_login_credentials +} + + +main "$@" diff --git a/aoe-data-analytics/docker-compose.prod.yml b/aoe-data-analytics/docker-compose.prod.yml deleted file mode 100644 index ef9dbd3c5..000000000 --- a/aoe-data-analytics/docker-compose.prod.yml +++ /dev/null @@ -1,27 +0,0 @@ -services: - aoe-etl-processor: - extra_hosts: - - 'host.docker.internal:host-gateway' - build: - context: . - dockerfile: ./service-etl-processor/Dockerfile - image: aoe-etl-processor:latest - container_name: aoe-etl-processor - restart: unless-stopped - ports: - - '8080:8080' - env_file: - - /environment/aoe-data-analytics/.env -# environment: -# SPRING_DATASOURCE_SECONDARY_USERNAME: '${PG_ROLE_REPORTER_SECONDARY}' -# SPRING_DATASOURCE_SECONDARY_PASSWORD: '${PG_ROLE_PASSWORD_REPORTER_SECONDARY}' - logging: - options: - max-size: '10m' - max-file: '3' - networks: - - apache-kafka_network-kafka - -networks: - apache-kafka_network-kafka: - external: true diff --git a/aoe-data-analytics/docker-compose.yml b/aoe-data-analytics/docker-compose.yml deleted file mode 100644 index 3a0e53abc..000000000 --- a/aoe-data-analytics/docker-compose.yml +++ /dev/null @@ -1,25 +0,0 @@ -services: - aoe-etl-processor: - extra_hosts: - - 'host.docker.internal:host-gateway' - build: - context: . 
- dockerfile: ./service-etl-processor/Dockerfile - image: aoe-etl-processor:latest - container_name: aoe-etl-processor - restart: unless-stopped - ports: - - '8080:8080' - env_file: - - ./service-etl-processor/.env - logging: - options: - max-size: '10m' - max-file: '3' - networks: - - network-analytics - -networks: - network-analytics: - external: true - name: network-analytics diff --git a/aoe-data-analytics/module-rdb-analytics/src/main/java/fi/csc/analytics/configuration/JPAConfigurationSecondary.java b/aoe-data-analytics/module-rdb-analytics/src/main/java/fi/csc/analytics/configuration/JPAConfigurationSecondary.java deleted file mode 100644 index cf526b57b..000000000 --- a/aoe-data-analytics/module-rdb-analytics/src/main/java/fi/csc/analytics/configuration/JPAConfigurationSecondary.java +++ /dev/null @@ -1,47 +0,0 @@ -package fi.csc.analytics.configuration; - -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.data.jpa.repository.config.EnableJpaRepositories; -import org.springframework.orm.jpa.JpaTransactionManager; -import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; -import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.transaction.annotation.EnableTransactionManagement; - -import javax.sql.DataSource; -import java.util.Objects; -import java.util.Properties; - -@Configuration -@EnableTransactionManagement -@EnableJpaRepositories( - basePackages = "fi.csc.analytics.repository.secondary", - entityManagerFactoryRef = "entityManagerFactorySecondary", - transactionManagerRef = "transactionManagerSecondary" -) -public class JPAConfigurationSecondary { - - @Bean(name = "entityManagerFactorySecondary") - public LocalContainerEntityManagerFactoryBean entityManagerFactorySecondary( - 
@Qualifier("dataSourceSecondary") DataSource dataSource) { - LocalContainerEntityManagerFactoryBean entityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean(); - entityManagerFactoryBean.setDataSource(dataSource); - entityManagerFactoryBean.setPackagesToScan("fi.csc.analytics.entity"); - entityManagerFactoryBean.setPersistenceUnitName("secondary"); - entityManagerFactoryBean.setJpaVendorAdapter(new HibernateJpaVendorAdapter()); - - Properties jpaProperties = new Properties(); - jpaProperties.put(org.hibernate.cfg.Environment.DIALECT, "org.hibernate.dialect.PostgreSQL95Dialect"); - entityManagerFactoryBean.setJpaProperties(jpaProperties); - - return entityManagerFactoryBean; - } - - @Bean(name = "transactionManagerSecondary") - public PlatformTransactionManager transactionManagerSecondary( - @Qualifier("entityManagerFactorySecondary") LocalContainerEntityManagerFactoryBean entityManagerFactorySecondary) { - return new JpaTransactionManager(Objects.requireNonNull(entityManagerFactorySecondary.getObject())); - } -} diff --git a/aoe-data-analytics/module-rdb-analytics/src/main/java/fi/csc/analytics/configuration/RDBConfiguration.java b/aoe-data-analytics/module-rdb-analytics/src/main/java/fi/csc/analytics/configuration/RDBConfiguration.java index 90c02c9ca..13f3d9aa4 100644 --- a/aoe-data-analytics/module-rdb-analytics/src/main/java/fi/csc/analytics/configuration/RDBConfiguration.java +++ b/aoe-data-analytics/module-rdb-analytics/src/main/java/fi/csc/analytics/configuration/RDBConfiguration.java @@ -21,12 +21,6 @@ public DataSourceProperties dataSourcePropertiesPrimary() { return new DataSourceProperties(); } - @Bean - @ConfigurationProperties("spring.datasource.secondary") - public DataSourceProperties dataSourcePropertiesSecondary() { - return new DataSourceProperties(); - } - @Primary @Bean(name = "dataSourcePrimary") public DataSource dataSourcePrimary() { @@ -35,21 +29,10 @@ public DataSource dataSourcePrimary() { .build(); } - @Bean(name = 
"dataSourceSecondary") - public DataSource dataSourceSecondary() { - return dataSourcePropertiesSecondary() - .initializeDataSourceBuilder() - .build(); - } - @Primary @Bean(name = "jdbcTemplatePrimary") public JdbcTemplate jdbcTemplatePrimary(@Qualifier("dataSourcePrimary") DataSource dataSource) { return new JdbcTemplate(dataSource); } - @Bean(name = "jdbcTemplateSecondary") - public JdbcTemplate jdbcTemplateSecondary(@Qualifier("dataSourceSecondary") DataSource dataSource) { - return new JdbcTemplate(dataSource); - } } diff --git a/aoe-data-analytics/module-rdb-analytics/src/main/java/fi/csc/analytics/repository/secondary/EducationalMaterialRepositorySecondary.java b/aoe-data-analytics/module-rdb-analytics/src/main/java/fi/csc/analytics/repository/secondary/EducationalMaterialRepositorySecondary.java deleted file mode 100644 index 32c7c672c..000000000 --- a/aoe-data-analytics/module-rdb-analytics/src/main/java/fi/csc/analytics/repository/secondary/EducationalMaterialRepositorySecondary.java +++ /dev/null @@ -1,76 +0,0 @@ -package fi.csc.analytics.repository.secondary; - -import fi.csc.analytics.entity.EducationalMaterial; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.data.jpa.repository.Query; -import org.springframework.data.repository.query.Param; -import org.springframework.stereotype.Repository; -import org.springframework.transaction.annotation.Transactional; - -import java.time.OffsetDateTime; - -@Repository -@Transactional(readOnly = true, transactionManager = "transactionManagerSecondary") -public interface EducationalMaterialRepositorySecondary extends JpaRepository { - - @Query(""" - select count(em) from EducationalMaterial em - join em.educationalLevels el - where el.educationalLevelKey = :educationalLevelKey""") - Long countByEducationalLevelKey(String educationalLevelKey); - - @Query(""" - select count(em) from EducationalMaterial em - join em.educationalLevels el - where el.educationalLevelKey = 
:educationalLevelKey - and em.publishedAt >= :startDate - and em.publishedAt < :endDate""") - Long countByEducationalLevelBetweenPublishDates( - @Param("educationalLevelKey") String educationalLevelKey, - @Param("startDate") OffsetDateTime startDate, - @Param("endDate") OffsetDateTime endDate); - - @Query(""" - select count(em) from EducationalMaterial em - join em.educationalLevels el - where el.educationalLevelKey = :educationalLevelKey - and em.expires < :expiresDate""") - Long countByEducationalLevelExpiresBefore( - @Param("educationalLevelKey") String educationalLevelKey, - @Param("expiresDate") OffsetDateTime expiresDate); - - @Query(""" - select count(em) from EducationalMaterial em - join em.alignmentObjects ao - where ao.objectKey = :objectKey""") - Long countByEducationalSubjectKey(String objectKey); - - @Query(""" - select count(em) from EducationalMaterial em - join em.alignmentObjects ao - where ao.objectKey = :objectKey - and em.publishedAt >= :startDate - and em.publishedAt < :endDate""") - Long countByEducationalSubjectBetweenPublishDates( - @Param("objectKey") String objectKey, - @Param("startDate") OffsetDateTime startDate, - @Param("endDate") OffsetDateTime endDate); - - @Query(""" - select count(em) from EducationalMaterial em - join em.authors a - where a.organizationKey = :organizationKey""") - Long countByOrganizationKey(@Param("organizationKey") String organizationKey); - - @Query(""" - select count(em) from EducationalMaterial em - join em.authors a - where a.organizationKey = :organizationKey - and em.publishedAt >= :startDate - and em.publishedAt < :endDate""") - Long countByOrganizationBetweenPublishDates( - @Param("organizationKey") String organizationKey, - @Param("startDate") OffsetDateTime startDate, - @Param("endDate") OffsetDateTime endDate); - -} diff --git a/aoe-data-analytics/module-rdb-analytics/src/main/resources/rdb.properties b/aoe-data-analytics/module-rdb-analytics/src/main/resources/rdb.properties index 
6c132a24a..0ad034d95 100644 --- a/aoe-data-analytics/module-rdb-analytics/src/main/resources/rdb.properties +++ b/aoe-data-analytics/module-rdb-analytics/src/main/resources/rdb.properties @@ -5,12 +5,6 @@ spring.datasource.primary.password= spring.datasource.primary.initialization-mode=never spring.datasource.primary.driver-class-name=org.postgresql.Driver -spring.datasource.secondary.url=jdbc:postgresql://aoe-postgres:5432/aoe -spring.datasource.secondary.username= -spring.datasource.secondary.password= -spring.datasource.secondary.initialization-mode=never -spring.datasource.secondary.driver-class-name=org.postgresql.Driver - spring.jpa.database-platform=org.hibernate.dialect.PostgreSQL95Dialect spring.jpa.hibernate.naming.physical-strategy=org.hibernate.boot.model.naming.PhysicalNamingStrategyStandardImpl spring.jpa.hibernate.naming.implicit-strategy=org.hibernate.boot.model.naming.ImplicitNamingStrategyLegacyJpaImpl diff --git a/aoe-data-analytics/module-rdb-analytics/src/test/java/fi/csc/analytics/repository/primary/EducationalMaterialRepositoryPrimaryTest.java b/aoe-data-analytics/module-rdb-analytics/src/test/java/fi/csc/analytics/repository/primary/EducationalMaterialRepositoryPrimaryTest.java deleted file mode 100644 index 2ee065c4e..000000000 --- a/aoe-data-analytics/module-rdb-analytics/src/test/java/fi/csc/analytics/repository/primary/EducationalMaterialRepositoryPrimaryTest.java +++ /dev/null @@ -1,13 +0,0 @@ -package fi.csc.analytics.repository.primary; - -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.*; - -// @DataJpaTest -class EducationalMaterialRepositoryPrimaryTest { - - @Test - void test() { - } -} \ No newline at end of file diff --git a/aoe-data-analytics/module-rdb-analytics/src/test/java/fi/csc/analytics/repository/secondary/EducationalMaterialRepositorySecondaryTest.java 
b/aoe-data-analytics/module-rdb-analytics/src/test/java/fi/csc/analytics/repository/secondary/EducationalMaterialRepositorySecondaryTest.java deleted file mode 100644 index 157015a68..000000000 --- a/aoe-data-analytics/module-rdb-analytics/src/test/java/fi/csc/analytics/repository/secondary/EducationalMaterialRepositorySecondaryTest.java +++ /dev/null @@ -1,13 +0,0 @@ -package fi.csc.analytics.repository.secondary; - -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.*; - -// @DataJpaTest -class EducationalMaterialRepositorySecondaryTest { - - @Test - void test() { - } -} \ No newline at end of file diff --git a/aoe-data-analytics/module-rdb-analytics/src/test/resources/test.properties b/aoe-data-analytics/module-rdb-analytics/src/test/resources/test.properties deleted file mode 100644 index e69de29bb..000000000 diff --git a/aoe-data-analytics/service-etl-processor/Dockerfile b/aoe-data-analytics/service-etl-processor/Dockerfile index 5898addab..371c5b75c 100644 --- a/aoe-data-analytics/service-etl-processor/Dockerfile +++ b/aoe-data-analytics/service-etl-processor/Dockerfile @@ -1,10 +1,27 @@ FROM maven:3.8.4-openjdk-17-slim as build + +RUN apt-get update && \ + apt-get upgrade -y && \ + apt-get install -y curl openssl perl --no-install-recommends && \ + apt-get clean + +COPY ./service-etl-processor/import_rds_certs.sh /certs/import_rds_certs.sh +#COPY import_rds_certs.sh /certs/import_rds_certs.sh + +RUN --mount=type=secret,id=trust_store_password,required=true \ + TRUST_STORE_PASSWORD=$(cat /run/secrets/trust_store_password) \ + /certs/import_rds_certs.sh + WORKDIR /app ADD . . 
RUN mvn package -DskipTests && \ sh -c "touch ./service-etl-processor/target/service-etl-processor-0.0.1-exec.jar" FROM openjdk:17-slim + +# Copy the certs folder from the build stage +COPY --from=build /certs /certs + WORKDIR /app COPY --from=build /app/service-etl-processor/target/service-etl-processor-0.0.1-exec.jar service-etl-processor.jar -ENTRYPOINT ["java", "-Xms512m", "-Xmx512m", "-Djava.security.egd=file:/dev/./urandom", "-jar", "service-etl-processor.jar"] +ENTRYPOINT ["sh", "-c", "java -Xms512m -Xmx512m -Djavax.net.ssl.trustStorePassword=$TRUST_STORE_PASS -Djavax.net.ssl.trustStore=/certs/rds-truststore.jks -Djava.security.egd=file:/dev/./urandom -jar service-etl-processor.jar"] diff --git a/aoe-data-analytics/service-etl-processor/import_rds_certs.sh b/aoe-data-analytics/service-etl-processor/import_rds_certs.sh new file mode 100755 index 000000000..6ffa76466 --- /dev/null +++ b/aoe-data-analytics/service-etl-processor/import_rds_certs.sh @@ -0,0 +1,33 @@ +#!/bin/sh +# +# sourced from AWS documentation: +# https://docs.aws.amazon.com/documentdb/latest/developerguide/connect_programmatically.html + +if [ -z "$TRUST_STORE_PASSWORD" ]; then + echo "ERROR: TRUST_STORE_PASSWORD is not set." 
+ exit 1 +fi + +mydir=/certs +truststore=${mydir}/rds-truststore.jks +storepassword="$TRUST_STORE_PASSWORD" + +mkdir -p ${mydir} + +cp "$JAVA_HOME"/lib/security/cacerts ${truststore} +chmod 644 ${truststore} + +keytool -storepasswd -keystore ${truststore} -storepass changeit -new "${storepassword}" + +curl -sS "https://truststore.pki.rds.amazonaws.com/global/global-bundle.pem" > ${mydir}/global-bundle.pem + +awk 'split_after == 1 {n++;split_after=0} /-----END CERTIFICATE-----/ {split_after=1}{print > "rds-ca-" n ".pem"}' < ${mydir}/global-bundle.pem + +for CERT in rds-ca-*; do + alias=$(openssl x509 -noout -text -in "$CERT" | perl -ne 'next unless /Subject:/; s/.*(CN=|CN = )//; print') + echo "Importing $alias" + keytool -import -file "${CERT}" -alias "${alias}" -storepass "${storepassword}" -keystore ${truststore} -noprompt + rm "$CERT" +done + +rm ${mydir}/global-bundle.pem diff --git a/aoe-data-analytics/service-etl-processor/pom.xml b/aoe-data-analytics/service-etl-processor/pom.xml index 7f8c56371..c3eb9edb9 100644 --- a/aoe-data-analytics/service-etl-processor/pom.xml +++ b/aoe-data-analytics/service-etl-processor/pom.xml @@ -53,6 +53,11 @@ kafka-streams 3.3.1 + + software.amazon.msk + aws-msk-iam-auth + 2.2.0 + org.springframework.kafka spring-kafka diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/ServiceEtlProcessorApplication.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/ServiceEtlProcessorApplication.java index 45580d41d..375fb1fdb 100644 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/ServiceEtlProcessorApplication.java +++ b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/ServiceEtlProcessorApplication.java @@ -1,5 +1,7 @@ package fi.csc.processor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.boot.Banner; import org.springframework.boot.SpringApplication; import 
org.springframework.boot.autoconfigure.SpringBootApplication; @@ -7,9 +9,12 @@ @SpringBootApplication public class ServiceEtlProcessorApplication { + private static final Logger LOG = LoggerFactory.getLogger(ServiceEtlProcessorApplication.class.getSimpleName()); + public static void main(String[] args) { SpringApplication app = new SpringApplication(ServiceEtlProcessorApplication.class); app.setBannerMode(Banner.Mode.OFF); app.run(args); } + } diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/ApplicationConfiguration.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/ApplicationConfiguration.java index d58ae923b..c5d24cc1b 100644 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/ApplicationConfiguration.java +++ b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/ApplicationConfiguration.java @@ -1,7 +1,6 @@ package fi.csc.processor.configuration; import fi.csc.analytics.configuration.JPAConfigurationPrimary; -import fi.csc.analytics.configuration.JPAConfigurationSecondary; import fi.csc.analytics.configuration.RDBConfiguration; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.data.jpa.JpaRepositoriesAutoConfiguration; @@ -12,7 +11,7 @@ @ComponentScan(value = "fi.csc", excludeFilters = @ComponentScan.Filter({ Configuration.class })) @PropertySource("classpath:rdb.properties") @EnableAutoConfiguration(exclude = JpaRepositoriesAutoConfiguration.class) -@Import({ RDBConfiguration.class, JPAConfigurationPrimary.class, JPAConfigurationSecondary.class }) +@Import({ RDBConfiguration.class, JPAConfigurationPrimary.class}) public class ApplicationConfiguration { public static PropertySourcesPlaceholderConfigurer propertyConfigInDev() { diff --git 
a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/KafkaConsumerConfiguration.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/KafkaConsumerConfiguration.java index a642b86f6..9e3c77ba1 100644 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/KafkaConsumerConfiguration.java +++ b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/KafkaConsumerConfiguration.java @@ -2,11 +2,13 @@ import fi.csc.processor.model.request.MaterialActivity; import fi.csc.processor.model.request.SearchRequest; +import fi.csc.processor.utils.KafkaConfigUtil; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.CooperativeStickyAssignor; import org.apache.kafka.common.serialization.StringDeserializer; import org.apache.kafka.common.serialization.StringSerializer; import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.kafka.annotation.EnableKafka; @@ -19,7 +21,7 @@ import java.util.HashMap; import java.util.Map; -// @ConditionalOnProperty(value = "kafka.enabled", matchIfMissing = true) +@ConditionalOnProperty(value = "kafka.enabled", matchIfMissing = true) @EnableKafka @Configuration public class KafkaConsumerConfiguration { @@ -33,11 +35,14 @@ public class KafkaConsumerConfiguration { @Value(value = "${kafka.group-id.prod-search-requests}") private String groupSearchRequestsPrimary; - @Value(value = "${kafka.group-id.material-activity}") - private String groupMaterialActivitySecondary; + @Value(value = "${kafka.sasl.enable}") + private boolean saslEnabled; - @Value(value = "${kafka.group-id.search-requests}") - private String groupSearchRequestsSecondary; + @Value(value = 
"${trust.store.pass}") + private String trustStorePassword; + + @Value("${trust.store.location}") + private String trustStoreLocation; @Bean public ConsumerFactory consumerFactoryMaterialActivityPrimary() { @@ -47,10 +52,16 @@ public ConsumerFactory consumerFactoryMaterialActivity config.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, CooperativeStickyAssignor.class.getName()); config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringSerializer.class); config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonSerializer.class); - // config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); + + if (saslEnabled) { + config.putAll(KafkaConfigUtil.saslConfig(trustStorePassword, trustStoreLocation)); + } + return new DefaultKafkaConsumerFactory<>(config, new StringDeserializer(), new JsonDeserializer<>(MaterialActivity.class)); } + + @Bean public ConcurrentKafkaListenerContainerFactory kafkaListenerMaterialActivityPrimary() { ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory<>(); @@ -66,7 +77,11 @@ public ConsumerFactory consumerFactorySearchRequestsPrima config.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, CooperativeStickyAssignor.class.getName()); config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringSerializer.class); config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonSerializer.class); - // config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); + + if (saslEnabled) { + config.putAll(KafkaConfigUtil.saslConfig(trustStorePassword, trustStoreLocation)); + } + return new DefaultKafkaConsumerFactory<>(config, new StringDeserializer(), new JsonDeserializer<>(SearchRequest.class)); } @@ -77,41 +92,4 @@ public ConcurrentKafkaListenerContainerFactory kafkaListe return factory; } - @Bean - public ConsumerFactory consumerFactoryMaterialActivitySecondary() { - Map config = new HashMap<>(); - config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); - 
config.put(ConsumerConfig.GROUP_ID_CONFIG, groupMaterialActivitySecondary); - config.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, CooperativeStickyAssignor.class.getName()); - config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringSerializer.class); - config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonSerializer.class); - // config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); - return new DefaultKafkaConsumerFactory<>(config, new StringDeserializer(), new JsonDeserializer<>(MaterialActivity.class)); - } - - @Bean - public ConcurrentKafkaListenerContainerFactory kafkaListenerMaterialActivitySecondary() { - ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory<>(); - factory.setConsumerFactory(consumerFactoryMaterialActivitySecondary()); - return factory; - } - - @Bean - public ConsumerFactory consumerFactorySearchRequestsSecondary() { - Map config = new HashMap<>(); - config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); - config.put(ConsumerConfig.GROUP_ID_CONFIG, groupSearchRequestsSecondary); - config.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, CooperativeStickyAssignor.class.getName()); - config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringSerializer.class); - config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonSerializer.class); - // config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); - return new DefaultKafkaConsumerFactory<>(config, new StringDeserializer(), new JsonDeserializer<>(SearchRequest.class)); - } - - @Bean - public ConcurrentKafkaListenerContainerFactory kafkaListenerSearchRequestsSecondary() { - ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory<>(); - factory.setConsumerFactory(consumerFactorySearchRequestsSecondary()); - return factory; - } } diff --git 
a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/KafkaProducerConfiguration.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/KafkaProducerConfiguration.java index 3799e2616..557cd2d39 100644 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/KafkaProducerConfiguration.java +++ b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/KafkaProducerConfiguration.java @@ -2,11 +2,13 @@ import fi.csc.processor.model.request.MaterialActivity; import fi.csc.processor.model.request.SearchRequest; +import fi.csc.processor.utils.KafkaConfigUtil; import org.apache.kafka.clients.admin.AdminClientConfig; import org.apache.kafka.clients.admin.NewTopic; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.serialization.StringSerializer; import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.kafka.config.TopicBuilder; @@ -19,7 +21,7 @@ import java.util.HashMap; import java.util.Map; -// @ConditionalOnProperty(value = "kafka.enabled", matchIfMissing = true) +@ConditionalOnProperty(value = "kafka.enabled", matchIfMissing = true) @Configuration public class KafkaProducerConfiguration { @@ -32,19 +34,26 @@ public class KafkaProducerConfiguration { @Value(value = "${kafka.topic.prod-search-requests}") private String topicSearchRequestsPrimary; - @Value(value = "${kafka.topic.material-activity}") - private String topicMaterialActivitySecondary; + @Value(value = "${kafka.sasl.enable}") + private boolean saslEnabled; - @Value(value = "${kafka.topic.search-requests}") - private String topicSearchRequestsSecondary; + @Value(value = "${trust.store.pass}") + private String 
trustStorePassword; + + @Value("${trust.store.location}") + private String trustStoreLocation; @Bean public ProducerFactory producerFactoryMaterialActivity() { Map config = new HashMap<>(); config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); - // configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class); + + if (saslEnabled) { + config.putAll(KafkaConfigUtil.saslConfig(trustStorePassword, trustStoreLocation)); + } + return new DefaultKafkaProducerFactory<>(config); } @@ -58,8 +67,12 @@ public ProducerFactory producerFactorySearchRequest() { Map config = new HashMap<>(); config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); - // configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class); + + if (saslEnabled) { + config.putAll(KafkaConfigUtil.saslConfig(trustStorePassword, trustStoreLocation)); + } + return new DefaultKafkaProducerFactory<>(config); } @@ -72,6 +85,15 @@ public KafkaTemplate kafkaTemplateSearchRequest() { public KafkaAdmin kafkaAdmin() { Map configs = new HashMap<>(); configs.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); + + if (saslEnabled) { + configs.putAll(KafkaConfigUtil.saslConfig(trustStorePassword, trustStoreLocation)); + } + + return createKafkaAdmin(configs); + } + + private KafkaAdmin createKafkaAdmin(Map configs) { return new KafkaAdmin(configs); } @@ -91,19 +113,4 @@ public NewTopic topicSearchRequestsPrimary() { .build(); } - @Bean - public NewTopic topicMaterialActivitySecondary() { - return TopicBuilder.name(topicMaterialActivitySecondary) - .partitions(2) - .replicas(2) - .build(); - } - - @Bean - public 
NewTopic topicSearchRequestsSecondary() { - return TopicBuilder.name(topicSearchRequestsSecondary) - .partitions(2) - .replicas(2) - .build(); - } } diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/MongoPrimaryConfiguration.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/MongoPrimaryConfiguration.java index 30d01a418..4a208521e 100644 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/MongoPrimaryConfiguration.java +++ b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/MongoPrimaryConfiguration.java @@ -6,8 +6,8 @@ import com.mongodb.client.MongoClient; import com.mongodb.client.MongoClients; import fi.csc.processor.converter.TimeFormatConverter; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.mongo.MongoProperties; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties; @@ -35,6 +35,9 @@ mongoTemplateRef = "primaryMongoTemplate") public class MongoPrimaryConfiguration { + @Value("${mongodb.primary.enable.ssl}") + private boolean enableSsl; + @Primary @Bean(name = "primaryProperties") @ConfigurationProperties("mongodb.primary") @@ -44,7 +47,14 @@ public MongoProperties primaryProperties() { @Bean(name = "primaryMongoClient") public MongoClient mongoClient(@Qualifier("primaryProperties") MongoProperties mongoProperties) { - return MongoClients.create(MongoClientSettings.builder() + + MongoClientSettings.Builder builder = MongoClientSettings.builder(); + + if (enableSsl) { + builder.applyToSslSettings(b -> b.enabled(true).invalidHostNameAllowed(true)); + } + + return MongoClients.create(builder 
.credential(MongoCredential.createCredential( mongoProperties.getUsername(), mongoProperties.getDatabase(), diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/MongoSecondaryConfiguration.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/MongoSecondaryConfiguration.java deleted file mode 100644 index ef5c30c4c..000000000 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/configuration/MongoSecondaryConfiguration.java +++ /dev/null @@ -1,64 +0,0 @@ -package fi.csc.processor.configuration; - -import com.mongodb.MongoClientSettings; -import com.mongodb.MongoCredential; -import com.mongodb.ServerAddress; -import com.mongodb.client.MongoClient; -import com.mongodb.client.MongoClients; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.boot.autoconfigure.mongo.MongoProperties; -import org.springframework.boot.context.properties.ConfigurationProperties; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.data.mongodb.MongoDatabaseFactory; -import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; -import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper; -import org.springframework.data.mongodb.core.convert.MappingMongoConverter; -import org.springframework.data.mongodb.repository.config.EnableMongoRepositories; - -import static java.util.Collections.singletonList; - -@Configuration -@EnableConfigurationProperties -@EnableMongoRepositories( - basePackages = "fi.csc.processor.repository.secondary", - mongoTemplateRef = "secondaryMongoTemplate") -public class MongoSecondaryConfiguration { - - @Bean(name = "secondaryProperties") - 
@ConfigurationProperties("mongodb.secondary") - public MongoProperties secondaryProperties() { - return new MongoProperties(); - } - - @Bean(name = "secondaryMongoClient") - public MongoClient mongoClient(@Qualifier("secondaryProperties") MongoProperties mongoProperties) { - return MongoClients.create(MongoClientSettings.builder() - .credential(MongoCredential.createCredential( - mongoProperties.getUsername(), - mongoProperties.getDatabase(), - mongoProperties.getPassword())) - .applyToClusterSettings(settings -> settings.hosts(singletonList(new ServerAddress( - mongoProperties.getHost(), - mongoProperties.getPort() - )))) - .build()); - } - - @Bean(name = "secondaryMongoDBFactory") - public MongoDatabaseFactory mongoDatabaseFactory( - @Qualifier("secondaryMongoClient") MongoClient mongoClient, - @Qualifier("secondaryProperties") MongoProperties mongoProperties) { - return new SimpleMongoClientDatabaseFactory(mongoClient, mongoProperties.getDatabase()); - } - - @Bean(name = "secondaryMongoTemplate") - public MongoTemplate mongoTemplate( - @Qualifier("secondaryMongoDBFactory") MongoDatabaseFactory databaseFactory, - MappingMongoConverter converter) { - converter.setTypeMapper(new DefaultMongoTypeMapper(null)); - return new MongoTemplate(databaseFactory, converter); - } -} diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/consumer/KafkaConsumer.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/consumer/KafkaConsumer.java index b9a44ba83..52b904d98 100644 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/consumer/KafkaConsumer.java +++ b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/consumer/KafkaConsumer.java @@ -6,11 +6,10 @@ import fi.csc.processor.model.document.SearchRequestDocument; import fi.csc.processor.repository.primary.MaterialActivityPrimaryRepository; import fi.csc.processor.repository.primary.SearchRequestPrimaryRepository; -import 
fi.csc.processor.repository.secondary.MaterialActivitySecondaryRepository; -import fi.csc.processor.repository.secondary.SearchRequestSecondaryRepository; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.kafka.annotation.KafkaListener; import org.springframework.kafka.listener.ConsumerSeekAware; import org.springframework.kafka.support.KafkaHeaders; @@ -23,32 +22,26 @@ import java.time.format.DateTimeFormatter; @Service +@ConditionalOnProperty(value = "kafka.enabled", matchIfMissing = true) public class KafkaConsumer implements ConsumerSeekAware { private final Logger LOG = LoggerFactory.getLogger(KafkaConsumer.class.getSimpleName()); private final DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'") .withZone(ZoneId.of("UTC")); private final MaterialActivityPrimaryRepository materialActivityPrimaryRepository; private final SearchRequestPrimaryRepository searchRequestPrimaryRepository; - private final MaterialActivitySecondaryRepository materialActivitySecondaryRepository; - private final SearchRequestSecondaryRepository searchRequestSecondaryRepository; @Autowired - public KafkaConsumer( - MaterialActivityPrimaryRepository materialActivityPrimaryRepository, - SearchRequestPrimaryRepository searchRequestPrimaryRepository, - MaterialActivitySecondaryRepository materialActivitySecondaryRepository, - SearchRequestSecondaryRepository searchRequestSecondaryRepository) { + public KafkaConsumer(MaterialActivityPrimaryRepository materialActivityPrimaryRepository, + SearchRequestPrimaryRepository searchRequestPrimaryRepository) { this.materialActivityPrimaryRepository = materialActivityPrimaryRepository; this.searchRequestPrimaryRepository = searchRequestPrimaryRepository; - this.materialActivitySecondaryRepository = materialActivitySecondaryRepository; - 
this.searchRequestSecondaryRepository = searchRequestSecondaryRepository; } @KafkaListener( topics = "${kafka.topic.prod-material-activity}", groupId = "${kafka.group-id.prod-material-activity}", containerFactory = "kafkaListenerMaterialActivityPrimary", - autoStartup = "true", + autoStartup = "${kafka.consumer.auto.startup}", properties = {"enable.auto.commit:false", "auto.offset.reset:latest"}) public void consumeMaterialActivityPrimary( @Payload MaterialActivity materialActivity, // byte[] payload @@ -67,7 +60,7 @@ public void consumeMaterialActivityPrimary( topics = "${kafka.topic.prod-search-requests}", groupId = "${kafka.group-id.prod-search-requests}", containerFactory = "kafkaListenerSearchRequestsPrimary", - autoStartup = "true", + autoStartup = "${kafka.consumer.auto.startup}", properties = {"enable.auto.commit:false", "auto.offset.reset:latest"}) public void consumeSearchRequestsPrimary( @Payload SearchRequest searchRequest, // byte[] payload @@ -81,40 +74,4 @@ public void consumeSearchRequestsPrimary( LOG.debug(String.format("Consumed message -> %s [offset=%d]", searchRequest, offset)); } - @KafkaListener( - topics = "${kafka.topic.material-activity}", - groupId = "${kafka.group-id.material-activity}", - containerFactory = "kafkaListenerMaterialActivitySecondary", - autoStartup = "true", - properties = {"enable.auto.commit:false", "auto.offset.reset:latest"}) - public void consumeMaterialActivitySecondary( - @Payload MaterialActivity materialActivity, // byte[] payload - @Header(KafkaHeaders.OFFSET) int offset) { - MaterialActivityDocument materialActivityDocument = new MaterialActivityDocument(); - materialActivityDocument.setTimestamp(LocalDateTime.parse(materialActivity.getTimestamp(), formatter)); - materialActivityDocument.setSessionId(materialActivity.getSessionId()); - materialActivityDocument.setEduMaterialId(materialActivity.getEduMaterialId()); - materialActivityDocument.setInteraction(materialActivity.getInteraction()); - 
materialActivityDocument.setMetadata(materialActivity.getMetadata()); - materialActivitySecondaryRepository.save(materialActivityDocument); - LOG.debug(String.format("Consumed message -> %s [offset=%d]", materialActivity, offset)); - } - - @KafkaListener( - topics = "${kafka.topic.search-requests}", - groupId = "${kafka.group-id.search-requests}", - containerFactory = "kafkaListenerSearchRequestsSecondary", - autoStartup = "true", - properties = {"enable.auto.commit:false", "auto.offset.reset:latest"}) - public void consumeSearchRequestsSecondary( - @Payload SearchRequest searchRequest, // byte[] payload - @Header(KafkaHeaders.OFFSET) int offset) { - SearchRequestDocument searchRequestDocument = new SearchRequestDocument(); - searchRequestDocument.setTimestamp(LocalDateTime.parse(searchRequest.getTimestamp(), formatter)); - searchRequestDocument.setSessionId(searchRequest.getSessionId()); - searchRequestDocument.setKeywords(searchRequest.getKeywords()); - searchRequestDocument.setFilters(searchRequest.getFilters()); - searchRequestSecondaryRepository.save(searchRequestDocument); - LOG.debug(String.format("Consumed message -> %s [offset=%d]", searchRequest, offset)); - } } diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/controller/KafkaController.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/controller/KafkaController.java index 044a89d6e..de794e2fb 100644 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/controller/KafkaController.java +++ b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/controller/KafkaController.java @@ -44,21 +44,4 @@ public CompletableFuture> sendMessageToKafkaTopicPrimary( }); } - @PostMapping(path = "/test/materialactivity", consumes = MediaType.APPLICATION_JSON_VALUE) - public CompletableFuture> sendMessageToKafkaTopicSecondary( - @RequestBody MaterialActivity materialActivity) { - return async(() -> { - 
this.kafkaProducer.sendMaterialActivitySecondary(materialActivity); - return new ResponseEntity<>(HttpStatus.ACCEPTED); - }); - } - - @PostMapping(path = "/test/searchrequests", consumes = MediaType.APPLICATION_JSON_VALUE) - public CompletableFuture> sendMessageToKafkaTopicSecondary( - @RequestBody SearchRequest searchRequest) { - return async(() -> { - this.kafkaProducer.sendSearchRequestsSecondary(searchRequest); - return new ResponseEntity<>(HttpStatus.ACCEPTED); - }); - } } diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/controller/StatisticsController.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/controller/StatisticsController.java index 23ac9525c..b1d14c3d4 100644 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/controller/StatisticsController.java +++ b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/controller/StatisticsController.java @@ -1,7 +1,6 @@ package fi.csc.processor.controller; import fi.csc.processor.enumeration.Interval; -import fi.csc.processor.enumeration.TargetEnv; import fi.csc.processor.model.document.MaterialActivityDocument; import fi.csc.processor.model.document.SearchRequestDocument; import fi.csc.processor.model.request.EducationalLevelTotalRequest; @@ -38,75 +37,67 @@ public class StatisticsController { this.timeSeriesService = timeSeriesService; } - @PostMapping(path = "/{target}/educationallevel/all", + @PostMapping(path = "/prod/educationallevel/all", consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE) public CompletableFuture>> getEducationalLevelDistribution( - @PathVariable(value = "target") TargetEnv targetEnv, @RequestBody EducationalLevelTotalRequest educationalLevelTotalRequest) { return async(() -> new ResponseEntity<>(this.statisticsService.getEducationalLevelDistribution( - educationalLevelTotalRequest, targetEnv), HttpStatus.OK)); + 
educationalLevelTotalRequest), HttpStatus.OK)); } - @PostMapping(path = "/{target}/educationallevel/expired", + @PostMapping(path = "/prod/educationallevel/expired", consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE) public CompletableFuture>> getEducationalLevelExpired( - @PathVariable(value = "target") TargetEnv targetEnv, @RequestBody EducationalLevelTotalRequest educationalLevelTotalRequest) { if (educationalLevelTotalRequest.getExpiredBefore() != null) { return async(() -> new ResponseEntity<>(this.statisticsService.getEducationalLevelExpired( - educationalLevelTotalRequest, targetEnv), HttpStatus.OK)); + educationalLevelTotalRequest), HttpStatus.OK)); } else { return async(() -> new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } } - @PostMapping(path = "/{target}/educationalsubject/all", + @PostMapping(path = "/prod/educationalsubject/all", consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE) public CompletableFuture>> getEducationalSubjectDistribution( - @PathVariable(value = "target") TargetEnv targetEnv, @RequestBody EducationalSubjectTotalRequest educationalSubjectTotalRequest) { return async(() -> new ResponseEntity<>(this.statisticsService.getEducationalSubjectDistribution( - educationalSubjectTotalRequest, targetEnv), HttpStatus.OK)); + educationalSubjectTotalRequest), HttpStatus.OK)); } - @PostMapping(path = "/{target}/organization/all", + @PostMapping(path = "/prod/organization/all", consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE) public CompletableFuture>> getOrganizationDistribution( - @PathVariable(value = "target") TargetEnv targetEnv, @RequestBody OrganizationTotalRequest organizationTotalRequest) { return async(() -> new ResponseEntity<>(this.statisticsService.getOrganizationDistribution( - organizationTotalRequest, targetEnv), HttpStatus.OK)); + organizationTotalRequest), HttpStatus.OK)); } - @PostMapping(path = 
"/{target}/materialactivity/{interval}/total", + @PostMapping(path = "/prod/materialactivity/{interval}/total", consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE) public CompletableFuture>> getMaterialActivityTotalByInterval( - @PathVariable(value = "target") TargetEnv targetEnv, @PathVariable(value = "interval") Interval interval, @RequestBody IntervalTotalRequest intervalTotalRequest) { return async(() -> new ResponseEntity<>(this.timeSeriesService.getTotalByInterval( interval, intervalTotalRequest, - MaterialActivityDocument.class, - targetEnv), HttpStatus.OK)); + MaterialActivityDocument.class), HttpStatus.OK)); } - @PostMapping(path = "/{target}/searchrequests/{interval}/total", + @PostMapping(path = "/prod/searchrequests/{interval}/total", consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE) public CompletableFuture>> getSearchRequestsTotalByInterval( - @PathVariable(value = "target") TargetEnv targetEnv, @PathVariable(value = "interval") Interval interval, @RequestBody IntervalTotalRequest intervalTotalRequest) { return async(() -> new ResponseEntity<>(this.timeSeriesService.getTotalByInterval( interval, intervalTotalRequest, - SearchRequestDocument.class, - targetEnv), HttpStatus.OK)); + SearchRequestDocument.class), HttpStatus.OK)); } } diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/controller/StatusController.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/controller/StatusController.java index 0f299ec03..5529163de 100644 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/controller/StatusController.java +++ b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/controller/StatusController.java @@ -13,7 +13,7 @@ @RestController public class StatusController { - @GetMapping(path = "/status", consumes = MediaType.APPLICATION_JSON_VALUE, produces = 
MediaType.TEXT_PLAIN_VALUE) + @GetMapping(path = "/status", produces = MediaType.TEXT_PLAIN_VALUE) public CompletableFuture> getStatus() { return async(() -> new ResponseEntity<>("Service operable: true", HttpStatus.OK)); } diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/converter/StringToTargetEnvConverter.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/converter/StringToTargetEnvConverter.java deleted file mode 100644 index 9fa0ddc40..000000000 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/converter/StringToTargetEnvConverter.java +++ /dev/null @@ -1,14 +0,0 @@ -package fi.csc.processor.converter; - -import fi.csc.processor.annotation.RequestParameterConverter; -import fi.csc.processor.enumeration.TargetEnv; -import org.springframework.core.convert.converter.Converter; - -@RequestParameterConverter -public class StringToTargetEnvConverter implements Converter { - - @Override - public TargetEnv convert(String source) { - return TargetEnv.decode(source); - } -} diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/enumeration/TargetEnv.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/enumeration/TargetEnv.java deleted file mode 100644 index f365d9659..000000000 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/enumeration/TargetEnv.java +++ /dev/null @@ -1,30 +0,0 @@ -package fi.csc.processor.enumeration; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonValue; - -import java.util.stream.Stream; - -public enum TargetEnv { - PROD("prod"), - TEST("test"); - - private final String value; - - TargetEnv(String value) { - this.value = value; - } - - @JsonCreator - public static TargetEnv decode(final String value) { - return Stream.of(TargetEnv.values()) - .filter(targetEnum -> targetEnum.value.equalsIgnoreCase(value)) - .findFirst() - 
.orElse(null); - } - - @JsonValue - public String getValue() { - return value; - } -} diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/model/statistics/RecordDateValue.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/model/statistics/RecordDateValue.java deleted file mode 100644 index af218dca0..000000000 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/model/statistics/RecordDateValue.java +++ /dev/null @@ -1,6 +0,0 @@ -package fi.csc.processor.model.statistics; - -import java.io.Serializable; -import java.time.LocalDate; - -public record RecordDateValue(LocalDate date, Integer value) implements Serializable {} diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/producer/KafkaProducer.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/producer/KafkaProducer.java index 5712816f1..efd6c680f 100644 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/producer/KafkaProducer.java +++ b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/producer/KafkaProducer.java @@ -24,11 +24,6 @@ public class KafkaProducer { @Value(value = "${kafka.topic.prod-search-requests}") private String topicSearchRequestsPrimary; - @Value(value = "${kafka.topic.material-activity}") - private String topicMaterialActivitySecondary; - - @Value(value = "${kafka.topic.search-requests}") - private String topicSearchRequestsSecondary; @Autowired private KafkaProducer( @@ -76,41 +71,4 @@ public void onFailure(Throwable ex) { }); } - public void sendMaterialActivitySecondary(MaterialActivity materialActivity) { - LOG.info(String.format("Producing message -> %s", materialActivity)); - - ListenableFuture> future = this.kafkaTemplateMaterialActivity.send(topicMaterialActivitySecondary, materialActivity); - - future.addCallback(new ListenableFutureCallback<>() { - - @Override - public void 
onSuccess(SendResult result) { - LOG.info(String.format("Sent message with offset=%s", result.getRecordMetadata().offset())); - } - - @Override - public void onFailure(Throwable ex) { - LOG.error(String.format("Unable to send message \"%s\" due to : ", ex.getMessage())); - } - }); - } - - public void sendSearchRequestsSecondary(SearchRequest searchRequest) { - LOG.info(String.format("Producing message -> %s", searchRequest)); - - ListenableFuture> future = this.kafkaTemplateSearchRequests.send(topicSearchRequestsSecondary, searchRequest); - - future.addCallback(new ListenableFutureCallback<>() { - - @Override - public void onSuccess(SendResult result) { - LOG.info(String.format("Sent message with offset=%s", result.getRecordMetadata().offset())); - } - - @Override - public void onFailure(Throwable ex) { - LOG.error(String.format("Unable to send message \"%s\" due to : ", ex.getMessage())); - } - }); - } } diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/repository/primary/MaterialActivityPrimaryRepository.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/repository/primary/MaterialActivityPrimaryRepository.java index a4189c51b..e13942915 100644 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/repository/primary/MaterialActivityPrimaryRepository.java +++ b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/repository/primary/MaterialActivityPrimaryRepository.java @@ -7,14 +7,4 @@ @Repository public interface MaterialActivityPrimaryRepository extends MongoRepository { -// "{'$project': {'timestamp': 1, day: {$day: '$timestamp'}}}", -// "{'$group': {'_id': {day: '$day'}, dayTotal: {'$sum': 1}}}" -// @Aggregation(pipeline = { -// "{$project: {total: 1}}", -// "{$match: {timestamp: {$gte: ?0, $lt: ?1}}}", -// "{$group: {_id: {year: {$year: 'timestamp'}, month: {$month: 'timestamp'}, day: {$day: 'timestamp'}}, total: {$sum: 1}}}" -// }) -// List 
getDailySums(LocalDateTime since, LocalDateTime until); -// List getDailySums(LocalDateTime since, LocalDateTime until); -// List findByTimestampBetweenOrderByTimestampAsc(LocalDateTime since, LocalDateTime until); } diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/repository/primary/SearchRequestPrimaryRepository.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/repository/primary/SearchRequestPrimaryRepository.java index 2f482e56d..84b7e2673 100644 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/repository/primary/SearchRequestPrimaryRepository.java +++ b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/repository/primary/SearchRequestPrimaryRepository.java @@ -7,7 +7,4 @@ @Repository public interface SearchRequestPrimaryRepository extends MongoRepository { - - // No custom methods - } diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/repository/secondary/MaterialActivitySecondaryRepository.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/repository/secondary/MaterialActivitySecondaryRepository.java deleted file mode 100644 index 7e27be3c5..000000000 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/repository/secondary/MaterialActivitySecondaryRepository.java +++ /dev/null @@ -1,24 +0,0 @@ -package fi.csc.processor.repository.secondary; - -import fi.csc.processor.model.document.MaterialActivityDocument; -import org.bson.types.ObjectId; -import org.springframework.data.mongodb.repository.MongoRepository; -import org.springframework.stereotype.Repository; - -@Repository -public interface MaterialActivitySecondaryRepository extends MongoRepository { - - // No custom methods - -} - -// "{'$project': {'timestamp': 1, day: {$day: '$timestamp'}}}", -// "{'$group': {'_id': {day: '$day'}, dayTotal: {'$sum': 1}}}" -// @Aggregation(pipeline = { -// "{$project: 
{total: 1}}", -// "{$match: {timestamp: {$gte: ?0, $lt: ?1}}}", -// "{$group: {_id: {year: {$year: 'timestamp'}, month: {$month: 'timestamp'}, day: {$day: 'timestamp'}}, total: {$sum: 1}}}" -// }) -// List getDailySums(LocalDateTime since, LocalDateTime until); -// List getDailySums(LocalDateTime since, LocalDateTime until); -// List findByTimestampBetweenOrderByTimestampAsc(LocalDateTime since, LocalDateTime until); diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/repository/secondary/SearchRequestSecondaryRepository.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/repository/secondary/SearchRequestSecondaryRepository.java deleted file mode 100644 index 20b9c551b..000000000 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/repository/secondary/SearchRequestSecondaryRepository.java +++ /dev/null @@ -1,13 +0,0 @@ -package fi.csc.processor.repository.secondary; - -import fi.csc.processor.model.document.SearchRequestDocument; -import org.bson.types.ObjectId; -import org.springframework.data.mongodb.repository.MongoRepository; -import org.springframework.stereotype.Repository; - -@Repository -public interface SearchRequestSecondaryRepository extends MongoRepository { - - // No custom methods - -} diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/service/StatisticsService.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/service/StatisticsService.java index ab9dcf244..997718326 100644 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/service/StatisticsService.java +++ b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/service/StatisticsService.java @@ -1,8 +1,6 @@ package fi.csc.processor.service; import fi.csc.analytics.repository.primary.EducationalMaterialRepositoryPrimary; -import fi.csc.analytics.repository.secondary.EducationalMaterialRepositorySecondary; 
-import fi.csc.processor.enumeration.TargetEnv; import fi.csc.processor.model.request.EducationalLevelTotalRequest; import fi.csc.processor.model.request.EducationalSubjectTotalRequest; import fi.csc.processor.model.request.OrganizationTotalRequest; @@ -10,8 +8,6 @@ import fi.csc.processor.model.statistics.StatisticsMeta; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -19,19 +15,14 @@ @Service public class StatisticsService { private final EducationalMaterialRepositoryPrimary educationalMaterialRepositoryPrimary; - private final EducationalMaterialRepositorySecondary educationalMaterialRepositorySecondary; @Autowired - public StatisticsService( - EducationalMaterialRepositoryPrimary educationalMaterialRepositoryPrimary, - EducationalMaterialRepositorySecondary educationalMaterialRepositorySecondary) { + public StatisticsService(EducationalMaterialRepositoryPrimary educationalMaterialRepositoryPrimary) { this.educationalMaterialRepositoryPrimary = educationalMaterialRepositoryPrimary; - this.educationalMaterialRepositorySecondary = educationalMaterialRepositorySecondary; } public StatisticsMeta getEducationalLevelDistribution( - EducationalLevelTotalRequest educationalLevelTotalRequest, - TargetEnv targetEnv) { + EducationalLevelTotalRequest educationalLevelTotalRequest) { List values = null; if (educationalLevelTotalRequest.getSince() != null && @@ -40,12 +31,8 @@ public StatisticsMeta getEducationalLevelDistribution( educationalLevelTotalRequest.getEducationalLevels().length > 0) { values = Arrays.stream(educationalLevelTotalRequest.getEducationalLevels()) .map(e -> { - Long total = switch (targetEnv) { - case PROD -> this.educationalMaterialRepositoryPrimary.countByEducationalLevelBetweenPublishDates( - e, educationalLevelTotalRequest.getSince(), 
educationalLevelTotalRequest.getUntil()); - case TEST -> this.educationalMaterialRepositorySecondary.countByEducationalLevelBetweenPublishDates( + Long total = this.educationalMaterialRepositoryPrimary.countByEducationalLevelBetweenPublishDates( e, educationalLevelTotalRequest.getSince(), educationalLevelTotalRequest.getUntil()); - }; return new RecordKeyValue(e, total); }) .toList(); @@ -53,10 +40,7 @@ public StatisticsMeta getEducationalLevelDistribution( educationalLevelTotalRequest.getEducationalLevels().length > 0) { values = Arrays.stream(educationalLevelTotalRequest.getEducationalLevels()) .map(e -> { - Long total = switch (targetEnv) { - case PROD -> this.educationalMaterialRepositoryPrimary.countByEducationalLevelKey(e); - case TEST -> this.educationalMaterialRepositorySecondary.countByEducationalLevelKey(e); - }; + Long total = this.educationalMaterialRepositoryPrimary.countByEducationalLevelKey(e); return new RecordKeyValue(e, total); }) .toList(); @@ -70,16 +54,11 @@ public StatisticsMeta getEducationalLevelDistribution( } public StatisticsMeta getEducationalLevelExpired( - EducationalLevelTotalRequest educationalLevelTotalRequest, - TargetEnv targetEnv) { + EducationalLevelTotalRequest educationalLevelTotalRequest) { List values = Arrays.stream(educationalLevelTotalRequest.getEducationalLevels()) .map(e -> { - Long total = switch (targetEnv) { - case PROD -> this.educationalMaterialRepositoryPrimary.countByEducationalLevelExpiresBefore( - e, educationalLevelTotalRequest.getExpiredBefore()); - case TEST -> this.educationalMaterialRepositorySecondary.countByEducationalLevelExpiresBefore( + Long total = this.educationalMaterialRepositoryPrimary.countByEducationalLevelExpiresBefore( e, educationalLevelTotalRequest.getExpiredBefore()); - }; return new RecordKeyValue(e, total); }) .toList(); @@ -90,29 +69,21 @@ public StatisticsMeta getEducationalLevelExpired( } public StatisticsMeta getEducationalSubjectDistribution( - EducationalSubjectTotalRequest 
educationalSubjectTotalRequest, - TargetEnv targetEnv) { + EducationalSubjectTotalRequest educationalSubjectTotalRequest) { List values; if (educationalSubjectTotalRequest.getSince() != null && educationalSubjectTotalRequest.getUntil() != null) { values = Arrays.stream(educationalSubjectTotalRequest.getEducationalSubjects()) .map(e -> { - Long total = switch (targetEnv) { - case PROD -> this.educationalMaterialRepositoryPrimary.countByEducationalSubjectBetweenPublishDates( + Long total = this.educationalMaterialRepositoryPrimary.countByEducationalSubjectBetweenPublishDates( e, educationalSubjectTotalRequest.getSince(), educationalSubjectTotalRequest.getUntil()); - case TEST -> this.educationalMaterialRepositorySecondary.countByEducationalSubjectBetweenPublishDates( - e, educationalSubjectTotalRequest.getSince(), educationalSubjectTotalRequest.getUntil()); - }; return new RecordKeyValue(e, total); }) .toList(); } else { values = Arrays.stream(educationalSubjectTotalRequest.getEducationalSubjects()) .map(e -> { - Long total = switch (targetEnv) { - case PROD -> this.educationalMaterialRepositoryPrimary.countByEducationalSubjectKey(e); - case TEST -> this.educationalMaterialRepositorySecondary.countByEducationalSubjectKey(e); - }; + Long total = this.educationalMaterialRepositoryPrimary.countByEducationalSubjectKey(e); return new RecordKeyValue(e, total); }) .toList(); @@ -125,29 +96,21 @@ public StatisticsMeta getEducationalSubjectDistribution( } public StatisticsMeta getOrganizationDistribution( - OrganizationTotalRequest organizationTotalRequest, - TargetEnv targetEnv) { + OrganizationTotalRequest organizationTotalRequest) { List values; if (organizationTotalRequest.getSince() != null && organizationTotalRequest.getUntil() != null) { values = Arrays.stream(organizationTotalRequest.getOrganizations()) .map(e -> { - Long total = switch (targetEnv) { - case PROD -> this.educationalMaterialRepositoryPrimary.countByOrganizationBetweenPublishDates( - e, 
organizationTotalRequest.getSince(), organizationTotalRequest.getUntil()); - case TEST -> this.educationalMaterialRepositorySecondary.countByOrganizationBetweenPublishDates( + Long total = this.educationalMaterialRepositoryPrimary.countByOrganizationBetweenPublishDates( e, organizationTotalRequest.getSince(), organizationTotalRequest.getUntil()); - }; return new RecordKeyValue(e, total); }) .toList(); } else { values = Arrays.stream(organizationTotalRequest.getOrganizations()) .map(e -> { - Long total = switch (targetEnv) { - case PROD -> this.educationalMaterialRepositoryPrimary.countByOrganizationKey(e); - case TEST -> this.educationalMaterialRepositorySecondary.countByOrganizationKey(e); - }; + Long total = this.educationalMaterialRepositoryPrimary.countByOrganizationKey(e); return new RecordKeyValue(e, total); }) .toList(); diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/service/TimeSeriesService.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/service/TimeSeriesService.java index 4bdb3cf0b..4ebf70749 100644 --- a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/service/TimeSeriesService.java +++ b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/service/TimeSeriesService.java @@ -1,7 +1,6 @@ package fi.csc.processor.service; import fi.csc.processor.enumeration.Interval; -import fi.csc.processor.enumeration.TargetEnv; import fi.csc.processor.model.request.IntervalTotalRequest; import fi.csc.processor.model.statistics.IntervalTotal; import fi.csc.processor.model.statistics.StatisticsMeta; @@ -23,14 +22,10 @@ public class TimeSeriesService { private static final Logger LOG = LoggerFactory.getLogger(TimeSeriesService.class.getSimpleName()); private final MongoTemplate mongoPrimaryTemplate; - private final MongoTemplate mongoSecondaryTemplate; @Autowired - TimeSeriesService( - @Qualifier("primaryMongoTemplate") MongoTemplate mongoPrimaryTemplate, - 
@Qualifier("secondaryMongoTemplate") MongoTemplate mongoSecondaryTemplate) { + TimeSeriesService(@Qualifier("primaryMongoTemplate") MongoTemplate mongoPrimaryTemplate) { this.mongoPrimaryTemplate = mongoPrimaryTemplate; - this.mongoSecondaryTemplate = mongoSecondaryTemplate; } /** @@ -54,13 +49,9 @@ public class TimeSeriesService { public StatisticsMeta getTotalByInterval( Interval interval, IntervalTotalRequest intervalTotalRequest, - Class targetCollection, - TargetEnv targetEnv) { - MongoTemplate mongoTemplate = switch (targetEnv) { - case PROD -> this.mongoPrimaryTemplate; - case TEST -> this.mongoSecondaryTemplate; - }; - AggregationResults result = mongoTemplate.aggregate( + Class targetCollection + ) { + AggregationResults result = this.mongoPrimaryTemplate.aggregate( buildAggregationConfiguration(interval, intervalTotalRequest), targetCollection, IntervalTotal.class); diff --git a/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/utils/KafkaConfigUtil.java b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/utils/KafkaConfigUtil.java new file mode 100644 index 000000000..28ca9f43b --- /dev/null +++ b/aoe-data-analytics/service-etl-processor/src/main/java/fi/csc/processor/utils/KafkaConfigUtil.java @@ -0,0 +1,24 @@ +package fi.csc.processor.utils; + +import org.apache.kafka.clients.CommonClientConfigs; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.config.SslConfigs; + +import java.util.Map; + +public class KafkaConfigUtil { + + private KafkaConfigUtil() { + // no instance creation allowed + } + + public static Map saslConfig(String trustStorePassword, String trustStoreLocation) { + return Map.of( + SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, trustStoreLocation, + SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, trustStorePassword, + CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL", + SaslConfigs.SASL_MECHANISM, "AWS_MSK_IAM", + SaslConfigs.SASL_JAAS_CONFIG, 
"software.amazon.msk.auth.iam.IAMLoginModule required;", + SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS, "software.amazon.msk.auth.iam.IAMClientCallbackHandler"); + } +} diff --git a/aoe-data-analytics/service-etl-processor/src/main/resources/META-INF/additional-spring-configuration-metadata.json b/aoe-data-analytics/service-etl-processor/src/main/resources/META-INF/additional-spring-configuration-metadata.json index 981a83038..14497895f 100644 --- a/aoe-data-analytics/service-etl-processor/src/main/resources/META-INF/additional-spring-configuration-metadata.json +++ b/aoe-data-analytics/service-etl-processor/src/main/resources/META-INF/additional-spring-configuration-metadata.json @@ -25,31 +25,6 @@ "type": "java.lang.String", "description": "MongoDB production password." }, - { - "name": "mongodb.secondary.host", - "type": "java.lang.String", - "description": "MongoDB test host." - }, - { - "name": "mongodb.secondary.port", - "type": "java.lang.String", - "description": "MongoDB test port." - }, - { - "name": "mongodb.secondary.database", - "type": "java.lang.String", - "description": "MongoDB test database." - }, - { - "name": "mongodb.secondary.username", - "type": "java.lang.String", - "description": "MongoDB test username." - }, - { - "name": "mongodb.secondary.password", - "type": "java.lang.String", - "description": "MongoDB test password." 
- }, { "name": "kafka.group-id.prod-material-activity", "type": "java.lang.String", diff --git a/aoe-data-analytics/service-etl-processor/src/main/resources/application.properties b/aoe-data-analytics/service-etl-processor/src/main/resources/application.properties index f741f0613..0fd89adba 100644 --- a/aoe-data-analytics/service-etl-processor/src/main/resources/application.properties +++ b/aoe-data-analytics/service-etl-processor/src/main/resources/application.properties @@ -1,53 +1,40 @@ -## Application Properties -logging.level.fi.csc=ERROR -logging.level.org.apache.kafka=ERROR -logging.level.org.springframework=ERROR - -server.forward-headers-strategy=native -server.port=8080 -server.servlet.context-path=/api - -spring.profiles.active=dev -spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration - -## MongoDB Properties (prod) -mongodb.primary.host=localhost -mongodb.primary.port=27017 -mongodb.primary.database= -mongodb.primary.username= -mongodb.primary.password= - -## MongoDB Properties (test) -mongodb.secondary.host=localhost -mongodb.secondary.port=27017 -mongodb.secondary.database= -mongodb.secondary.username= -mongodb.secondary.password= - -## Kafka Cluster Properties -spring.kafka.consumer.bootstrap-servers=localhost:19092,localhost:19092,localhost:19092 -spring.kafka.consumer.auto-offset-reset=latest -spring.kafka.consumer.enable-auto-commit=true -# auto-commit-interval: 5000 -spring.kafka.producer.batch-size=10 -spring.kafka.producer.client-id=aoe-kafka-client -spring.kafka.producer.bootstrap-servers=localhost:19092,localhost:19092,localhost:19092 - -## Custom properties -# kafka.enabled=true - -## Kafka Group IDs (prod) -kafka.group-id.prod-material-activity=group-prod-material-activity -kafka.group-id.prod-search-requests=group-prod-search-requests - -## Kafka Topics (prod) -kafka.topic.prod-material-activity=prod_material_activity -kafka.topic.prod-search-requests=prod_search_requests - -## Kafka Group IDs 
(test) -kafka.group-id.material-activity=group-material-activity -kafka.group-id.search-requests=group-search-requests - -## Kafka Topics (test) -kafka.topic.material-activity=material_activity -kafka.topic.search-requests=search_requests +## Application Properties +logging.level.fi.csc=ERROR +logging.level.org.apache.kafka=ERROR +logging.level.org.springframework=ERROR + +server.forward-headers-strategy=native +server.port=8080 +server.servlet.context-path=/api + +spring.profiles.active=dev +spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration + +## MongoDB Properties +mongodb.primary.enable.ssl=false +mongodb.primary.host=localhost +mongodb.primary.port=27017 +mongodb.primary.database= +mongodb.primary.username= +mongodb.primary.password= + +## Kafka Cluster Properties +kafka.enabled=true +kafka.sasl.enable=false +spring.kafka.consumer.bootstrap-servers=localhost:19092,localhost:19092,localhost:19092 +spring.kafka.consumer.auto-offset-reset=latest +spring.kafka.consumer.enable-auto-commit=true +kafka.consumer.auto.startup=true + +# auto-commit-interval: 5000 +spring.kafka.producer.batch-size=10 +spring.kafka.producer.client-id=aoe-kafka-client +spring.kafka.producer.bootstrap-servers=localhost:19092,localhost:19092,localhost:19092 + +## Kafka Group IDs (prod) +kafka.group-id.prod-material-activity=group-prod-material-activity +kafka.group-id.prod-search-requests=group-prod-search-requests + +## Kafka Topics (prod) +kafka.topic.prod-material-activity=prod_material_activity +kafka.topic.prod-search-requests=prod_search_requests diff --git a/aoe-data-analytics/service-etl-processor/src/test/java/fi/csc/processor/controller/StatisticsControllerTest.java b/aoe-data-analytics/service-etl-processor/src/test/java/fi/csc/processor/controller/StatisticsControllerTest.java index d75ea0d4f..b50ab2495 100644 --- a/aoe-data-analytics/service-etl-processor/src/test/java/fi/csc/processor/controller/StatisticsControllerTest.java +++ 
b/aoe-data-analytics/service-etl-processor/src/test/java/fi/csc/processor/controller/StatisticsControllerTest.java @@ -1,7 +1,6 @@ package fi.csc.processor.controller; import com.fasterxml.jackson.databind.ObjectMapper; -import fi.csc.processor.enumeration.TargetEnv; import fi.csc.processor.model.request.EducationalLevelTotalRequest; import fi.csc.processor.model.statistics.StatisticsMeta; import fi.csc.processor.service.StatisticsService; @@ -20,7 +19,6 @@ import org.springframework.test.web.servlet.setup.MockMvcBuilders; import org.springframework.web.context.WebApplicationContext; -import static org.junit.jupiter.api.Assertions.*; import static org.mockito.ArgumentMatchers.any; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; @@ -54,8 +52,7 @@ public void setUp() { .webAppContextSetup(webApplicationContext) .build(); Mockito.when(statisticsService.getEducationalLevelDistribution( - any(EducationalLevelTotalRequest.class), - any(TargetEnv.class))).thenReturn(new StatisticsMeta<>()); + any(EducationalLevelTotalRequest.class))).thenReturn(new StatisticsMeta<>()); } @Test diff --git a/aoe-data-analytics/service-etl-processor/src/test/java/fi/csc/processor/service/TimeSeriesServiceTest.java b/aoe-data-analytics/service-etl-processor/src/test/java/fi/csc/processor/service/TimeSeriesServiceTest.java deleted file mode 100644 index 36115d562..000000000 --- a/aoe-data-analytics/service-etl-processor/src/test/java/fi/csc/processor/service/TimeSeriesServiceTest.java +++ /dev/null @@ -1,12 +0,0 @@ -package fi.csc.processor.service; - -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.*; - -class TimeSeriesServiceTest { - -// @Test - void getTotalByInterval() { - } -} \ No newline at end of file diff --git a/aoe-data-analytics/service-etl-processor/src/test/resources/META-INF/additional-spring-configuration-metadata.json 
b/aoe-data-analytics/service-etl-processor/src/test/resources/META-INF/additional-spring-configuration-metadata.json index 981a83038..14497895f 100644 --- a/aoe-data-analytics/service-etl-processor/src/test/resources/META-INF/additional-spring-configuration-metadata.json +++ b/aoe-data-analytics/service-etl-processor/src/test/resources/META-INF/additional-spring-configuration-metadata.json @@ -25,31 +25,6 @@ "type": "java.lang.String", "description": "MongoDB production password." }, - { - "name": "mongodb.secondary.host", - "type": "java.lang.String", - "description": "MongoDB test host." - }, - { - "name": "mongodb.secondary.port", - "type": "java.lang.String", - "description": "MongoDB test port." - }, - { - "name": "mongodb.secondary.database", - "type": "java.lang.String", - "description": "MongoDB test database." - }, - { - "name": "mongodb.secondary.username", - "type": "java.lang.String", - "description": "MongoDB test username." - }, - { - "name": "mongodb.secondary.password", - "type": "java.lang.String", - "description": "MongoDB test password." 
- }, { "name": "kafka.group-id.prod-material-activity", "type": "java.lang.String", diff --git a/aoe-data-services/oaipmh-provider/src/main/resources/application-test.properties b/aoe-data-services/.env.template similarity index 50% rename from aoe-data-services/oaipmh-provider/src/main/resources/application-test.properties rename to aoe-data-services/.env.template index 8b7b1bf93..d8871f54e 100644 --- a/aoe-data-services/oaipmh-provider/src/main/resources/application-test.properties +++ b/aoe-data-services/.env.template @@ -1,10 +1,9 @@ -server.port=8002 - -# IDENTIFIER FOR THE DEMO INSTANCE USAGE -aoe.oai-identifier.repository-identifier=demo.aoe.fi - -# AOE REQUEST PARAMETERS -aoe.request.per-page=20 - -aoe.request.url=https://aoe.fi/api/v1/oaipmh/metadata - +server.port=8002 + +aoe.oai-identifier.repository-identifier=demo.aoe.fi + +# AOE REQUEST PARAMETERS +aoe.request.per-page=20 +aoe.identify.base-url=https://demo.aoe.fi/meta/oaipmh +aoe.request.url=http://aoe-web-backend:3000/api/v1/oaipmh/metadata + diff --git a/aoe-data-services/.gitignore b/aoe-data-services/.gitignore index 429f1067b..82190eebe 100644 --- a/aoe-data-services/.gitignore +++ b/aoe-data-services/.gitignore @@ -4,6 +4,9 @@ target/ !**/src/main/** !**/src/test/** +# env file +.env + ### STS ### .apt_generated .classpath diff --git a/aoe-data-services/.gitlab-ci.yml b/aoe-data-services/.gitlab-ci.yml deleted file mode 100644 index 7efb7a594..000000000 --- a/aoe-data-services/.gitlab-ci.yml +++ /dev/null @@ -1,61 +0,0 @@ -variables: - DOCKER_HOST: unix:///var/run/docker.sock - DOCKER_DRIVER: overlay2 - -image: docker:24.0.6 - -stages: - - build - - deploy - - cleanup - -build_test: - stage: build - script: - - echo "TEST build" - - docker compose -f docker-compose.test.yml build - only: - - test - tags: - - prod-oaipmh - -deploy_test: - stage: deploy - script: - - echo "TEST deploy" - - docker compose -f docker-compose.test.yml up -d - only: - - test - tags: - - prod-oaipmh - -build_prod: - 
stage: build - script: - - echo "PROD build" - - docker compose -f docker-compose.prod.yml build - only: - - main - tags: - - prod-oaipmh - -deploy_prod: - stage: deploy - script: - - echo "PROD deploy" - - docker compose -f docker-compose.prod.yml up -d - only: - - main - tags: - - prod-oaipmh - -cleanup: - stage: cleanup - script: - - echo "Cleaning up the system" - - docker system prune -a -f --volumes - only: - - test - - main - tags: - - prod-oaipmh diff --git a/aoe-data-services/README.md b/aoe-data-services/README.md index 6791d4c99..f6532c125 100644 --- a/aoe-data-services/README.md +++ b/aoe-data-services/README.md @@ -1,13 +1,3 @@ -# [AOE - Library of Open Educational Resources](https://github.com/CSCfi/aoe) - -## Service Component links in GitHub (mirrored) -- [aoe-data-analytics](https://github.com/CSCfi/aoe-data-analytics) -- aoe-data-services -- [aoe-semantic-apis](https://github.com/CSCfi/aoe-semantic-apis) -- [aoe-streaming-app](https://github.com/CSCfi/aoe-streaming-app) -- [aoe-web-backend](https://github.com/CSCfi/aoe-web-backend) -- [aoe-web-frontend](https://github.com/CSCfi/aoe-web-frontend) - # AOE Data Services ## OAI-PMH Provider @@ -21,16 +11,3 @@ ### Description Integration service for metadata harvesting from external systems. Service interface implements [OAI-PMH protocol](https://www.openarchives.org/OAI/2.0/openarchivesprotocol.htm). 
- -### Management - -#### Build and run the test instance -``` -$ sudo docker-compose -f docker-compose.test.yml build -$ sudo docker-compose -f docker-compose.test.yml up -``` -#### Build and run the production instance -``` -$ sudo docker-compose -f docker-compose.prod.yml build -$ sudo docker-compose -f docker-compose.prod.yml up -``` diff --git a/aoe-data-services/deploy-scripts/01-build.sh b/aoe-data-services/deploy-scripts/01-build.sh new file mode 100755 index 000000000..4736d8d8c --- /dev/null +++ b/aoe-data-services/deploy-scripts/01-build.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +# shellcheck source=../scripts/common-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../scripts/common-functions.sh" + +# shellcheck source=./deploy-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../deploy-scripts/deploy-functions.sh" +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../scripts/build-functions.sh" + + +function main { + local aoe_service_name="aoe-data-services" + local service_image_tag="AOE_DATA_SERVICES_TAG" + + cd "$repo" + + buildService "$aoe_service_name" "$service_image_tag" +} + +main + + diff --git a/aoe-data-services/deploy-scripts/02-push-image.sh b/aoe-data-services/deploy-scripts/02-push-image.sh new file mode 100755 index 000000000..c781d32e4 --- /dev/null +++ b/aoe-data-services/deploy-scripts/02-push-image.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +# shellcheck source=../scripts/common-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../scripts/common-functions.sh" + +# shellcheck source=./deploy-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../deploy-scripts/deploy-functions.sh" + +function main() { + setup + + local aoe_service_name="aoe-data-services" + local github_image_tag="$github_registry${aoe_service_name}:${IMAGE_TAG}" + + local 
ecr_registry="${REGISTRY}/$aoe_service_name" + local ecr_image_tag="${ecr_registry}:${IMAGE_TAG}" + upload_image_to_ecr "$github_image_tag" "$ecr_image_tag" +} + +function setup() { + cd "${repo}" + require_command docker + require_docker_compose + configure_aws_credentials + get_ecr_login_credentials +} + + +main "$@" diff --git a/aoe-data-services/docker-compose.prod.yml b/aoe-data-services/docker-compose.prod.yml deleted file mode 100644 index 70afe6afc..000000000 --- a/aoe-data-services/docker-compose.prod.yml +++ /dev/null @@ -1,24 +0,0 @@ -services: - oaipmh-provider: - build: - context: . - dockerfile: ./oaipmh-provider/Dockerfile - image: prod-oaipmh-provider:latest - container_name: prod-oaipmh-provider - restart: unless-stopped - ports: - - '8001:8001' - env_file: - - /environment/aoe-data-services/prod/.env - logging: - options: - max-size: '10m' - max-file: '3' - environment: - SPRING_PROFILES_ACTIVE: 'prod' - networks: - - network-prod-oaipmh-provider - -networks: - network-prod-oaipmh-provider: - driver: bridge diff --git a/aoe-data-services/docker-compose.test.yml b/aoe-data-services/docker-compose.test.yml deleted file mode 100644 index 460ae5a9a..000000000 --- a/aoe-data-services/docker-compose.test.yml +++ /dev/null @@ -1,20 +0,0 @@ -services: - test-oaipmh-provider: - build: - context: . 
- dockerfile: ./oaipmh-provider/Dockerfile - image: test-oaipmh-provider:latest - container_name: test-oaipmh-provider - restart: unless-stopped - ports: - - '8002:8002' - env_file: - - /environment/aoe-data-services/test/.env - environment: - SPRING_PROFILES_ACTIVE: 'test' - networks: - - network-test-oaipmh-provider - -networks: - network-test-oaipmh-provider: - driver: bridge diff --git a/aoe-data-services/oaipmh-provider/src/main/java/fi/csc/provider/adapter/OaiPmhDateFormatter.java b/aoe-data-services/oaipmh-provider/src/main/java/fi/csc/provider/adapter/OaiPmhDateFormatter.java index d2c309410..028889f44 100644 --- a/aoe-data-services/oaipmh-provider/src/main/java/fi/csc/provider/adapter/OaiPmhDateFormatter.java +++ b/aoe-data-services/oaipmh-provider/src/main/java/fi/csc/provider/adapter/OaiPmhDateFormatter.java @@ -6,12 +6,13 @@ public class OaiPmhDateFormatter { + private OaiPmhDateFormatter(){ + // no instance creation allowed + } + private static final DateTimeFormatter OAI_DATETIME = DateTimeFormatter .ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'") .withZone(ZoneId.of("UTC")); - /*private static final DateTimeFormatter ISO_DATETIME = DateTimeFormatter - .ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'") - .withZone(ZoneId.of("UTC"));*/ public static LocalDateTime convertToIso(String value) { return LocalDateTime.parse(value, OAI_DATETIME); diff --git a/aoe-data-services/oaipmh-provider/src/main/java/fi/csc/provider/configuration/RestConfiguration.java b/aoe-data-services/oaipmh-provider/src/main/java/fi/csc/provider/configuration/RestConfiguration.java index 4e703e87b..ae83ddea8 100644 --- a/aoe-data-services/oaipmh-provider/src/main/java/fi/csc/provider/configuration/RestConfiguration.java +++ b/aoe-data-services/oaipmh-provider/src/main/java/fi/csc/provider/configuration/RestConfiguration.java @@ -36,16 +36,11 @@ public ObjectMapper objectMapper(Jackson2ObjectMapperBuilder builder) { objectMapper.enable(SerializationFeature.WRITE_ENUMS_USING_TO_STRING); 
objectMapper.enable(DeserializationFeature.READ_ENUMS_USING_TO_STRING); objectMapper.enable(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT); - // ObjectMapper objectMapper = Jackson2ObjectMapperBuilder.xml().build(); - // objectMapper.enable(JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER); - // objectMapper.enable(JsonGenerator.Feature.ESCAPE_NON_ASCII); - // objectMapper.configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true); - // objectMapper.registerModule(new JsonldModule()); return objectMapper; } @Bean - @Profile({"prod", "test"}) + @Profile({"prod"}) public RestTemplate restTemplate() throws NoSuchAlgorithmException, KeyStoreException, KeyManagementException { final SSLContext sslcontext = SSLContexts.custom() .loadTrustMaterial(null, new TrustAllStrategy()) diff --git a/aoe-data-services/oaipmh-provider/src/main/java/fi/csc/provider/service/impl/MigrationServiceImpl.java b/aoe-data-services/oaipmh-provider/src/main/java/fi/csc/provider/service/impl/MigrationServiceImpl.java index fc2f8069b..7756173e5 100644 --- a/aoe-data-services/oaipmh-provider/src/main/java/fi/csc/provider/service/impl/MigrationServiceImpl.java +++ b/aoe-data-services/oaipmh-provider/src/main/java/fi/csc/provider/service/impl/MigrationServiceImpl.java @@ -62,7 +62,6 @@ public LrmiMetadata migrateAoeToLrmi(AoeMetadata amd) { private void setDublinCoreData(AoeMetadata amd, LrmiMetadata lrmi) { // ID set temporarily to be moved to the header block after the metadata migration. - // lrmi.setIdentifier("oai:aoe.fi:" + amd.getId()); lrmi.setIdentifier("oai:" + env.getProperty("aoe.oai-identifier.repository-identifier") + ":" + amd.getId()); // dc:identifier @@ -77,7 +76,7 @@ private void setDublinCoreData(AoeMetadata amd, LrmiMetadata lrmi) { .collect(Collectors.toList()) : null); // dc:date - lrmi.setDate(amd.getCreatedat()); // updated ??? + lrmi.setDate(amd.getCreatedat()); // dc:description // Descriptions of the educational material. 
@@ -170,7 +169,6 @@ private void setLrmiData(AoeMetadata amd, LrmiMetadata lrmi) { // lrmi_fi:material // Educational material file or link. lrmi.setMaterial(amd.getMaterials() == null ? null : amd.getMaterials().stream() - // .filter(m -> !m.getOriginalfilename().isEmpty() && !m.getFilepath().isEmpty() && !m.getMimetype().isEmpty()) .map(m -> { Material material = new Material(); @@ -299,7 +297,6 @@ private void setLrmiData(AoeMetadata amd, LrmiMetadata lrmi) { // AlignmentObjects // Alignment types NOT found in learningResourceTypes (list) are converted into alignment objects. lrmi.setAlignmentObject(amd.getAlignmentobject() == null ? null : amd.getAlignmentobject().stream() - //.filter(a -> !learningResourceTypes.contains(a.getAlignmenttype())) .filter(a -> learningResourceTypes.stream().noneMatch(a.getAlignmenttype()::equalsIgnoreCase)) .map(a -> { AlignmentObject alignmentObject = new AlignmentObject(); diff --git a/aoe-data-services/oaipmh-provider/src/main/java/fi/csc/provider/validation/XmlValidatorApplication.java b/aoe-data-services/oaipmh-provider/src/main/java/fi/csc/provider/validation/XmlValidatorApplication.java index 658df4646..86d0763ae 100644 --- a/aoe-data-services/oaipmh-provider/src/main/java/fi/csc/provider/validation/XmlValidatorApplication.java +++ b/aoe-data-services/oaipmh-provider/src/main/java/fi/csc/provider/validation/XmlValidatorApplication.java @@ -14,8 +14,8 @@ public class XmlValidatorApplication { public static void main(String[] args) { - String xmlFile = "C:\\Users\\mroppone\\_project\\aoe-metadata-provider\\oaipmh-provider\\src\\main\\resources\\xml-oai\\oai_dc.xml"; - String xsdFile = "C:\\Users\\mroppone\\_project\\aoe-metadata-provider\\oaipmh-provider\\src\\main\\resources\\xml-oai\\oai_dc.xsd"; + String xmlFile = "\\oai_dc.xml"; + String xsdFile = "\\oai_dc.xsd"; validateXmlFile(xmlFile, xsdFile); } @@ -30,7 +30,6 @@ private static void validateXmlFile(String xmlFile, String xsdFile) { Schema schema = 
schemaFactory.newSchema(xsdSource); // Multiple XSD files - // Schema schema = schemaFactory.newSchema(new Source[] {new StreamSource(new File(xsdFile1)), new StreamSource(new File(xsdFile2))}); Validator validator = schema.newValidator(); validator.validate(xmlSource); System.out.println(xmlSource.getSystemId() + " is valid"); diff --git a/aoe-data-services/oaipmh-provider/src/test/java/fi/csc/oaipmh/ProviderRestApplicationTests.java b/aoe-data-services/oaipmh-provider/src/test/java/fi/csc/oaipmh/ProviderRestApplicationTests.java deleted file mode 100644 index f3f8f66c0..000000000 --- a/aoe-data-services/oaipmh-provider/src/test/java/fi/csc/oaipmh/ProviderRestApplicationTests.java +++ /dev/null @@ -1,11 +0,0 @@ -package fi.csc.oaipmh; - -import org.junit.jupiter.api.Test; -import org.springframework.boot.test.context.SpringBootTest; - -@SpringBootTest -public class ProviderRestApplicationTests { - - // @Test - public void contextLoads() {} -} diff --git a/aoe-data-services/oaipmh-provider/src/test/resources/META-INF/additional-spring-configuration-metadata.json b/aoe-data-services/oaipmh-provider/src/test/resources/META-INF/additional-spring-configuration-metadata.json deleted file mode 100644 index 55fc2040d..000000000 --- a/aoe-data-services/oaipmh-provider/src/test/resources/META-INF/additional-spring-configuration-metadata.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "properties": [ - { - "name": "aoe.identify.repository-name", - "type": "java.lang.String", - "description": "Organization and metadata service name." - }, - { - "name": "aoe.identify.base-url", - "type": "java.lang.String", - "description": "Base URL for harvesting requests." - }, - { - "name": "aoe.identify.protocol-version", - "type": "java.lang.String", - "description": "OAI-PMH protocol version." - }, - { - "name": "aoe.identify.admin-email", - "type": "java.lang.String", - "description": "Administrative contact email." 
- }, - { - "name": "aoe.identify.earliest-datestamp", - "type": "java.lang.String", - "description": "Earliest record timestamp." - }, - { - "name": "aoe.identify.deleted-record", - "type": "java.lang.String", - "description": "Deleted record visibility policy." - }, - { - "name": "aoe.identify.granularity", - "type": "java.lang.String", - "description": "Timestamp accuracy." - }, - { - "name": "aoe.identify.compression", - "type": "java.lang.String", - "description": "Compression method." - }, - { - "name": "aoe.oai-identifier.scheme", - "type": "java.lang.String", - "description": "Document scheme standard." - }, - { - "name": "aoe.oai-identifier.repository-identifier", - "type": "java.lang.String", - "description": "Service root domain." - }, - { - "name": "aoe.oai-identifier.delimeter", - "type": "java.lang.String", - "description": "Delimeter character used." - }, - { - "name": "aoe.oai-identifier.sample-identifier", - "type": "java.lang.String", - "description": "Sample metadata identity." - }, - { - "name": "aoe.request.per-page", - "type": "java.lang.Integer", - "description": "AOE metadata per page." - }, - { - "name": "aoe.request.url", - "type": "java.lang.String", - "description": "AOE metadata URL." 
- } - ] -} \ No newline at end of file diff --git a/aoe-data-services/oaipmh-provider/src/test/resources/test.properties b/aoe-data-services/oaipmh-provider/src/test/resources/test.properties deleted file mode 100644 index feb8b13f2..000000000 --- a/aoe-data-services/oaipmh-provider/src/test/resources/test.properties +++ /dev/null @@ -1,3 +0,0 @@ -# AOE REQUEST PARAMETERS -aoe.request.per-page=100 -aoe.request.url=https://demo.aoe.fi/api/oajpmh/materialMetaData \ No newline at end of file diff --git a/aoe-infra/.eslintrc.cjs b/aoe-infra/.eslintrc.cjs new file mode 100644 index 000000000..a55de2d0b --- /dev/null +++ b/aoe-infra/.eslintrc.cjs @@ -0,0 +1,21 @@ +module.exports = { + extends: ['plugin:prettier/recommended'], + plugins: ['prettier'], + parser: "@typescript-eslint/parser", + parserOptions: { + ecmaFeatures: { + jsx: true + }, ecmaVersion: 2018, sourceType: 'module' + }, + rules: { + curly: 'error', + 'no-magic-numbers': 'off', + eqeqeq: 'error', + 'no-undef-init': 'error', + 'no-unneeded-ternary': 'error', + 'no-var': 'error', + 'prefer-promise-reject-errors': 'error', + 'prefer-template': 'error', + '@typescript-eslint/explicit-module-boundary-types': 'off' + } +} diff --git a/aoe-infra/infra/.gitignore b/aoe-infra/.gitignore similarity index 100% rename from aoe-infra/infra/.gitignore rename to aoe-infra/.gitignore diff --git a/aoe-infra/infra/.npmignore b/aoe-infra/.npmignore similarity index 100% rename from aoe-infra/infra/.npmignore rename to aoe-infra/.npmignore diff --git a/aoe-infra/.package-lock.json.checksum b/aoe-infra/.package-lock.json.checksum new file mode 100644 index 000000000..12a5280fd --- /dev/null +++ b/aoe-infra/.package-lock.json.checksum @@ -0,0 +1 @@ +12c108619db0bd3cfc03044ed21348470faca37b package-lock.json diff --git a/aoe-infra/.prettierrc b/aoe-infra/.prettierrc new file mode 100644 index 000000000..cc8396b96 --- /dev/null +++ b/aoe-infra/.prettierrc @@ -0,0 +1,19 @@ +{ + "arrowParens": "always", + "bracketSameLine": true, + 
"bracketSpacing": true,
+  "embeddedLanguageFormatting": "auto",
+  "endOfLine": "lf",
+  "htmlWhitespaceSensitivity": "css",
+  "jsxSingleQuote": false,
+  "printWidth": 120,
+  "proseWrap": "preserve",
+  "quoteProps": "as-needed",
+  "requirePragma": false,
+  "insertPragma": false,
+  "semi": false,
+  "singleQuote": true,
+  "tabWidth": 2,
+  "trailingComma": "none",
+  "useTabs": false
+}
diff --git a/aoe-infra/01-build.sh b/aoe-infra/01-build.sh
new file mode 100755
index 000000000..97abfd884
--- /dev/null
+++ b/aoe-infra/01-build.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+set -o errexit -o nounset -o pipefail
+
+source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../scripts/common-functions.sh"
+
+main() {
+  cd "$repo/aoe-infra"
+
+  start_gh_actions_group "Build aoe-infra CDK"
+
+  use_correct_node_version
+  npm_ci_if_package_lock_has_changed
+  npm run build
+
+  end_gh_actions_group
+}
+
+main
diff --git a/aoe-infra/README.md b/aoe-infra/README.md
index 7e94c00a1..684bfce8d 100644
--- a/aoe-infra/README.md
+++ b/aoe-infra/README.md
@@ -1,2 +1,102 @@
-# aoe-infra
-AWS infrastructure for AOE project
+# AOE AWS infrastructure
+
+Infrastructure for aoe.fi - project
+
+## Getting started
+
+If you wish to run the CDK - commands from your local machine, install the global dependencies: `nodejs 20, npm, npx` and install the project dependencies with `npm install` in the `/infra` - directory.
+
+## AWS vault
+
+When deploying to the target environment from your local machine, use `aws-vault exec ` and then proceed with cdk - commands. AWS vault targets the destination account explicitly.
+
+Alternatively, you can use aws cli v2:
+
+`aws sso login --sso-session oph-org-sso` where `oph-org-sso` profile must match the profile configured in your `~/.aws/config`
+
+With aws sso login spell above, you must define `--profile `
+
+Example: `npx cdk deploy -c environment=dev DataAnalyticsAuroraStack --profile aoe-dev`
+
+
+## cdk command examples for deploying the project stacks
+
+* `npx cdk deploy -c environment= --all` deploy all stacks to the target environment
+* `npx cdk destroy -c environment= --all` destroy all stacks to the target environment (note: you need to empty S3 - buckets etc. manually)
+* `npx cdk deploy -c environment=dev WebBackendAuroraStack` deploy only WebBackendAuroraStack (and any change in its dependencies)
+* `npx cdk destroy -c environment=dev WebBackendAuroraStack` destroy only WebBackendAuroraStack (and any change in its dependencies)
+
+## Generic cdk commands
+* `npx cdk diff` compare deployed stack with current state
+* `npm run build` compile typescript to js
+* `npm run watch` watch for changes and compile
+* `npm run test` perform the jest unit tests
+* `npx cdk synth` emits the synthesized CloudFormation template
+
+## Environment variables
+
+Environment variables have been split into two places:
+
+* `environments/.json` contains environment specific non-sensitive configuration
+* AWS Parameter Store contains variables with sensitive information. Parameters in the parameter store are expected to be prefixed with `///`
+
+## Subnetting
+
+Project uses a /16 network which has been split into /18 per VPC (=per environment), which in turn is designed to be split into 16x /22 networks with 1022 IP - addresses available per subnet. 
+
+## Adding a new service
+
+First, add a new Security Group and Security Group rules to the `security-groups.ts`, add the service/environment specific configuration into `environments/.json` then create a new stack instance of `ecs-service.ts` in the `/bin/infra.ts`
+
+## Adding a new database
+
+Then,
+- add a new Security Group and Security Group rules to the `security-groups.ts`,
+- add a new secret in the `secrets-manager-stack.ts`
+- add the service/environment specific database configuration into `environments/.json`
+- create a new stack instance of `aurora-serverless-database.ts` in the `/bin/infra.ts`
+
+Aurora stack creation only creates database master user with a password stored in the AWS Secrets Manager (`/auroradbs//master-user-password`). Application user must be created (and granted) separately.
+
+### Database dump for transfer
+
+Following options are recommended for dumping the database:
+
+    pg_dump -Fc --clean -U aoe_db_admin -d aoe > transfer.dump
+
+### Database restore in AWS
+
+Secrets are stored in AWS Secrets Manager. Database restore to empty RDS-environment is done in the following way:
+
+Connect to database `postgres` from bastion:
+
+    psql -U aoe_db_admin -W -h .amazonaws.com postgres
+
+Create database and users:
+
+    CREATE DATABASE aoe ENCODING 'utf-8';
+    CREATE ROLE reporter WITH PASSWORD '';
+    CREATE ROLE aoe_admin WITH PASSWORD '';
+
+From bastion, run restore:
+
+    pg_restore -U aoe_db_admin -W -h .rds.amazonaws.com --no-owner --role=aoe_db_admin -d aoe < transfer.dump
+
+Connect to database `aoe` from bastion:
+
+    psql -U aoe_db_admin -W -h .amazonaws.com aoe
+
+Grant access:
+
+    ALTER ROLE reporter WITH LOGIN;
+    ALTER ROLE aoe_admin WITH LOGIN;
+    GRANT ALL PRIVILEGES ON DATABASE aoe TO aoe_admin;
+    GRANT ALL PRIVILEGES ON SCHEMA public TO aoe_admin;
+    GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO aoe_admin;
+    GRANT USAGE, SELECT, UPDATE ON ALL SEQUENCES IN SCHEMA public TO aoe_admin;
+    GRANT CONNECT ON DATABASE aoe TO reporter;
+    GRANT SELECT ON ALL TABLES IN SCHEMA public TO reporter;
+    GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA public TO reporter;
+
+Exit `psql`. 
+ diff --git a/aoe-infra/bin/infra.ts b/aoe-infra/bin/infra.ts new file mode 100644 index 000000000..7d4671687 --- /dev/null +++ b/aoe-infra/bin/infra.ts @@ -0,0 +1,604 @@ +#!/usr/bin/env node +import 'source-map-support/register' +import * as cdk from 'aws-cdk-lib' +import * as utility from '../environments/utility.json' +import * as dev from '../environments/dev.json' +import * as qa from '../environments/qa.json' +import * as prod from '../environments/prod.json' +import { VpcStack } from '../lib/vpc-stack' +import { SecurityGroupStack } from '../lib/security-groups' +import { AuroraCommonStack } from '../lib/aurora-serverless-common' +import { AuroraDatabaseStack } from '../lib/aurora-serverless-database' +import { AlbStack } from '../lib/alb-stack' +import { CloudFrontCertificateStack } from '../lib/cloudfront-certificate-stack' +import { CloudfrontStack } from '../lib/cloudfront-stack' +import { KmsStack } from '../lib/kms-stack' +import { FargateClusterStack } from '../lib/fargate-cluster-stack' +import { EcsServiceStack } from '../lib/ecs-service' +import { FrontendBucketStack } from '../lib/front-end-bucket-stack' +import { FrontendStaticContentDeploymentStack } from '../lib/front-end-content-deployment-stack' +import { EcrStack } from '../lib/ecr-stack' +import { ElasticacheServerlessStack } from '../lib/redis-stack' +import { CpuArchitecture } from 'aws-cdk-lib/aws-ecs' +import { BastionStack } from '../lib/bastion-stack' +import { SecretManagerStack } from '../lib/secrets-manager-stack' +import { OpenSearchServerlessStack } from '../lib/opensearch-stack' +import { HostedZoneStack } from '../lib/hosted-zone-stack' +import { S3Stack } from '../lib/S3Stack' +import { PolicyStatement } from 'aws-cdk-lib/aws-iam' +import * as iam from 'aws-cdk-lib/aws-iam' +import { NamespaceStack } from '../lib/namespaceStack' +import { EfsStack } from '../lib/efs-stack' +import { DocumentdbStack } from '../lib/documentdb-stack' +import { MskStack } from '../lib/msk-stack' 
+import { GithubActionsStack } from '../lib/githubActionsStack' +import { UtilityStack } from '../lib/utility-stack' + +const app = new cdk.App() + +// Load up configuration for the environment +const environmentName: string = app.node.tryGetContext('environment') +const utilityAccountId: string = app.node.tryGetContext('UTILITY_ACCOUNT_ID') + +// Allow any in this case, since we don't want to explicitely type json data +/* eslint-disable @typescript-eslint/no-explicit-any */ +let environmentConfig: any + +if (environmentName === 'utility') { + environmentConfig = utility +} else if (environmentName === 'dev') { + environmentConfig = dev +} else if (environmentName === 'qa') { + environmentConfig = qa +} else if (environmentName === 'prod') { + environmentConfig = prod +} else { + console.error( + 'You must define a valid environment name in CDK context! Valid environment names are dev, qa, prod and utility' + ) + process.exit(1) +} + +// dev, qa & prod account resources.. +if (environmentName === 'dev' || environmentName === 'qa' || environmentName === 'prod') { + + const domain = environmentName === 'prod' ? 
`aws.${environmentConfig.aws.domain}` : environmentConfig.aws.domain + + new GithubActionsStack(app, 'GithubActionsStack', { + env: { region: 'eu-west-1' }, + environment: environmentName + }) + + // Remember to update KMS key removal policy + const Kms = new KmsStack(app, 'KmsStack', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-kms`, + environment: environmentName + }) + + const Secrets = new SecretManagerStack(app, 'SecretManagerStack', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-secrets`, + kmsKey: Kms.secretsManagerKey + }) + + const Network = new VpcStack(app, 'VpcStack', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-vpc`, + vpc_cidr: environmentConfig.aws.vpc_cidr, + availability_zones: environmentConfig.aws.availability_zones + }) + + const HostedZones = new HostedZoneStack(app, 'HostedZoneStack', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-hosted-zone`, + domain: environmentConfig.aws.domain, + vpc: Network.vpc + }) + + const SecurityGroups = new SecurityGroupStack(app, 'SecurityGroupStack', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-security-groups`, + vpc: Network.vpc + }) + + new BastionStack(app, 'BastionStack', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-bastion`, + vpc: Network.vpc, + securityGroup: SecurityGroups.bastionSecurityGroup, + kmsKey: Kms.ebsKmsKey, + environment: environmentName + }) + + const AuroraCommons = new AuroraCommonStack(app, 'AuroraCommonStack', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-aurora-common`, + vpc: Network.vpc + }) + + const WebBackendAurora = new AuroraDatabaseStack(app, 'WebBackendAuroraStack', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-web-backend-aurora`, + auroraVersion: environmentConfig.aurora_databases.web_backend.version, + environment: environmentName, + clusterName: 'web-backend', + vpc: Network.vpc, + securityGroup: 
SecurityGroups.webBackendAuroraSecurityGroup, + performanceInsights: environmentConfig.aurora_databases.web_backend.performance_insights, + minSizeAcu: environmentConfig.aurora_databases.web_backend.min_acu, + maxSizeAcu: environmentConfig.aurora_databases.web_backend.max_acu, + kmsKey: Kms.rdsKmsKey, + auroraDbPassword: Secrets.webBackendAuroraPassword, + subnetGroup: AuroraCommons.auroraSubnetGroup + }) + + const OpenSearch = new OpenSearchServerlessStack(app, 'AOEOpenSearch', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-open-search`, + collectionName: environmentConfig.open_search.collectionName, + description: environmentConfig.open_search.collectionDescription, + securityGroupIds: [SecurityGroups.openSearchSecurityGroup.securityGroupId], + vpc: Network.vpc, + kmsKey: Kms.openSearchKmsKey, + standbyReplicas: environmentConfig.open_search.standbyReplicas + }) + + const SemanticApisRedis = new ElasticacheServerlessStack(app, 'SemanticApisRedis', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-semantic-apis-redis`, + elasticacheName: 'semantic-apis', + consumingServiceName: 'semantic-apis', + secret: Secrets.semanticApisPassword, + vpc: Network.vpc, + securityGroupId: SecurityGroups.semanticApisRedisSecurityGroup.securityGroupId, + redisKmsKeyId: Kms.redisKmsKey.keyId, + secretsManagerKmsKeyId: Kms.secretsManagerKey, + redisMajorVersion: environmentConfig.redis_serverless.semantic_apis.redis_major_version, + storageMin: environmentConfig.redis_serverless.semantic_apis.storage_min, + storageMax: environmentConfig.redis_serverless.semantic_apis.storage_max, + minEcpuPerSecond: environmentConfig.redis_serverless.semantic_apis.min_ecpu_per_second, + maxEcpuPerSecond: environmentConfig.redis_serverless.semantic_apis.max_ecpu_per_second + }) + + const Alb = new AlbStack(app, 'AlbStack', { + env: { region: 'eu-west-1' }, + crossRegionReferences: true, + stackName: `${environmentName}-alb`, + vpc: Network.vpc, + securityGroupId: 
SecurityGroups.albSecurityGroup.securityGroupId, + domain: domain, + publicHostedZone: HostedZones.publicHostedZone + }) + + const CloudfrontCertificate = new CloudFrontCertificateStack(app, 'CloudFrontCertificateStack', { + env: { region: 'us-east-1' }, + stackName: `${environmentName}-cloudfront-certificate`, + domain: domain, + hostedZone: HostedZones.publicHostedZone, + crossRegionReferences: true + }) + + const Cloudfront = new CloudfrontStack(app, 'CloudFrontStack', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-cloudfront`, + alb: Alb.alb, + domain: domain, + publicHostedZone: HostedZones.publicHostedZone, + certificate: CloudfrontCertificate.certificate, + crossRegionReferences: true + }) + + const FrontEndBucket = new FrontendBucketStack(app, 'FrontEndBucketStack', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-frontend-bucket`, + environment: environmentName, + cloudFrontDistribution: Cloudfront.distribution + }) + + const s3BucketStack = new S3Stack(app, 'S3BucketStack', { + env: { region: 'eu-west-1' }, + environment: environmentName, + aoeBucketName: environmentConfig.S3.aoeBucketName, + aoePdfBucketName: environmentConfig.S3.aoePdfBucketName, + aoeThumbnailBucketName: environmentConfig.S3.aoeThumbnailBucketName + }) + + const namespace = new NamespaceStack(app, 'NameSpaceStack', Network.vpc, { + env: { region: 'eu-west-1' }, + environment: environmentName + }) + + new FrontendStaticContentDeploymentStack(app, 'FrontEndContentDeploymentStack', { + env: { region: 'eu-west-1' }, + crossRegionReferences: true, + stackName: `${environmentName}-frontend-deployment`, + environment: environmentName, + bucket: FrontEndBucket.bucket, + cloudFrontDistribution: Cloudfront.distribution + }) + + const FargateCluster = new FargateClusterStack(app, 'FargateClusterStack', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-fargate-cluster`, + environment: environmentName, + vpc: Network.vpc, + logGroupKmsKey: 
Kms.cloudwatchLogsKmsKey + }) + + const buckets = s3BucketStack.allBuckets() + const s3PolicyStatement = new PolicyStatement({ + actions: ['s3:ListBucket', 's3:PutObject', 's3:GetObject', 's3:DeleteObject'], + resources: buckets.flatMap((bucket) => [bucket.bucketArn, `${bucket.bucketArn}/*`]) + }) + + const efs = new EfsStack(app, 'AOEefsStack', { + env: { region: 'eu-west-1' }, + vpc: Network.vpc, + securityGroup: SecurityGroups.efsSecurityGroup, + accessPointPath: '/data', + throughputMode: environmentConfig.EFS.throughputMode + }) + + const docDb = new DocumentdbStack(app, 'AOEDocumentDB', { + instances: environmentConfig.document_db.instances, + instanceType: environmentConfig.document_db.instanceType, + env: { region: 'eu-west-1' }, + vpc: Network.vpc, + securityGroup: SecurityGroups.documentDbSecurityGroup, + engineVersion: environmentConfig.document_db.engineVersion, + user: Secrets.documentDbPassword, + kmsKey: Kms.documentDbKmsKey + }) + + const mskKafka = new MskStack(app, 'AOEMskKafka', { + env: { region: 'eu-west-1' }, + clusterName: environmentConfig.msk.clusterName, + instanceType: environmentConfig.msk.instanceType, + kmsKey: Kms.mskKmsKey, + numberOfBrokerNodes: environmentConfig.msk.numberOfBrokerNodes, + securityGroup: SecurityGroups.mskSecurityGroup, + version: environmentConfig.msk.version, + volumeSize: environmentConfig.msk.volumeSize, + vpc: Network.vpc + }) + + const kafkaClusterIamPolicy = new iam.PolicyStatement({ + actions: [ + 'kafka-cluster:Connect', + 'kafka-cluster:DescribeCluster', + 'kafka-cluster:GetBootstrapBrokers', + 'kafka-cluster:ListTopics', + 'kafka-cluster:AlterCluster' + ], + resources: [mskKafka.kafkaCluster.attrArn] + }) + + const kafkaTopicIamPolicy = new iam.PolicyStatement({ + actions: ['kafka-cluster:*Topic*', 'kafka-cluster:WriteData', 'kafka-cluster:ReadData'], + resources: [ + `arn:aws:kafka:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:topic/${mskKafka.kafkaCluster.clusterName}/${cdk.Fn.select(2, cdk.Fn.split('/', 
mskKafka.kafkaCluster.attrArn))}/prod_material_activity`, + `arn:aws:kafka:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:topic/${mskKafka.kafkaCluster.clusterName}/${cdk.Fn.select(2, cdk.Fn.split('/', mskKafka.kafkaCluster.attrArn))}/prod_search_requests` + ] + }) + + const kafkaGroupIamPolicy = new iam.PolicyStatement({ + actions: ['kafka-cluster:AlterGroup', 'kafka-cluster:DescribeGroup'], + resources: [ + `arn:aws:kafka:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:group/${mskKafka.kafkaCluster.clusterName}/${cdk.Fn.select(2, cdk.Fn.split('/', mskKafka.kafkaCluster.attrArn))}/group-prod-material-activity`, + `arn:aws:kafka:${cdk.Aws.REGION}:${cdk.Aws.ACCOUNT_ID}:group/${mskKafka.kafkaCluster.clusterName}/${cdk.Fn.select(2, cdk.Fn.split('/', mskKafka.kafkaCluster.attrArn))}/group-prod-search-requests` + ] + }) + + new EcsServiceStack(app, 'DataAnalyticsEcsService', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-data-analytics-service`, + serviceName: 'data-analytics', + environment: environmentName, + cluster: FargateCluster.fargateCluster, + vpc: Network.vpc, + securityGroup: SecurityGroups.dataAnalyticsServiceSecurityGroup, + imageTag: environmentConfig.services.data_analytics.image_tag, + allowEcsExec: environmentConfig.services.data_analytics.allow_ecs_exec, + taskCpu: environmentConfig.services.data_analytics.cpu_limit, + taskMemory: environmentConfig.services.data_analytics.memory_limit, + minimumCount: environmentConfig.services.data_analytics.min_count, + maximumCount: environmentConfig.services.data_analytics.max_count, + cpuArchitecture: CpuArchitecture.X86_64, + env_vars: { + ...environmentConfig.services.data_analytics.env_vars, + ...{ + MONGODB_PRIMARY_HOST: docDb.clusterEndpoint.hostname, + MONGODB_PRIMARY_PORT: docDb.clusterEndpoint.port, + SPRING_DATASOURCE_PRIMARY_URL: `jdbc:postgresql://${WebBackendAurora.endPoint.hostname}:${WebBackendAurora.endPoint.port}/aoe`, + SPRING_KAFKA_CONSUMER_BOOTSTRAPSERVERS: mskKafka.bootstrapBrokers, + 
SPRING_KAFKA_PRODUCER_BOOTSTRAPSERVERS: mskKafka.bootstrapBrokers + } + }, + parameter_store_secrets: [], + secrets_manager_secrets: [ + Secrets.secrets.ANALYTICS_PG_PASS, + Secrets.secrets.ANALYTICS_DOCDB_PASSWORD, + Secrets.secrets.ANALYTICS_TRUST_STORE_PASSWORD + ], + utilityAccountId: utilityAccountId, + alb: Alb.alb, + listener: Alb.albListener, + listenerPathPatterns: ['/analytics/api/*'], + healthCheckPath: '/analytics/api/status', + healthCheckGracePeriod: 180, + healthCheckInterval: 5, + healthCheckTimeout: 2, + albPriority: 140, + privateDnsNamespace: namespace.privateDnsNamespace, + iAmPolicyStatements: [kafkaClusterIamPolicy, kafkaTopicIamPolicy, kafkaGroupIamPolicy] + }) + + new EcsServiceStack(app, 'StreamingEcsService', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-streaming-app-service`, + serviceName: 'streaming-app', + environment: environmentName, + cluster: FargateCluster.fargateCluster, + vpc: Network.vpc, + securityGroup: SecurityGroups.streamingServiceSecurityGroup, + imageTag: environmentConfig.services.streaming.image_tag, + allowEcsExec: environmentConfig.services.streaming.allow_ecs_exec, + taskCpu: environmentConfig.services.streaming.cpu_limit, + taskMemory: environmentConfig.services.streaming.memory_limit, + minimumCount: environmentConfig.services.streaming.min_count, + maximumCount: environmentConfig.services.streaming.max_count, + cpuArchitecture: CpuArchitecture.X86_64, + env_vars: environmentConfig.services.streaming.env_vars, + parameter_store_secrets: [], + secrets_manager_secrets: [], + utilityAccountId: utilityAccountId, + alb: Alb.alb, + listener: Alb.albListener, + listenerPathPatterns: ['/stream/api/v1*'], + healthCheckPath: '/', + healthCheckGracePeriod: 180, + healthCheckInterval: 5, + healthCheckTimeout: 2, + albPriority: 110, + iAmPolicyStatements: [s3PolicyStatement], + privateDnsNamespace: namespace.privateDnsNamespace + }) + + new EcsServiceStack(app, 'DataServicesEcsService', { + env: { region: 
'eu-west-1' }, + stackName: `${environmentName}-data-services`, + serviceName: 'data-services', + environment: environmentName, + cluster: FargateCluster.fargateCluster, + vpc: Network.vpc, + securityGroup: SecurityGroups.dataServicesSecurityGroup, + imageTag: environmentConfig.services.data_services.image_tag, + allowEcsExec: environmentConfig.services.data_services.allow_ecs_exec, + taskCpu: environmentConfig.services.data_services.cpu_limit, + taskMemory: environmentConfig.services.data_services.memory_limit, + minimumCount: environmentConfig.services.data_services.min_count, + maximumCount: environmentConfig.services.data_services.max_count, + cpuArchitecture: CpuArchitecture.X86_64, + env_vars: environmentConfig.services.data_services.env_vars, + parameter_store_secrets: [], + secrets_manager_secrets: [], + utilityAccountId: utilityAccountId, + alb: Alb.alb, + listener: Alb.albListener, + listenerPathPatterns: ['/rest/oaipmh*'], + healthCheckPath: '/rest/health', + healthCheckGracePeriod: 180, + healthCheckInterval: 5, + healthCheckTimeout: 2, + albPriority: 130, + privateDnsNamespace: namespace.privateDnsNamespace + }) + + const aossPolicyStatement = new iam.PolicyStatement({ + actions: [ + 'aoss:CreateIndex', + 'aoss:DeleteIndex', + 'aoss:UpdateIndex', + 'aoss:DescribeIndex', + 'aoss:ReadDocument', + 'aoss:WriteDocument', + 'aoss:DescribeCollectionItems', + 'aoss:UpdateCollectionItems', + 'aoss:DeleteCollectionItems', + 'aoss:CreateCollectionItems', + 'aoss:APIAccessAll' + ], + resources: [OpenSearch.collectionArn] + }) + + const efsPolicyStatement = new iam.PolicyStatement({ + actions: [ + 'elasticfilesystem:DescribeFileSystems', + 'elasticfilesystem:ClientWrite', + 'elasticfilesystem:ClientMount', + 'elasticfilesystem:DescribeMountTargets' + ], + resources: [efs.fileSystem.fileSystemArn] + }) + + new EcsServiceStack(app, 'WebBackendEcsService', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-web-backend-service`, + serviceName: 
'web-backend', + environment: environmentName, + cluster: FargateCluster.fargateCluster, + vpc: Network.vpc, + securityGroup: SecurityGroups.webBackendsServiceSecurityGroup, + imageTag: environmentConfig.services.web_backend.image_tag, + allowEcsExec: environmentConfig.services.web_backend.allow_ecs_exec, + taskCpu: environmentConfig.services.web_backend.cpu_limit, + taskMemory: environmentConfig.services.web_backend.memory_limit, + minimumCount: environmentConfig.services.web_backend.min_count, + maximumCount: environmentConfig.services.web_backend.max_count, + cpuArchitecture: CpuArchitecture.X86_64, + env_vars: { + ...environmentConfig.services.web_backend.env_vars, + ...{ + REDIS_HOST: SemanticApisRedis.endpointAddress, + REDIS_PORT: SemanticApisRedis.endpointPort, + ES_NODE: OpenSearch.collectionEndpoint, + POSTGRESQL_HOST: WebBackendAurora.endPoint.hostname, + POSTGRESQL_PORT: WebBackendAurora.endPoint.port, + KAFKA_BROKER_SERVERS: mskKafka.bootstrapBrokers + } + }, + parameter_store_secrets: [], + secrets_manager_secrets: [ + Secrets.secrets.REDIS_PASS, + Secrets.secrets.PG_PASS, + Secrets.secrets.SESSION_SECRET, + Secrets.secrets.CLIENT_SECRET, + Secrets.secrets.JWT_SECRET, + Secrets.secrets.PROXY_URI, + Secrets.secrets.CLIENT_ID + ], + utilityAccountId: utilityAccountId, + alb: Alb.alb, + listener: Alb.albListener, + listenerPathPatterns: ['/api/*', '/h5p/*', '/embed/*', '/content/*'], + healthCheckPath: '/', + healthCheckGracePeriod: 180, + healthCheckInterval: 5, + healthCheckTimeout: 2, + albPriority: 120, + iAmPolicyStatements: [ + aossPolicyStatement, + s3PolicyStatement, + efsPolicyStatement, + kafkaClusterIamPolicy, + kafkaTopicIamPolicy + ], + privateDnsNamespace: namespace.privateDnsNamespace, + efs: { + volume: { + name: 'data', + efsVolumeConfiguration: { + fileSystemId: efs.fileSystemId, + transitEncryption: 'ENABLED', + authorizationConfig: { + accessPointId: efs.accessPoint.accessPointId, + iam: 'ENABLED' + } + } + }, + mountPoint: { + 
sourceVolume: 'data', + containerPath: '/mnt/data', + readOnly: false + } + } + }) + + new EcsServiceStack(app, 'WebFrontendEcsService', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-web-frontend-service`, + serviceName: 'web-frontend', + environment: environmentName, + cluster: FargateCluster.fargateCluster, + vpc: Network.vpc, + securityGroup: SecurityGroups.webFrontendServiceSecurityGroup, + imageTag: environmentConfig.services.web_frontend.image_tag, + allowEcsExec: environmentConfig.services.web_frontend.allow_ecs_exec, + taskCpu: environmentConfig.services.web_frontend.cpu_limit, + taskMemory: environmentConfig.services.web_frontend.memory_limit, + minimumCount: environmentConfig.services.web_frontend.min_count, + maximumCount: environmentConfig.services.web_frontend.max_count, + cpuArchitecture: CpuArchitecture.X86_64, + env_vars: {}, + parameter_store_secrets: [], + secrets_manager_secrets: [], + utilityAccountId: utilityAccountId, + alb: Alb.alb, + listener: Alb.albListener, + listenerPathPatterns: ['/*'], + healthCheckPath: '/health', + healthCheckGracePeriod: 180, + healthCheckInterval: 5, + healthCheckTimeout: 2, + albPriority: 49000, + iAmPolicyStatements: [], + privateDnsNamespace: namespace.privateDnsNamespace + }) + + new EcsServiceStack(app, 'SemanticApisEcsService', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-semantic-apis-service`, + serviceName: 'semantic-apis', + environment: environmentName, + cluster: FargateCluster.fargateCluster, + vpc: Network.vpc, + securityGroup: SecurityGroups.semanticApisServiceSecurityGroup, + imageTag: environmentConfig.services.semantic_apis.image_tag, + allowEcsExec: environmentConfig.services.semantic_apis.allow_ecs_exec, + taskCpu: environmentConfig.services.semantic_apis.cpu_limit, + taskMemory: environmentConfig.services.semantic_apis.memory_limit, + minimumCount: environmentConfig.services.semantic_apis.min_count, + maximumCount: 
environmentConfig.services.semantic_apis.max_count, + cpuArchitecture: CpuArchitecture.X86_64, + env_vars: { + ...environmentConfig.services.semantic_apis.env_vars, + ...{ REDIS_HOST: SemanticApisRedis.endpointAddress }, + REDIS_PORT: SemanticApisRedis.endpointPort + }, + parameter_store_secrets: [], + secrets_manager_secrets: [Secrets.secrets.REDIS_PASS], + utilityAccountId: utilityAccountId, + alb: Alb.alb, + listener: Alb.albListener, + listenerPathPatterns: ['/ref/api/v1*'], + healthCheckPath: '/health', + healthCheckGracePeriod: 180, + healthCheckInterval: 5, + healthCheckTimeout: 2, + albPriority: 100, + privateDnsNamespace: namespace.privateDnsNamespace + }) +} else if (environmentName === 'utility') { + const Utility = new UtilityStack(app, 'UtilityStack', { + env: { region: 'eu-west-1' }, + stackName: `${environmentName}-utility` + }) + + new EcrStack(app, 'FrontendEcrStack', { + env: { region: 'eu-west-1' }, + stackName: 'aoe-web-frontend-ecr', + serviceName: 'aoe-web-frontend', + githubActionsDeploymentRole: Utility.githubActionsDeploymentRole + }) + new EcrStack(app, 'BackendEcrStack', { + env: { region: 'eu-west-1' }, + stackName: 'aoe-web-backend-ecr', + serviceName: 'aoe-web-backend', + githubActionsDeploymentRole: Utility.githubActionsDeploymentRole + }) + new EcrStack(app, 'SemanticApisEcrStack', { + env: { region: 'eu-west-1' }, + stackName: 'aoe-semantic-apis-ecr', + serviceName: 'aoe-semantic-apis', + githubActionsDeploymentRole: Utility.githubActionsDeploymentRole + }) + new EcrStack(app, 'StreamingAppEcrStack', { + env: { region: 'eu-west-1' }, + stackName: 'aoe-streaming-app-ecr', + serviceName: 'aoe-streaming-app', + githubActionsDeploymentRole: Utility.githubActionsDeploymentRole + }) + new EcrStack(app, 'DataServicesEcrStack', { + env: { region: 'eu-west-1' }, + stackName: 'aoe-data-services-ecr', + serviceName: 'aoe-data-services', + githubActionsDeploymentRole: Utility.githubActionsDeploymentRole + }) + new EcrStack(app, 
'DataAnalyticsEcrStack', { + env: { region: 'eu-west-1' }, + stackName: 'aoe-data-analytics-ecr', + serviceName: 'aoe-data-analytics', + githubActionsDeploymentRole: Utility.githubActionsDeploymentRole + }) +} diff --git a/aoe-infra/infra/cdk.json b/aoe-infra/cdk.json similarity index 100% rename from aoe-infra/infra/cdk.json rename to aoe-infra/cdk.json diff --git a/aoe-infra/cdk.sh b/aoe-infra/cdk.sh new file mode 100755 index 000000000..5199248d8 --- /dev/null +++ b/aoe-infra/cdk.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../scripts/common-functions.sh" + +main() { + use_correct_node_version + npm_ci_if_package_lock_has_changed + npx cdk --context environment="$ENV" --context UTILITY_ACCOUNT_ID="$UTILITY_ACCOUNT_ID" "$@" +} + +main "$@" diff --git a/aoe-infra/environments/dev.json b/aoe-infra/environments/dev.json new file mode 100644 index 000000000..95e75e013 --- /dev/null +++ b/aoe-infra/environments/dev.json @@ -0,0 +1,260 @@ +{ + "aws": { + "vpc_cidr": "10.5.0.0/18", + "availability_zones": 2, + "domain": "dev.aoe.fi" + }, + "services": { + "data_analytics": { + "cpu_limit": "512", + "memory_limit": "1024", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-205", + "allow_ecs_exec": true, + "env_vars": { + "LOGGING_LEVEL_FI_CSC": "ERROR", + "LOGGING_LEVEL_ORG_APACHE_KAFKA": "ERROR", + "LOGGING_LEVEL_ORG_SPRINGFRAMEWORK": "ERROR", + "SERVER_PORT": "8080", + "SERVER_SERVLET_CONTEXTPATH": "/analytics/api", + "SPRING_PROFILES_ACTIVE": "prod", + "SPRING_AUTOCONFIGURATION_EXCLUDE": "org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration", + + "SPRING_DATASOURCE_PRIMARY_USERNAME": "reporter", + "SPRING_DATASOURCE_PRIMARY_INITIALIZATIONMODE": "never", + "SPRING_DATASOURCE_PRIMARY_DRIVERCLASSNAME": "org.postgresql.Driver", + + "MONGODB_PRIMARY_ENABLE_SSL": "true", + "MONGODB_PRIMARY_DATABASE": "analytics", + "MONGODB_PRIMARY_USERNAME": "aoeOwner", + + "KAFKA_ENABLED": "true", + "KAFKA_SASL_ENABLE": 
"true", + "TRUST_STORE_LOCATION": "/certs/rds-truststore.jks", + + "SPRING_KAFKA_CONSUMER_AUTO_STARTUP": "true", + "SPRING_KAFKA_CONSUMER_AUTOOFFSETRESET": "latest", + "SPRING_KAFKA_CONSUMER_ENABLEAUTOCOMMIT": "true", + "SPRING_KAFKA_PRODUCER_BATCH_SIZE": "10", + "SPRING_KAFKA_PRODUCER_CLIENTID": "aoe-kafka-client", + + "KAFKA_GROUPID_PRODMATERIALACTIVITY": "group-prod-material-activity", + "KAFKA_GROUPID_PRODSEARCHREQUESTS": "group-prod-search-requests", + + "KAFKA_TOPIC_PRODMATERIALACTIVITY": "prod_material_activity", + "KAFKA_TOPIC_PRODSEARCHREQUESTS": "prod_search_requests" + } + }, + "data_services": { + "cpu_limit": "512", + "memory_limit": "1024", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-205", + "allow_ecs_exec": true, + "env_vars": { + "SPRING_PROFILES_ACTIVE": "prod", + "SERVER_PORT": "8080", + "AOE_OAIIDENTIFIER_REPOSITORYIDENTIFIER": "dev.aoe.fi", + "AOE_REQUEST_PERPAGE": "20", + "AOE_REQUEST_URL": "http://web-backend.dev.aoe.local:8080/api/v1/oaipmh/metadata" + } + }, + "web_frontend": { + "cpu_limit": "512", + "memory_limit": "1024", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-205-dev", + "allow_ecs_exec": true + }, + "web_backend": { + "cpu_limit": "512", + "memory_limit": "1024", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-281", + "allow_ecs_exec": true, + "env_vars": { + "NODE_ENV": "production", + "LOG_LEVEL": "error", + "PORT_LISTEN": "8080", + "MATERIAL_VERSION_URL": "https://dev.aoe.fi/#/materiaali/", + "HTTP_OPTIONS_TIMEOUT": "5000", + "HTTP_OPTIONS_RETRY": "2", + "HTTP_OPTIONS_CLOCK_TOLERANCE":"5", + + "POSTGRESQL_DATA": "aoe", + "PG_USER": "aoe_admin", + "SERVER_CONFIG_OAIPMH_ANALYTICS_URL": "http://data-analytics.dev.aoe.local:8080", + "KAFKA_EXCLUDED_AGENT_IDENTIFIERS": "oersi", + "KAFKA_BROKER_TOPIC_MATERIAL_ACTIVITY": "prod_material_activity", + "KAFKA_BROKER_TOPIC_SEARCH_REQUESTS": "prod_search_requests", + "KAFKA_CLIENT_ID": "aoe-web-backend", + "KAFKA_CLIENT_REGION": "eu-west-1", + + 
"SESSION_COOKIE_DOMAIN":".aoe.fi", + "SESSION_COOKIE_HTTP_ONLY": "true", + "SESSION_COOKIE_MAX_AGE": "86400000", + "SESSION_COOKIE_PATH": "/", + "SESSION_COOKIE_SAME_SITE": "lax", + "SESSION_COOKIE_SECURE": "true", + + "SESSION_OPTION_PROXY": "true", + "SESSION_OPTION_RESAVE": "false", + "SESSION_OPTION_ROLLING": "false", + "SESSION_OPTION_SAVE_UNINITIALIZED": "false", + + "CLOUD_STORAGE_REGION": "eu-west-1", + "CLOUD_STORAGE_BUCKET": "aoe-dev", + "CLOUD_STORAGE_BUCKET_PDF": "aoepdf-dev", + "CLOUD_STORAGE_BUCKET_THUMBNAIL": "aoethumbnail-dev", + + "THUMBNAIL_END_POINT": "/app/thumbnail/", + "FILE_SIZE_LIMIT": "10737418240", + "THUMBNAIL_FILE_SIZE_LIMIT": "10485760", + + "REDIRECT_URI": "https://dev.aoe.fi/api/secure/redirect", + "SUCCESS_REDIRECT_URI": "/", + "FAILURE_REDIRECT_URI": "/api/login", + + "CREATE_ES_INDEX": "1", + "ES_INDEX": "aoe", + "ES_MAPPING_FILE": "/app/aoemapping.json", + "ES_COLLECTION_INDEX": "aoecollection", + "ES_COLLECTION_MAPPING_FILE": "/app/aoecollectionmapping.json", + "ES_SIZE_DEFAULT": "1000", + "ES_FROM_DEFAULT": "0", + + "HTML_BASE_URL": "https://dev.aoe.fi", + "HTML_FOLDER": "/mnt/data/webdata/htmlfolder", + + "H5P_JSON_CONFIGURATION": "dist/services/config/h5p.json", + "H5P_PATH_LIBRARIES": "/mnt/data/webdata/h5p/libraries", + "H5P_PATH_TEMPORARY_STORAGE": "/mnt/data/webdata/h5p/temporary-storage", + "H5P_PATH_CONTENT": "/mnt/data/webdata/h5p/content", + "H5P_PATH_CORE": "/app/h5p", + "H5P_PATH_EDITOR": "/mnt/data/webdata/h5p/editor", + "H5P_PLAY_API": "https://dev.aoe.fi/h5p/play/", + "H5P_USER_EMAIL": "oppimateriaalivaranto@aoe.fi", + + "MATERIAL_FILE_UPLOAD_FOLDER": "/mnt/data/uploads", + + "CONVERSION_TO_PDF_API": "https://dev.aoe.fi/api/v1/pdf/content/", + "CONVERSION_TO_PDF_ENABLED": "1", + + "FILE_DOWNLOAD_URL": "https://dev.aoe.fi/api/v1/download/file/", + "THUMBNAIL_DOWNLOAD_URL": "https://dev.aoe.fi/api/v1/thumbnail/", + "COLLECTION_THUMBNAIL_DOWNLOAD_URL": "https://dev.aoe.fi/api/v1/collection/thumbnail/", + + 
"REDIS_USERNAME": "app", + "REDIS_USE_TLS": "true", + "BASE_URL": "https://dev.aoe.fi/api/v1/", + "EMAIL_FROM": "oppimateriaalivaranto@aoe.fi", + "TRANSPORT_AUTH_USER": "oppimateriaalivaranto@aoe.fi", + "TRANSPORT_AUTH_HOST": "XXXX", + "TRANSPORT_PORT": "25", + "SEND_EMAIL": "0", + "VERIFY_EMAIL_REDIRECT_URL": "/", + + "CLOUD_STORAGE_ENABLED": "1", + "KAFKA_ENABLED": "1", + "LOGIN_ENABLED": "1", + + "PID_SERVICE_RUN_SCHEDULED": "1", + "PID_SERVICE_ENABLED": "1", + + "STREAM_ENABLED": "1", + "STREAM_FILESIZE_MIN": "100000", + "STREAM_REDIRECT_URI": "https://dev.aoe.fi/stream/api/v1/material/", + "STREAM_STATUS_HOST": "streaming-app.dev.aoe.local", + "STREAM_STATUS_PORT": "8080", + "STREAM_STATUS_PATH": "/stream/api/v1/material/", + "STREAM_STATUS_HOST_HTTPS_ENABLED": "0" + } + }, + "streaming": { + "cpu_limit": "512", + "memory_limit": "1024", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-205", + "allow_ecs_exec": true, + "env_vars": { + "LOG_LEVEL": "error", + "PORT": "8080", + "NODE_ENV": "production", + "STORAGE_BUCKET": "aoe-dev", + "STORAGE_REGION": "eu-west-1", + "STORAGE_MAX_RANGE": "10000000" + } + }, + "semantic_apis": { + "cpu_limit": "512", + "memory_limit": "1024", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-205", + "allow_ecs_exec": true, + "env_vars": { + "NODE_ENV": "production", + "LOG_LEVEL": "error", + "PORT_LISTEN": "8080", + "REDIS_USERNAME": "app", + "REDIS_EXPIRE_TIME": "86400", + "REDIS_USE_TLS": "true", + "EXTERNAL_API_CALLERID_OID": "1.2.246.562.10.2013112012294919827487", + "EXTERNAL_API_CALLERID_SERVICE": "aoe", + "EXTERNAL_API_OPINTOPOLKU_KOODISTOT": "https://virkailija.opintopolku.fi/koodisto-service/rest/json", + "EXTERNAL_API_FINTO_ASIASANAT": "http://api.finto.fi/rest/v1", + "EXTERNAL_API_SUOMI_KOODISTOT": "https://koodistot.suomi.fi/codelist-api/api/v1/coderegistries", + "EXTERNAL_API_OPINTOPOLKU_ORGANISAATIOT": "https://virkailija.opintopolku.fi/organisaatio-service/rest", + "EXTERNAL_API_OPINTOPOLKU_EPERUSTEET": 
"https://virkailija.opintopolku.fi/eperusteet-service/api" + } + } + }, + "msk": { + "clusterName": "AOEKafkaCluster", + "instanceType": "kafka.t3.small", + "numberOfBrokerNodes": 2, + "version": "3.6.0", + "volumeSize": 100 + }, + "open_search": { + "standbyReplicas": "DISABLED", + "collectionName": "aoecollection", + "collectionDescription": "Collection for aoe" + }, + + "aurora_databases": { + "web_backend": { + "version": "16.4", + "min_size_acu": 0.5, + "max_size_acu": 1, + "performance_insights": false + } + }, + "document_db": { + "instances": 1, + "instanceType": "t3.medium", + "engineVersion": "4.0.0" + }, + "S3": { + "aoeBucketName": "aoe", + "aoePdfBucketName": "aoepdf", + "aoeThumbnailBucketName": "aoethumbnail" + }, + "EFS": { + "throughputMode": "bursting" + }, + "redis_serverless": { + "semantic_apis": { + "major_version": "7", + "storage_min": 1, + "storage_max": 5, + "min_ecpu_per_second": 1000, + "max_ecpu_per_second": 40000 + } + } +} diff --git a/aoe-infra/environments/prod.json b/aoe-infra/environments/prod.json new file mode 100644 index 000000000..d990e1d41 --- /dev/null +++ b/aoe-infra/environments/prod.json @@ -0,0 +1,260 @@ +{ + "aws": { + "vpc_cidr": "10.5.128.0/18", + "availability_zones": 3, + "domain": "aoe.fi" + }, + "services": { + "data_analytics": { + "cpu_limit": "1024", + "memory_limit": "2048", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-276", + "allow_ecs_exec": true, + "env_vars": { + "LOGGING_LEVEL_FI_CSC": "ERROR", + "LOGGING_LEVEL_ORG_APACHE_KAFKA": "ERROR", + "LOGGING_LEVEL_ORG_SPRINGFRAMEWORK": "ERROR", + "SERVER_PORT": "8080", + "SERVER_SERVLET_CONTEXTPATH": "/analytics/api", + "SPRING_PROFILES_ACTIVE": "prod", + "SPRING_AUTOCONFIGURATION_EXCLUDE": "org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration", + + "SPRING_DATASOURCE_PRIMARY_USERNAME": "reporter", + "SPRING_DATASOURCE_PRIMARY_INITIALIZATIONMODE": "never", + "SPRING_DATASOURCE_PRIMARY_DRIVERCLASSNAME": "org.postgresql.Driver", + + 
"MONGODB_PRIMARY_ENABLE_SSL": "true", + "MONGODB_PRIMARY_DATABASE": "analytics", + "MONGODB_PRIMARY_USERNAME": "aoeOwner", + + "KAFKA_ENABLED": "true", + "KAFKA_SASL_ENABLE": "true", + "TRUST_STORE_LOCATION": "/certs/rds-truststore.jks", + + "SPRING_KAFKA_CONSUMER_AUTO_STARTUP": "true", + "SPRING_KAFKA_CONSUMER_AUTOOFFSETRESET": "latest", + "SPRING_KAFKA_CONSUMER_ENABLEAUTOCOMMIT": "true", + "SPRING_KAFKA_PRODUCER_BATCH_SIZE": "10", + "SPRING_KAFKA_PRODUCER_CLIENTID": "aoe-kafka-client", + + "KAFKA_GROUPID_PRODMATERIALACTIVITY": "group-prod-material-activity", + "KAFKA_GROUPID_PRODSEARCHREQUESTS": "group-prod-search-requests", + + "KAFKA_TOPIC_PRODMATERIALACTIVITY": "prod_material_activity", + "KAFKA_TOPIC_PRODSEARCHREQUESTS": "prod_search_requests" + } + }, + "data_services": { + "cpu_limit": "2048", + "memory_limit": "4096", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-276", + "allow_ecs_exec": true, + "env_vars": { + "SPRING_PROFILES_ACTIVE": "prod", + "SERVER_PORT": "8080", + "AOE_OAIIDENTIFIER_REPOSITORYIDENTIFIER": "aws.aoe.fi", + "AOE_REQUEST_PERPAGE": "20", + "AOE_REQUEST_URL": "http://web-backend.prod.aoe.local:8080/api/v1/oaipmh/metadata" + } + }, + "web_frontend": { + "cpu_limit": "2048", + "memory_limit": "4096", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-276-prod", + "allow_ecs_exec": true + }, + "web_backend": { + "cpu_limit": "2048", + "memory_limit": "4096", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-281", + "allow_ecs_exec": true, + "env_vars": { + "NODE_ENV": "production", + "LOG_LEVEL": "error", + "PORT_LISTEN": "8080", + "MATERIAL_VERSION_URL": "https://aws.aoe.fi/#/materiaali/", + "HTTP_OPTIONS_TIMEOUT": "5000", + "HTTP_OPTIONS_RETRY": "2", + "HTTP_OPTIONS_CLOCK_TOLERANCE":"5", + + "POSTGRESQL_DATA": "aoe", + "PG_USER": "aoe_admin", + "SERVER_CONFIG_OAIPMH_ANALYTICS_URL": "http://data-analytics.prod.aoe.local:8080", + "KAFKA_EXCLUDED_AGENT_IDENTIFIERS": "oersi", + "KAFKA_BROKER_TOPIC_MATERIAL_ACTIVITY": 
"prod_material_activity", + "KAFKA_BROKER_TOPIC_SEARCH_REQUESTS": "prod_search_requests", + "KAFKA_CLIENT_ID": "aoe-web-backend", + "KAFKA_CLIENT_REGION": "eu-west-1", + + "SESSION_COOKIE_DOMAIN":".aoe.fi", + "SESSION_COOKIE_HTTP_ONLY": "true", + "SESSION_COOKIE_MAX_AGE": "86400000", + "SESSION_COOKIE_PATH": "/", + "SESSION_COOKIE_SAME_SITE": "lax", + "SESSION_COOKIE_SECURE": "true", + + "SESSION_OPTION_PROXY": "true", + "SESSION_OPTION_RESAVE": "false", + "SESSION_OPTION_ROLLING": "false", + "SESSION_OPTION_SAVE_UNINITIALIZED": "false", + + "CLOUD_STORAGE_REGION": "eu-west-1", + "CLOUD_STORAGE_BUCKET": "aoe-prod", + "CLOUD_STORAGE_BUCKET_PDF": "aoepdf-prod", + "CLOUD_STORAGE_BUCKET_THUMBNAIL": "aoethumbnail-prod", + + "THUMBNAIL_END_POINT": "/app/thumbnail/", + "FILE_SIZE_LIMIT": "10737418240", + "THUMBNAIL_FILE_SIZE_LIMIT": "10485760", + + "REDIRECT_URI": "https://aws.aoe.fi/api/secure/redirect", + "SUCCESS_REDIRECT_URI": "/", + "FAILURE_REDIRECT_URI": "/api/login", + + "CREATE_ES_INDEX": "1", + "ES_INDEX": "aoe", + "ES_MAPPING_FILE": "/app/aoemapping.json", + "ES_COLLECTION_INDEX": "aoecollection", + "ES_COLLECTION_MAPPING_FILE": "/app/aoecollectionmapping.json", + "ES_SIZE_DEFAULT": "1000", + "ES_FROM_DEFAULT": "0", + + "HTML_BASE_URL": "https://aws.aoe.fi", + "HTML_FOLDER": "/mnt/data/webdata/htmlfolder", + + "H5P_JSON_CONFIGURATION": "dist/services/config/h5p.json", + "H5P_PATH_LIBRARIES": "/mnt/data/webdata/h5p/libraries", + "H5P_PATH_TEMPORARY_STORAGE": "/mnt/data/webdata/h5p/temporary-storage", + "H5P_PATH_CONTENT": "/mnt/data/webdata/h5p/content", + "H5P_PATH_CORE": "/app/h5p", + "H5P_PATH_EDITOR": "/mnt/data/webdata/h5p/editor", + "H5P_PLAY_API": "https://aws.aoe.fi/h5p/play/", + "H5P_USER_EMAIL": "oppimateriaalivaranto@aoe.fi", + + "MATERIAL_FILE_UPLOAD_FOLDER": "/mnt/data/uploads", + + "CONVERSION_TO_PDF_API": "https://aws.aoe.fi/api/v1/pdf/content/", + "CONVERSION_TO_PDF_ENABLED": "1", + + "FILE_DOWNLOAD_URL": 
"https://aws.aoe.fi/api/v1/download/file/", + "THUMBNAIL_DOWNLOAD_URL": "https://aws.aoe.fi/api/v1/thumbnail/", + "COLLECTION_THUMBNAIL_DOWNLOAD_URL": "https://aws.aoe.fi/api/v1/collection/thumbnail/", + + "REDIS_USERNAME": "app", + "REDIS_USE_TLS": "true", + "BASE_URL": "https://aws.aoe.fi/api/v1/", + "EMAIL_FROM": "oppimateriaalivaranto@aoe.fi", + "TRANSPORT_AUTH_USER": "oppimateriaalivaranto@aoe.fi", + "TRANSPORT_AUTH_HOST": "XXXX", + "TRANSPORT_PORT": "25", + "SEND_EMAIL": "0", + "VERIFY_EMAIL_REDIRECT_URL": "/", + + "CLOUD_STORAGE_ENABLED": "1", + "KAFKA_ENABLED": "1", + "LOGIN_ENABLED": "1", + + "PID_SERVICE_RUN_SCHEDULED": "1", + "PID_SERVICE_ENABLED": "1", + + "STREAM_ENABLED": "1", + "STREAM_FILESIZE_MIN": "100000", + "STREAM_REDIRECT_URI": "https://aws.aoe.fi/stream/api/v1/material/", + "STREAM_STATUS_HOST": "streaming-app.prod.aoe.local", + "STREAM_STATUS_PORT": "8080", + "STREAM_STATUS_PATH": "/stream/api/v1/material/", + "STREAM_STATUS_HOST_HTTPS_ENABLED": "0" + } + }, + "streaming": { + "cpu_limit": "2048", + "memory_limit": "4096", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-276", + "allow_ecs_exec": true, + "env_vars": { + "LOG_LEVEL": "error", + "PORT": "8080", + "NODE_ENV": "production", + "STORAGE_BUCKET": "aoe-prod", + "STORAGE_REGION": "eu-west-1", + "STORAGE_MAX_RANGE": "10000000" + } + }, + "semantic_apis": { + "cpu_limit": "2048", + "memory_limit": "4096", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-276", + "allow_ecs_exec": true, + "env_vars": { + "NODE_ENV": "production", + "LOG_LEVEL": "error", + "PORT_LISTEN": "8080", + "REDIS_USERNAME": "app", + "REDIS_EXPIRE_TIME": "86400", + "REDIS_USE_TLS": "true", + "EXTERNAL_API_CALLERID_OID": "1.2.246.562.10.2013112012294919827487", + "EXTERNAL_API_CALLERID_SERVICE": "aoe", + "EXTERNAL_API_OPINTOPOLKU_KOODISTOT": "https://virkailija.opintopolku.fi/koodisto-service/rest/json", + "EXTERNAL_API_FINTO_ASIASANAT": "http://api.finto.fi/rest/v1", + "EXTERNAL_API_SUOMI_KOODISTOT": 
"https://koodistot.suomi.fi/codelist-api/api/v1/coderegistries", + "EXTERNAL_API_OPINTOPOLKU_ORGANISAATIOT": "https://virkailija.opintopolku.fi/organisaatio-service/rest", + "EXTERNAL_API_OPINTOPOLKU_EPERUSTEET": "https://virkailija.opintopolku.fi/eperusteet-service/api" + } + } + }, + "msk": { + "clusterName": "AOEKafkaCluster", + "instanceType": "kafka.t3.small", + "numberOfBrokerNodes": 2, + "version": "3.6.0", + "volumeSize": 100 + }, + "open_search": { + "standbyReplicas": "DISABLED", + "collectionName": "aoecollection", + "collectionDescription": "Collection for aoe" + }, + + "aurora_databases": { + "web_backend": { + "version": "16.4", + "min_size_acu": 0.5, + "max_size_acu": 1, + "performance_insights": false + } + }, + "document_db": { + "instances": 2, + "instanceType": "r5.large", + "engineVersion": "4.0.0" + }, + "S3": { + "aoeBucketName": "aoe", + "aoePdfBucketName": "aoepdf", + "aoeThumbnailBucketName": "aoethumbnail" + }, + "EFS": { + "throughputMode": "elastic" + }, + "redis_serverless": { + "semantic_apis": { + "major_version": "7", + "storage_min": 1, + "storage_max": 5, + "min_ecpu_per_second": 1000, + "max_ecpu_per_second": 40000 + } + } +} \ No newline at end of file diff --git a/aoe-infra/environments/qa.json b/aoe-infra/environments/qa.json new file mode 100644 index 000000000..14cc46f6e --- /dev/null +++ b/aoe-infra/environments/qa.json @@ -0,0 +1,260 @@ +{ + "aws": { + "vpc_cidr": "10.5.64.0/18", + "availability_zones": 2, + "domain": "qa.aoe.fi" + }, + "services": { + "data_analytics": { + "cpu_limit": "512", + "memory_limit": "1024", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-205", + "allow_ecs_exec": true, + "env_vars": { + "LOGGING_LEVEL_FI_CSC": "ERROR", + "LOGGING_LEVEL_ORG_APACHE_KAFKA": "ERROR", + "LOGGING_LEVEL_ORG_SPRINGFRAMEWORK": "ERROR", + "SERVER_PORT": "8080", + "SERVER_SERVLET_CONTEXTPATH": "/analytics/api", + "SPRING_PROFILES_ACTIVE": "prod", + "SPRING_AUTOCONFIGURATION_EXCLUDE": 
"org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration", + + "SPRING_DATASOURCE_PRIMARY_USERNAME": "reporter", + "SPRING_DATASOURCE_PRIMARY_INITIALIZATIONMODE": "never", + "SPRING_DATASOURCE_PRIMARY_DRIVERCLASSNAME": "org.postgresql.Driver", + + "MONGODB_PRIMARY_ENABLE_SSL": "true", + "MONGODB_PRIMARY_DATABASE": "analytics", + "MONGODB_PRIMARY_USERNAME": "aoeOwner", + + "KAFKA_ENABLED": "true", + "KAFKA_SASL_ENABLE": "true", + "TRUST_STORE_LOCATION": "/certs/rds-truststore.jks", + + "SPRING_KAFKA_CONSUMER_AUTO_STARTUP": "true", + "SPRING_KAFKA_CONSUMER_AUTOOFFSETRESET": "latest", + "SPRING_KAFKA_CONSUMER_ENABLEAUTOCOMMIT": "true", + "SPRING_KAFKA_PRODUCER_BATCH_SIZE": "10", + "SPRING_KAFKA_PRODUCER_CLIENTID": "aoe-kafka-client", + + "KAFKA_GROUPID_PRODMATERIALACTIVITY": "group-prod-material-activity", + "KAFKA_GROUPID_PRODSEARCHREQUESTS": "group-prod-search-requests", + + "KAFKA_TOPIC_PRODMATERIALACTIVITY": "prod_material_activity", + "KAFKA_TOPIC_PRODSEARCHREQUESTS": "prod_search_requests" + } + }, + "data_services": { + "cpu_limit": "512", + "memory_limit": "1024", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-205", + "allow_ecs_exec": true, + "env_vars": { + "SPRING_PROFILES_ACTIVE": "prod", + "SERVER_PORT": "8080", + "AOE_OAIIDENTIFIER_REPOSITORYIDENTIFIER": "qa.aoe.fi", + "AOE_REQUEST_PERPAGE": "20", + "AOE_REQUEST_URL": "http://web-backend.qa.aoe.local:8080/api/v1/oaipmh/metadata" + } + }, + "web_frontend": { + "cpu_limit": "512", + "memory_limit": "1024", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-257-qa", + "allow_ecs_exec": true + }, + "web_backend": { + "cpu_limit": "512", + "memory_limit": "1024", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-281", + "allow_ecs_exec": true, + "env_vars": { + "NODE_ENV": "production", + "LOG_LEVEL": "error", + "PORT_LISTEN": "8080", + "MATERIAL_VERSION_URL": "https://qa.aoe.fi/#/materiaali/", + "HTTP_OPTIONS_TIMEOUT": "5000", + "HTTP_OPTIONS_RETRY": "2", + 
"HTTP_OPTIONS_CLOCK_TOLERANCE":"5", + + "POSTGRESQL_DATA": "aoe", + "PG_USER": "aoe_admin", + "SERVER_CONFIG_OAIPMH_ANALYTICS_URL": "http://data-analytics.qa.aoe.local:8080", + "KAFKA_EXCLUDED_AGENT_IDENTIFIERS": "oersi", + "KAFKA_BROKER_TOPIC_MATERIAL_ACTIVITY": "prod_material_activity", + "KAFKA_BROKER_TOPIC_SEARCH_REQUESTS": "prod_search_requests", + "KAFKA_CLIENT_ID": "aoe-web-backend", + "KAFKA_CLIENT_REGION": "eu-west-1", + + "SESSION_COOKIE_DOMAIN":".aoe.fi", + "SESSION_COOKIE_HTTP_ONLY": "true", + "SESSION_COOKIE_MAX_AGE": "86400000", + "SESSION_COOKIE_PATH": "/", + "SESSION_COOKIE_SAME_SITE": "lax", + "SESSION_COOKIE_SECURE": "true", + + "SESSION_OPTION_PROXY": "true", + "SESSION_OPTION_RESAVE": "false", + "SESSION_OPTION_ROLLING": "false", + "SESSION_OPTION_SAVE_UNINITIALIZED": "false", + + "CLOUD_STORAGE_REGION": "eu-west-1", + "CLOUD_STORAGE_BUCKET": "aoe-qa", + "CLOUD_STORAGE_BUCKET_PDF": "aoepdf-qa", + "CLOUD_STORAGE_BUCKET_THUMBNAIL": "aoethumbnail-qa", + + "THUMBNAIL_END_POINT": "/app/thumbnail/", + "FILE_SIZE_LIMIT": "10737418240", + "THUMBNAIL_FILE_SIZE_LIMIT": "10485760", + + "REDIRECT_URI": "https://qa.aoe.fi/api/secure/redirect", + "SUCCESS_REDIRECT_URI": "/", + "FAILURE_REDIRECT_URI": "/api/login", + + "CREATE_ES_INDEX": "1", + "ES_INDEX": "aoe", + "ES_MAPPING_FILE": "/app/aoemapping.json", + "ES_COLLECTION_INDEX": "aoecollection", + "ES_COLLECTION_MAPPING_FILE": "/app/aoecollectionmapping.json", + "ES_SIZE_DEFAULT": "1000", + "ES_FROM_DEFAULT": "0", + + "HTML_BASE_URL": "https://qa.aoe.fi", + "HTML_FOLDER": "/mnt/data/webdata/htmlfolder", + + "H5P_JSON_CONFIGURATION": "dist/services/config/h5p.json", + "H5P_PATH_LIBRARIES": "/mnt/data/webdata/h5p/libraries", + "H5P_PATH_TEMPORARY_STORAGE": "/mnt/data/webdata/h5p/temporary-storage", + "H5P_PATH_CONTENT": "/mnt/data/webdata/h5p/content", + "H5P_PATH_CORE": "/app/h5p", + "H5P_PATH_EDITOR": "/mnt/data/webdata/h5p/editor", + "H5P_PLAY_API": "https://qa.aoe.fi/h5p/play/", + "H5P_USER_EMAIL": 
"oppimateriaalivaranto@aoe.fi", + + "MATERIAL_FILE_UPLOAD_FOLDER": "/mnt/data/uploads", + + "CONVERSION_TO_PDF_API": "https://qa.aoe.fi/api/v1/pdf/content/", + "CONVERSION_TO_PDF_ENABLED": "1", + + "FILE_DOWNLOAD_URL": "https://qa.aoe.fi/api/v1/download/file/", + "THUMBNAIL_DOWNLOAD_URL": "https://qa.aoe.fi/api/v1/thumbnail/", + "COLLECTION_THUMBNAIL_DOWNLOAD_URL": "https://qa.aoe.fi/api/v1/collection/thumbnail/", + + "REDIS_USERNAME": "app", + "REDIS_USE_TLS": "true", + "BASE_URL": "https://qa.aoe.fi/api/v1/", + "EMAIL_FROM": "oppimateriaalivaranto@aoe.fi", + "TRANSPORT_AUTH_USER": "oppimateriaalivaranto@aoe.fi", + "TRANSPORT_AUTH_HOST": "XXXX", + "TRANSPORT_PORT": "25", + "SEND_EMAIL": "0", + "VERIFY_EMAIL_REDIRECT_URL": "/", + + "CLOUD_STORAGE_ENABLED": "1", + "KAFKA_ENABLED": "1", + "LOGIN_ENABLED": "1", + + "PID_SERVICE_RUN_SCHEDULED": "1", + "PID_SERVICE_ENABLED": "1", + + "STREAM_ENABLED": "1", + "STREAM_FILESIZE_MIN": "100000", + "STREAM_REDIRECT_URI": "https://qa.aoe.fi/stream/api/v1/material/", + "STREAM_STATUS_HOST": "streaming-app.qa.aoe.local", + "STREAM_STATUS_PORT": "8080", + "STREAM_STATUS_PATH": "/stream/api/v1/material/", + "STREAM_STATUS_HOST_HTTPS_ENABLED": "0" + } + }, + "streaming": { + "cpu_limit": "512", + "memory_limit": "1024", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-205", + "allow_ecs_exec": true, + "env_vars": { + "LOG_LEVEL": "error", + "PORT": "8080", + "NODE_ENV": "production", + "STORAGE_BUCKET": "aoe-qa", + "STORAGE_REGION": "eu-west-1", + "STORAGE_MAX_RANGE": "10000000" + } + }, + "semantic_apis": { + "cpu_limit": "512", + "memory_limit": "1024", + "min_count": 1, + "max_count": 1, + "image_tag": "ga-205", + "allow_ecs_exec": true, + "env_vars": { + "NODE_ENV": "production", + "LOG_LEVEL": "error", + "PORT_LISTEN": "8080", + "REDIS_USERNAME": "app", + "REDIS_EXPIRE_TIME": "86400", + "REDIS_USE_TLS": "true", + "EXTERNAL_API_CALLERID_OID": "1.2.246.562.10.2013112012294919827487", + "EXTERNAL_API_CALLERID_SERVICE": 
"aoe", + "EXTERNAL_API_OPINTOPOLKU_KOODISTOT": "https://virkailija.opintopolku.fi/koodisto-service/rest/json", + "EXTERNAL_API_FINTO_ASIASANAT": "http://api.finto.fi/rest/v1", + "EXTERNAL_API_SUOMI_KOODISTOT": "https://koodistot.suomi.fi/codelist-api/api/v1/coderegistries", + "EXTERNAL_API_OPINTOPOLKU_ORGANISAATIOT": "https://virkailija.opintopolku.fi/organisaatio-service/rest", + "EXTERNAL_API_OPINTOPOLKU_EPERUSTEET": "https://virkailija.opintopolku.fi/eperusteet-service/api" + } + } + }, + "msk": { + "clusterName": "AOEKafkaCluster", + "instanceType": "kafka.t3.small", + "numberOfBrokerNodes": 2, + "version": "3.6.0", + "volumeSize": 100 + }, + "open_search": { + "standbyReplicas": "DISABLED", + "collectionName": "aoecollection", + "collectionDescription": "Collection for aoe" + }, + + "aurora_databases": { + "web_backend": { + "version": "16.4", + "min_size_acu": 0.5, + "max_size_acu": 1, + "performance_insights": false + } + }, + "document_db": { + "instances": 1, + "instanceType": "t3.medium", + "engineVersion": "4.0.0" + }, + "S3": { + "aoeBucketName": "aoe", + "aoePdfBucketName": "aoepdf", + "aoeThumbnailBucketName": "aoethumbnail" + }, + "EFS": { + "throughputMode": "bursting" + }, + "redis_serverless": { + "semantic_apis": { + "major_version": "7", + "storage_min": 1, + "storage_max": 5, + "min_ecpu_per_second": 1000, + "max_ecpu_per_second": 40000 + } + } +} diff --git a/aoe-infra/infra/environments/utility.json b/aoe-infra/environments/utility.json similarity index 100% rename from aoe-infra/infra/environments/utility.json rename to aoe-infra/environments/utility.json diff --git a/aoe-infra/eslint.config.mjs b/aoe-infra/eslint.config.mjs new file mode 100644 index 000000000..8b2c1f975 --- /dev/null +++ b/aoe-infra/eslint.config.mjs @@ -0,0 +1,38 @@ +import prettier from 'eslint-plugin-prettier' +import tsParser from '@typescript-eslint/parser' +import eslint from '@eslint/js'; +import tseslint from 'typescript-eslint'; + +export default tseslint.config( 
+ eslint.configs.recommended, + tseslint.configs.recommended, + { + plugins: { + prettier + }, + + languageOptions: { + parser: tsParser, + ecmaVersion: 2018, + sourceType: 'module', + + parserOptions: { + ecmaFeatures: { + jsx: true + } + } + }, + + rules: { + curly: 'error', + 'no-magic-numbers': 'off', + eqeqeq: 'error', + 'no-undef-init': 'error', + 'no-unneeded-ternary': 'error', + 'no-var': 'error', + 'prefer-promise-reject-errors': 'error', + 'prefer-template': 'error', + '@typescript-eslint/explicit-module-boundary-types': 'off' + } + } +) diff --git a/aoe-infra/infra/frontend/dist/index.html b/aoe-infra/frontend/dist/index.html similarity index 100% rename from aoe-infra/infra/frontend/dist/index.html rename to aoe-infra/frontend/dist/index.html diff --git a/aoe-infra/infra/README.md b/aoe-infra/infra/README.md deleted file mode 100644 index ebb5b1c1e..000000000 --- a/aoe-infra/infra/README.md +++ /dev/null @@ -1,41 +0,0 @@ -# AOE AWS infrastructure - -Infrastructure for aoe.fi - project - -## Getting started - -If you wish to run the CDK - commands from your local machine, install the global depencencies: `nodejs 20, npm, npx` and install the project dependencies with `npm install` in the `/infra` - directory. - -## AWS vault - -When deploying to the target environment from your local machine, use `aws-vault exec ` and then proceed with cdk - commands. - - -## Useful commands - -* `npx cdk deploy -c environment= --all` deploy all stacks to the target environment -* `npx cdk destroy -c environment= --all` destroy all stacks to the target environment (note: you need to empty S3 - buckets etc. 
manually) -* `npx cdk diff` compare deployed stack with current state -* `npx npm run build` compile typescript to js -* `npx npm run watch` watch for changes and compile -* `npx npm run test` perform the jest unit tests -* `npx cdk synth` emits the synthesized CloudFormation template - -## Environment variables - -Environment variables have been split into two places; - -* `environments/.json` contains environment specific non-sensitive configuration -* AWS Parameter Store contains variables with sensitive information. Parameters in the parameter store are expected to be prefixed with `///` - -## Subnetting - -Project uses a /16 network which has been split into /18 per VPC (=per environment), which in turn is designed to be split into 16x /22 networks with 1022 IP - addresses available per subnet. - -## Adding a new service - -First, add a new Security Group and Security Group rules to the `security-groups.ts`, add the service/environment specific configuration into `environments/.json` then create a new stack instance of `ecs-service.ts` in the `/bin/infra.ts` - -## Adding a new database - -First, add a new Security Group and Security Group rules to the `security-groups.ts`, add the service/environment specific database configuration into `environments/.json` then create a new stack instance of `rds-database.ts` in the `/bin/infra.ts` \ No newline at end of file diff --git a/aoe-infra/infra/bin/infra.ts b/aoe-infra/infra/bin/infra.ts deleted file mode 100644 index cd1474a32..000000000 --- a/aoe-infra/infra/bin/infra.ts +++ /dev/null @@ -1,273 +0,0 @@ -#!/usr/bin/env node -import 'source-map-support/register'; -import * as cdk from 'aws-cdk-lib'; -import * as utility from '../environments/utility.json'; -import * as dev from '../environments/dev.json'; -import * as qa from '../environments/qa.json'; -import * as prod from '../environments/prod.json'; -import { VpcStack } from '../lib/vpc-stack'; -import { SecurityGroupStack } from '../lib/security-groups' -import 
{ AuroraDatabaseStack } from '../lib/aurora-serverless-database'; -import { AlbStack } from '../lib/alb-stack'; -import { CloudfrontStack } from '../lib/cloudfront-stack'; -import { KmsStack } from '../lib/kms-stack'; -import { FargateClusterStack } from '../lib/fargate-cluster-stack'; -import { EcsServiceStack } from '../lib/ecs-service'; -import { FrontendBucketStack } from '../lib/front-end-bucket-stack'; -import { FrontendStaticContentDeploymentStack } from '../lib/front-end-content-deployment-stack'; -import { EcrStack } from '../lib/ecr-stack'; -import { UtilityStack } from '../lib/utility-stack'; -import { ElasticacheServerlessStack } from '../lib/redis-stack'; -import { SubnetGroupsStack } from '../lib/subnet-groups' -import { CpuArchitecture } from 'aws-cdk-lib/aws-ecs'; -import { BastionStack } from '../lib/bastion-stack'; -import { SecretManagerStack } from '../lib/secrets-manager-stack' - -const app = new cdk.App(); - -// Load up configuration for the environment -const environmentName: string = app.node.tryGetContext("environment"); -const utilityAccountId: string = app.node.tryGetContext("UTILITY_ACCOUNT_ID") -let environmentConfig: any; -if (environmentName == 'utility') -{ - environmentConfig = utility; -} -else if (environmentName == 'dev') -{ - environmentConfig = dev; -} -else if (environmentName == 'qa') -{ - environmentConfig = qa; -} -else if (environmentName == 'prod') -{ - environmentConfig = prod; -} -else -{ - console.error("You must define a valid environment name in CDK context! Valid environment names are dev, qa, prod and utility"); - process.exit(1); -} - - -// dev, qa & prod account resources.. 
-if (environmentName == 'dev' || environmentName == 'qa' || environmentName == 'prod') { - -// Remember to update KMS key removal policy - const Kms = new KmsStack(app, 'KmsStack', { - env: { region: "eu-west-1" }, - stackName: `${environmentName}-kms`, - environment: environmentName - }) - - const Secrets = new SecretManagerStack(app, 'SecretManagerStack', { - env: { region: "eu-west-1" }, - stackName: `${environmentName}-secrets`, - kmsKey: Kms.secretsManagerKey, - }) - - const Network = new VpcStack(app, 'VpcStack', { - env: { region: "eu-west-1" }, - stackName: `${environmentName}-vpc`, - vpc_cidr: environmentConfig.aws.vpc_cidr, - availability_zones: environmentConfig.aws.availability_zones - }) - - const SubnetGroups = new SubnetGroupsStack(app, 'SubnetGroupsStack', { - env: { region: "eu-west-1" }, - stackName: `${environmentName}-subnet-groups`, - environment: environmentName, - vpc: Network.vpc - }) - - const SecurityGroups = new SecurityGroupStack(app, 'SecurityGroupStack', { - env: { region: "eu-west-1" }, - stackName: `${environmentName}-security-groups`, - vpc: Network.vpc, - }) - - const BastionHost = new BastionStack(app, 'BastionStack', { - env: { region: "eu-west-1" }, - stackName: `${environmentName}-bastion`, - vpc: Network.vpc, - securityGroup: SecurityGroups.bastionSecurityGroup, - kmsKey: Kms.ebsKmsKey, - environment: environmentName - }) - -/* - const TestAuroraStack = new AuroraDatabaseStack(app, 'TestAuroraStack', { - env: { region: "eu-west-1" }, - stackName: `${environmentName}-test-aurora`, - auroraVersion: "11.1", - environment: environmentName, - clusterName: "test-aurora", - vpc: Network.vpc, - securityGroup: SecurityGroups.testAuroraSecurityGroup, - performanceInsights: environmentConfig.aws.performance_insights, - minSizeAcu: environmentConfig.aws.min_acu, - maxSizeAcu: environmentConfig.aws.max_acu, - domainNames: environmentConfig.aws.domain_names, - route53HostedZone: environmentConfig.aws.route53_hosted_zone, - kmsKey: 
Kms.rdsKmsKey, - }) -*/ - - const SemanticApisRedis = new ElasticacheServerlessStack(app, 'SemanticApisRedis', { - env: { region: "eu-west-1" }, - stackName: `${environmentName}-semantic-apis-redis`, - elasticacheName: "semantic-apis", - consumingServiceName: "semantic-apis", - secret: Secrets.semanticApisPassword, - vpc: Network.vpc, - securityGroupId: SecurityGroups.semanticApisRedisSecurityGroup.securityGroupId, - redisKmsKeyId: Kms.redisKmsKey.keyId, - secretsManagerKmsKeyId: Kms.secretsManagerKey, - redisMajorVersion: environmentConfig.redis_serverless.semantic_apis.redis_major_version, - storageMin: environmentConfig.redis_serverless.semantic_apis.storage_min, - storageMax: environmentConfig.redis_serverless.semantic_apis.storage_max, - minEcpuPerSecond: environmentConfig.redis_serverless.semantic_apis.min_ecpu_per_second, - maxEcpuPerSecond: environmentConfig.redis_serverless.semantic_apis.max_ecpu_per_second - }) - - const Alb = new AlbStack(app, 'AlbStack', { - env: { region: "eu-west-1" }, - crossRegionReferences: true, - stackName: `${environmentName}-alb`, - vpc: Network.vpc, - securityGroupId: SecurityGroups.albSecurityGroup.securityGroupId, - }) - - // Remember to add correct domain - const Cloudfront = new CloudfrontStack(app, 'CloudFrontStack', { - env: { region: "eu-west-1" }, -// crossRegionReferences: true, - stackName: `${environmentName}-cloudfront`, -// environment: environmentName, - alb: Alb.alb, -// domain: environmentConfig.aws.domain, - }) - - - const FrontEndBucket = new FrontendBucketStack(app, 'FrontEndBucketStack', { - env: { region: "eu-west-1" }, -// crossRegionReferences: true, - stackName: `${environmentName}-frontend-bucket`, - environment: environmentName, - cloudFrontDistribution: Cloudfront.distribution, - }) - - - const FrontEndBucketDeployment = new FrontendStaticContentDeploymentStack(app, 'FrontEndContentDeploymentStack', { - env: { region: "eu-west-1" }, - crossRegionReferences: true, - stackName: 
`${environmentName}-frontend-deployment`, - environment: environmentName, - bucket: FrontEndBucket.bucket, - cloudFrontDistribution: Cloudfront.distribution, - }) - - const FargateCluster = new FargateClusterStack(app, 'FargateClusterStack', { - env: { region: "eu-west-1" }, - stackName: `${environmentName}-fargate-cluster`, - environment: environmentName, - vpc: Network.vpc, - logGroupKmsKey: Kms.cloudwatchLogsKmsKey - }) - - const SemanticApisService = new EcsServiceStack(app, 'SemanticApisEcsService', { - env: { region: "eu-west-1" }, - stackName: `${environmentName}-semantic-apis-service`, - serviceName: 'semantic-apis', - environment: environmentName, - cluster: FargateCluster.fargateCluster, - vpc: Network.vpc, - securityGroup: SecurityGroups.semanticApisServiceSecurityGroup, - imageTag: environmentConfig.services.semantic_apis.image_tag, - allowEcsExec: environmentConfig.services.semantic_apis.allow_ecs_exec, - taskCpu: environmentConfig.services.semantic_apis.cpu_limit, - taskMemory: environmentConfig.services.semantic_apis.memory_limit, - minimumCount: environmentConfig.services.semantic_apis.min_count, - maximumCount: environmentConfig.services.semantic_apis.max_count, - cpuArchitecture: CpuArchitecture.X86_64, - // env_vars: { - // LOG_LEVEL: "debug", - // PORT_LISTEN: "8080", - // REDIS_HOST: "asdasd", - // REDIS_PORT: "6379", - // REDIS_USERNAME: "app", - // REDIS_EXPIRE_TIME: "86400", - // EXTERNAL_API_CALLERID_OID: "1.2.246.562.10.2013112012294919827487", - // EXTERNAL_API_CALLERID_SERVICE: "aoe", - // EXTERNAL_API_OPINTOPOLKU_KOODISTOT: "https://virkailija.opintopolku.fi/koodisto-service/rest/json", - // EXTERNAL_API_FINTO_ASIASANAT: "http://api.finto.fi/rest/v1", - // EXTERNAL_API_SUOMI_KOODISTOT: "https://koodistot.suomi.fi/codelist-api/api/v1/coderegistries", - // EXTERNAL_API_OPINTOPOLKU_ORGANISAATIOT: "https://virkailija.opintopolku.fi/organisaatio-service/rest", - // EXTERNAL_API_OPINTOPOLKU_EPERUSTEET: 
"https://virkailija.opintopolku.fi/eperusteet-service/api" - // }, - env_vars: environmentConfig.services.semantic_apis.env_vars, - parameter_store_secrets: [ - ], - secrets_manager_secrets: [ - "REDIS_PASS", - ], - utilityAccountId: utilityAccountId, - alb: Alb.alb, - listener: Alb.albListener, - listenerPathPatterns: ["/ref/api/v1*"], - healthCheckPath: "/health", - healthCheckGracePeriod: 180, - healthCheckInterval: 5, - healthCheckTimeout: 2, - albPriority: 100, -// domain: environmentConfig.aws.domain, - }) - -// utility account resources.. -} -else if (environmentName == 'utility') -{ - - const Utility = new UtilityStack(app, 'UtilityStack', { - env: { region: "eu-west-1" }, - stackName: `${environmentName}-utility`, - }) - const FrontendEcr = new EcrStack(app, 'FrontendEcrStack', { - env: { region: "eu-west-1" }, - stackName: 'aoe-web-frontend-ecr', - serviceName: 'aoe-web-frontend', - githubActionsDeploymentRole: Utility.githubActionsDeploymentRole - }) - const BackendEcr = new EcrStack(app, 'BackendEcrStack', { - env: { region: "eu-west-1" }, - stackName: 'aoe-web-backend-ecr', - serviceName: 'aoe-web-backend', - githubActionsDeploymentRole: Utility.githubActionsDeploymentRole - }) - const SemanticApisEcr = new EcrStack(app, 'SemanticApisEcrStack', { - env: { region: "eu-west-1" }, - stackName: 'aoe-semantic-apis-ecr', - serviceName: 'aoe-semantic-apis', - githubActionsDeploymentRole: Utility.githubActionsDeploymentRole - }) - const StreamingAppEcr = new EcrStack(app, 'StreamingAppEcrStack', { - env: { region: "eu-west-1" }, - stackName: 'aoe-streaming-app-ecr', - serviceName: 'aoe-streaming-app', - githubActionsDeploymentRole: Utility.githubActionsDeploymentRole - }) - const DataServicesEcr = new EcrStack(app, 'DataServicesEcrStack', { - env: { region: "eu-west-1" }, - stackName: 'aoe-data-services-ecr', - serviceName: 'aoe-data-services', - githubActionsDeploymentRole: Utility.githubActionsDeploymentRole - }) - const DataAnalyticsEcr = new EcrStack(app, 
'DataAnalyticsEcrStack', { - env: { region: "eu-west-1" }, - stackName: 'aoe-data-analytics-ecr', - serviceName: 'aoe-data-analytics', - githubActionsDeploymentRole: Utility.githubActionsDeploymentRole - }) -} \ No newline at end of file diff --git a/aoe-infra/infra/environments/dev.json b/aoe-infra/infra/environments/dev.json deleted file mode 100644 index c9277c40b..000000000 --- a/aoe-infra/infra/environments/dev.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "aws": { - "vpc_cidr": "10.5.0.0/18", - "availability_zones": 2 - }, - "services": { - "semantic_apis": { - "cpu_limit": "512", - "memory_limit": "1024", - "min_count": 1, - "max_count": 1, - "image_tag": "ga-25", - "allow_ecs_exec": true, - "env_vars": { - "LOG_LEVEL": "debug", - "PORT_LISTEN": "8080", - "REDIS_HOST": "semantic-apis-cdb80h.serverless.euw1.cache.amazonaws.com", - "REDIS_PORT": "6379", - "REDIS_USERNAME": "app", - "REDIS_EXPIRE_TIME": "86400", - "REDIS_USE_TLS": "true", - "EXTERNAL_API_CALLERID_OID": "1.2.246.562.10.2013112012294919827487", - "EXTERNAL_API_CALLERID_SERVICE": "aoe", - "EXTERNAL_API_OPINTOPOLKU_KOODISTOT": "https://virkailija.opintopolku.fi/koodisto-service/rest/json", - "EXTERNAL_API_FINTO_ASIASANAT": "http://api.finto.fi/rest/v1", - "EXTERNAL_API_SUOMI_KOODISTOT": "https://koodistot.suomi.fi/codelist-api/api/v1/coderegistries", - "EXTERNAL_API_OPINTOPOLKU_ORGANISAATIOT": "https://virkailija.opintopolku.fi/organisaatio-service/rest", - "EXTERNAL_API_OPINTOPOLKU_EPERUSTEET": "https://virkailija.opintopolku.fi/eperusteet-service/api" - } - } - }, - "aurora_databases": { - "testi": { - "version": "15.4", - "min_size_acu": 0.5, - "max_size_acu": 1, - "performance_insights": false, - "domain_names": ["testi"] - } - }, - "redis_serverless": { - "semantic_apis": { - "major_version": "7", - "storage_min": 1, - "storage_max": 5, - "min_ecpu_per_second": 1000, - "max_ecpu_per_second": 40000 - } - } -} \ No newline at end of file diff --git a/aoe-infra/infra/environments/prod.json 
b/aoe-infra/infra/environments/prod.json deleted file mode 100644 index f16e37ee4..000000000 --- a/aoe-infra/infra/environments/prod.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "aws": { - "vpc_cidr": "10.5.128.0/18", - "availability_zones": 3 - } - -} \ No newline at end of file diff --git a/aoe-infra/infra/environments/qa.json b/aoe-infra/infra/environments/qa.json deleted file mode 100644 index 1919930a1..000000000 --- a/aoe-infra/infra/environments/qa.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "aws": { - "vpc_cidr": "10.5.64.0/18", - "availability_zones": 2 - } - -} \ No newline at end of file diff --git a/aoe-infra/infra/lib/alb-stack.ts b/aoe-infra/infra/lib/alb-stack.ts deleted file mode 100644 index a032d291f..000000000 --- a/aoe-infra/infra/lib/alb-stack.ts +++ /dev/null @@ -1,80 +0,0 @@ -// Create a new cdk stack with a public ALB and a security group for it. Add a https listener with http redirect rule to https. Create outputs for alb and listener. -import * as cdk from 'aws-cdk-lib'; -import * as elbv2 from 'aws-cdk-lib/aws-elasticloadbalancingv2'; -import * as ec2 from 'aws-cdk-lib/aws-ec2'; -import * as route53 from 'aws-cdk-lib/aws-route53'; -import * as targets from 'aws-cdk-lib/aws-route53-targets'; -import * as acm from 'aws-cdk-lib/aws-certificatemanager'; -import * as s3 from 'aws-cdk-lib/aws-s3'; -import * as log from 'aws-cdk-lib/aws-logs'; -import * as iam from 'aws-cdk-lib/aws-iam'; -import { Construct } from 'constructs'; - -interface AlbStackProps extends cdk.StackProps { - vpc: ec2.IVpc, - securityGroupId: string, -// domain: string -} - -export class AlbStack extends cdk.Stack { - readonly alb: elbv2.ApplicationLoadBalancer; - readonly albListener: elbv2.ApplicationListener; - constructor(scope: Construct, id: string, props: AlbStackProps) { - super(scope, id, props); - - // New internet-facing application load balancer, import vpc from the VpcStack - this.alb = new elbv2.ApplicationLoadBalancer(this, 'alb', { - vpc: props.vpc, - vpcSubnets: { - 
onePerAz: true, - subnetType: ec2.SubnetType.PUBLIC - }, - internetFacing: true, - http2Enabled: true, - securityGroup: ec2.SecurityGroup.fromSecurityGroupId( - this, - "ImmutableSecurityGroup", - props.securityGroupId, - { mutable: false } - ) - }); -/* Use this when an actual domain is available for ACM certs - // new alb listener - this.albListener = this.alb.addListener('alb-listener', { - port: 443, - protocol: elbv2.ApplicationProtocol.HTTPS, - open: false, -// certificates: [certificate], - sslPolicy: elbv2.SslPolicy.TLS12, - }); -*/ - - // create ALB default target group - const albDefaultTargetGroup = new elbv2.ApplicationTargetGroup(this, 'alb-target-group', { - vpc: props.vpc, - port: 80, - protocol: elbv2.ApplicationProtocol.HTTP, - targetType: elbv2.TargetType.IP, - healthCheck: { - healthyThresholdCount: 5, - interval: cdk.Duration.seconds(30), - path: '/', - protocol: elbv2.Protocol.HTTP, - timeout: cdk.Duration.seconds(5), - unhealthyThresholdCount: 2 - }, - }); - - // new alb listener (ditch this when an actual domain is available) - this.albListener = this.alb.addListener('alb-listener', { - port: 80, - protocol: elbv2.ApplicationProtocol.HTTP, - open: false, - }); - - this.albListener.addTargetGroups('dummyTargetGroup', { - targetGroups: [albDefaultTargetGroup] - }); - - } -} \ No newline at end of file diff --git a/aoe-infra/infra/lib/front-end-bucket-stack.ts b/aoe-infra/infra/lib/front-end-bucket-stack.ts deleted file mode 100644 index 2a1f4b20e..000000000 --- a/aoe-infra/infra/lib/front-end-bucket-stack.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { Stack, StackProps } from 'aws-cdk-lib'; -import { Construct } from 'constructs'; -import * as s3 from 'aws-cdk-lib/aws-s3'; -import * as cloudfront from 'aws-cdk-lib/aws-cloudfront'; -import * as origins from 'aws-cdk-lib/aws-cloudfront-origins'; - - -interface FrontendBucketStackProps extends StackProps { - // domain: string - environment: string, - cloudFrontDistribution: cloudfront.Distribution, - 
} - -export class FrontendBucketStack extends Stack { - readonly bucket: s3.Bucket; - constructor(scope: Construct, id: string, props: FrontendBucketStackProps) { - super(scope, id, props); - -// FrontEnd S3 bucket - OAI does not support KMS - encryption - this.bucket = new s3.Bucket(this, 'FrontEndBucket', { - bucketName: `aoe-static-content-${props.environment}`, - enforceSSL: true, - // encryption: s3.BucketEncryption.KMS, - // encryptionKey: props.s3KmsKey, - }); - -// CloudFront OAI, Origin & behaviour - const s3oai = new cloudfront.OriginAccessIdentity(this, 'OAI'); - const s3origin = new origins.S3Origin(this.bucket, { originAccessIdentity: s3oai }); - - props.cloudFrontDistribution.addBehavior('/static/*', s3origin, { - viewerProtocolPolicy: cloudfront.ViewerProtocolPolicy.REDIRECT_TO_HTTPS - }); - } -} \ No newline at end of file diff --git a/aoe-infra/infra/lib/redis-stack.ts b/aoe-infra/infra/lib/redis-stack.ts deleted file mode 100644 index 7d22e6342..000000000 --- a/aoe-infra/infra/lib/redis-stack.ts +++ /dev/null @@ -1,78 +0,0 @@ - -import * as cdk from "aws-cdk-lib"; -import { Construct } from "constructs"; -import { aws_elasticache as ElastiCache } from "aws-cdk-lib"; -import * as ec2 from "aws-cdk-lib/aws-ec2"; -import * as secretsmanager from "aws-cdk-lib/aws-secretsmanager"; -import { IKey } from "aws-cdk-lib/aws-kms"; - -interface ElastiCacheServerlessStackProps extends cdk.StackProps { - vpc: ec2.IVpc; - elasticacheName: string; - secret: secretsmanager.Secret; - consumingServiceName: string; - redisKmsKeyId: string; - secretsManagerKmsKeyId: IKey; - securityGroupId: string; - redisMajorVersion: string; - storageMin: number; - storageMax: number; - minEcpuPerSecond: number; - maxEcpuPerSecond: number; -} - -export class ElasticacheServerlessStack extends cdk.Stack { - constructor(scope: Construct, id: string, props: ElastiCacheServerlessStackProps) { - super(scope, id, props); - - const redisUserName = "app" - - - const cfnUser = new 
ElastiCache.CfnUser(this, 'MyCfnUser', { - engine: "redis", - userId: redisUserName, - userName: redisUserName, - noPasswordRequired: false, - passwords: [props.secret.secretValueFromJson('secretkey').unsafeUnwrap()], - accessString: "on ~* +@all", - }); - - const cfnUserGroup = new ElastiCache.CfnUserGroup(this, 'MyCfnUserGroup', { - engine: 'redis', - userGroupId: `${props.elasticacheName}-userGroupId`.toLowerCase(), - userIds: [cfnUser.userId, "default"], - }); - - cfnUserGroup.node.addDependency(cfnUser); - - const elastiCacheSubnetIds : string[] = []; - for (const subnet of props.vpc.isolatedSubnets) { - elastiCacheSubnetIds.push(subnet.subnetId); - } - - - const elastiCacheServlerless = new ElastiCache.CfnServerlessCache(this, "ServerlessCache", { - engine: "redis", - serverlessCacheName: props.elasticacheName.toLowerCase(), - cacheUsageLimits: { - dataStorage: { - unit: "GB", - - maximum: props.storageMax, - minimum: props.storageMin, - }, - ecpuPerSecond: { - maximum: props.maxEcpuPerSecond, - minimum: props.minEcpuPerSecond, - }, - }, - kmsKeyId: props.redisKmsKeyId, - majorEngineVersion: props.redisMajorVersion, - securityGroupIds: [props.securityGroupId], - subnetIds: elastiCacheSubnetIds, - userGroupId: `${props.elasticacheName}-userGroupId`.toLowerCase(), - }); - - elastiCacheServlerless.node.addDependency(cfnUserGroup) - } -} \ No newline at end of file diff --git a/aoe-infra/infra/lib/secrets-manager-stack.ts b/aoe-infra/infra/lib/secrets-manager-stack.ts deleted file mode 100644 index c106eadeb..000000000 --- a/aoe-infra/infra/lib/secrets-manager-stack.ts +++ /dev/null @@ -1,30 +0,0 @@ -import * as cdk from "aws-cdk-lib"; -import { Construct } from "constructs"; -import * as secretsmanager from "aws-cdk-lib/aws-secretsmanager"; -import { IKey } from "aws-cdk-lib/aws-kms"; - -interface SecretManagerStackProps extends cdk.StackProps { - kmsKey: IKey; -} - -// Stack for secrets generated on the fly (mostly resources that don't support Parameter Store) 
- -export class SecretManagerStack extends cdk.Stack { - public readonly semanticApisPassword: secretsmanager.Secret; - constructor(scope: Construct, id: string, props: SecretManagerStackProps) { - super(scope, id, props); - - this.semanticApisPassword = new secretsmanager.Secret(this, 'secret', { - secretName: '/service/semantic-apis/REDIS_PASS', - generateSecretString: { - secretStringTemplate: JSON.stringify({ }), - generateStringKey: 'secretkey', - passwordLength: 16, - excludeCharacters: '@%*()_+=`~{}|[]\\:";\'?,./' - }, - }); - } -} - - - diff --git a/aoe-infra/infra/lib/security-groups.ts b/aoe-infra/infra/lib/security-groups.ts deleted file mode 100644 index 50ab35ff6..000000000 --- a/aoe-infra/infra/lib/security-groups.ts +++ /dev/null @@ -1,84 +0,0 @@ -import * as cdk from 'aws-cdk-lib/core'; -import * as ec2 from 'aws-cdk-lib/aws-ec2'; -import { CfnOutput, StackProps } from 'aws-cdk-lib'; -import { Construct } from 'constructs'; - -/* -Security Groups for resources are defined here. 
-Security Groups defined in a centralized manner like this won't generate -an "order of creation" - dependency between the services / databases, -but adds a little operational complexity (when creating a new -service / database, Security Groups and SG rules must first be defined here) -*/ - -interface SecurityGroupStackProps extends StackProps { - vpc: ec2.IVpc -} - -export class SecurityGroupStack extends cdk.Stack { - public readonly semanticApisServiceSecurityGroup: ec2.SecurityGroup; - public readonly albSecurityGroup: ec2.SecurityGroup; - public readonly testAuroraSecurityGroup: ec2.SecurityGroup; - public readonly semanticApisRedisSecurityGroup: ec2.SecurityGroup; - public readonly bastionSecurityGroup: ec2.SecurityGroup; - - constructor(scope: Construct, id: string, props: SecurityGroupStackProps) { - super(scope, id, props); - -// Security Groups - this.bastionSecurityGroup = new ec2.SecurityGroup(this, 'BastionSecurityGroup', { - vpc: props.vpc, - allowAllOutbound: true, - }); - - this.albSecurityGroup = new ec2.SecurityGroup(this, 'AlbSecurityGroup', { - vpc: props.vpc, - allowAllOutbound: true, - }); - - this.semanticApisServiceSecurityGroup = new ec2.SecurityGroup(this, 'SemanticApisServiceSecurityGroup', { - vpc: props.vpc, - allowAllOutbound: true, - }); - - this.testAuroraSecurityGroup = new ec2.SecurityGroup(this, 'AuroraTestSecurityGroup', { - vpc: props.vpc, - allowAllOutbound: true, - }); - - this.semanticApisRedisSecurityGroup = new ec2.SecurityGroup(this, 'SemanticApisRedisSecurityGroup', { - vpc: props.vpc, - allowAllOutbound: true, - }); - -// Security Group rules - - this.semanticApisRedisSecurityGroup.addIngressRule( - this.semanticApisServiceSecurityGroup, - ec2.Port.tcp(6379) - ); - this.semanticApisRedisSecurityGroup.addIngressRule( - this.bastionSecurityGroup, - ec2.Port.tcp(6379) - ); - this.semanticApisServiceSecurityGroup.addIngressRule( - this.albSecurityGroup, - ec2.Port.tcp(8080) - ); - 
this.semanticApisServiceSecurityGroup.addIngressRule( - this.bastionSecurityGroup, - ec2.Port.tcp(8080) - ); - // allow port 80 to alb albSecuritygroup from Internet - this.albSecurityGroup.addIngressRule( - ec2.Peer.anyIpv4(), - ec2.Port.tcp(80) - ); - -// Output all security group IDs so that they can be consumed from ECS service/RDS stacks - new CfnOutput(this, 'albSecurityGroupId', { value: this.albSecurityGroup.securityGroupId }); - new CfnOutput(this, 'semanticApisServiceSecurityGroup', { value: this.semanticApisServiceSecurityGroup.securityGroupId }); - new CfnOutput(this, 'testAuroraSecurityGroup2Id', { value: this.testAuroraSecurityGroup.securityGroupId }); - } - -} \ No newline at end of file diff --git a/aoe-infra/infra/lib/utility-stack.ts b/aoe-infra/infra/lib/utility-stack.ts deleted file mode 100644 index e1db7b6fc..000000000 --- a/aoe-infra/infra/lib/utility-stack.ts +++ /dev/null @@ -1,36 +0,0 @@ -import * as iam from 'aws-cdk-lib/aws-iam'; -import * as cdk from 'aws-cdk-lib'; -import { Construct } from 'constructs'; - -export class UtilityStack extends cdk.Stack { - public readonly githubActionsDeploymentRole: iam.Role; - constructor(scope: Construct, id: string, props?: cdk.StackProps) { - super(scope, id, props); - - -// Github Actions OIDC role - const githubOidcProvider = new iam.OpenIdConnectProvider(this, `OvaraUtilityGithubOidcProvider`, { - url: 'https://token.actions.githubusercontent.com', - thumbprints: [ - '6938fd4d98bab03faadb97b34396831e3780aea1', - '1c58a3a8518e8759bf075b76b750d4f2df264fcd' - ], - clientIds: ['sts.amazonaws.com'], - }); - - this.githubActionsDeploymentRole = new iam.Role(this, `AoeUtilityGithubActionsUser`, { - assumedBy: new iam.WebIdentityPrincipal( - githubOidcProvider.openIdConnectProviderArn, - { - StringLike: { - 'token.actions.githubusercontent.com:sub': 'repo:Opetushallitus/aoe:*', - 'token.actions.githubusercontent.com:aud': 'sts.amazonaws.com', - }, - }, - ), - roleName: 
'aoe-utility-github-actions-deployment-role', - }); - - } -} - diff --git a/aoe-infra/infra/scripts/bastion_userdata.sh b/aoe-infra/infra/scripts/bastion_userdata.sh deleted file mode 100644 index 6ea0be962..000000000 --- a/aoe-infra/infra/scripts/bastion_userdata.sh +++ /dev/null @@ -1,8 +0,0 @@ -# Note: addUserData() automatically adds shebang at the start of the file. -mkfs -t xfs /dev/nvme1n1 -mkdir /data -mount /dev/nvme1n1 /data -chmod 770 /data -yum update -yum install -y htop python3-pip python3-wheel jq bash tmux nohup -amazon-linux-extras install redis6 \ No newline at end of file diff --git a/aoe-infra/infra/jest.config.js b/aoe-infra/jest.config.js similarity index 100% rename from aoe-infra/infra/jest.config.js rename to aoe-infra/jest.config.js diff --git a/aoe-infra/lib/S3Stack.ts b/aoe-infra/lib/S3Stack.ts new file mode 100644 index 000000000..6eab3d6cf --- /dev/null +++ b/aoe-infra/lib/S3Stack.ts @@ -0,0 +1,49 @@ +import * as cdk from 'aws-cdk-lib/core' +import { RemovalPolicy } from 'aws-cdk-lib/core' +import * as s3 from 'aws-cdk-lib/aws-s3' +import { BucketAccessControl, ObjectOwnership } from 'aws-cdk-lib/aws-s3' +import { Construct } from 'constructs' +import { StackProps } from "aws-cdk-lib"; + + +interface S3StackProps extends StackProps { + aoeBucketName: string, + aoePdfBucketName: string + aoeThumbnailBucketName: string + environment: string +} + +export class S3Stack extends cdk.Stack { + public readonly aoeBucket: s3.Bucket + public readonly aoePdfBucket: s3.Bucket + public readonly aoeThumbnailBucket: s3.Bucket + + constructor(scope: Construct, id: string, props: S3StackProps) { + super(scope, id, props) + + this.aoeBucket = this.newBucket(props.aoeBucketName, props) + this.aoePdfBucket = this.newBucket(props.aoePdfBucketName, props) + this.aoeThumbnailBucket = this.newBucket(props.aoeThumbnailBucketName, props) + } + + newBucket(bucketName: string, props: S3StackProps): s3.Bucket { + return new s3.Bucket(this, `${bucketName}Bucket`, { 
+ bucketName: `${bucketName}-${props.environment}`, + accessControl: BucketAccessControl.PRIVATE, + blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, + versioned: true, // Required for taking backups + objectOwnership: ObjectOwnership.BUCKET_OWNER_PREFERRED, // Required for restoring backups + lifecycleRules: [ + { + id: 'ExpireOldVersions', + noncurrentVersionExpiration: cdk.Duration.days(30), // Retain old versions for 30 days + } + ], + removalPolicy: RemovalPolicy.RETAIN + }) + } + + allBuckets(): s3.Bucket[] { + return Object.values(this).flatMap((v) => (v instanceof s3.Bucket ? v : [])) + } +} diff --git a/aoe-infra/lib/alb-stack.ts b/aoe-infra/lib/alb-stack.ts new file mode 100644 index 000000000..ba816e73f --- /dev/null +++ b/aoe-infra/lib/alb-stack.ts @@ -0,0 +1,86 @@ +// Create a new cdk stack with a public ALB and a security group for it. Add a https listener with http redirect rule to https. Create outputs for alb and listener. +import * as cdk from 'aws-cdk-lib'; +import * as elbv2 from 'aws-cdk-lib/aws-elasticloadbalancingv2'; +import * as ec2 from 'aws-cdk-lib/aws-ec2'; +import * as route53 from 'aws-cdk-lib/aws-route53'; +import * as targets from 'aws-cdk-lib/aws-route53-targets'; +import * as acm from 'aws-cdk-lib/aws-certificatemanager'; +import { Construct } from 'constructs'; + +interface AlbStackProps extends cdk.StackProps { + vpc: ec2.IVpc, + securityGroupId: string, + domain: string + publicHostedZone: route53.IHostedZone +} + +export class AlbStack extends cdk.Stack { + readonly alb: elbv2.ApplicationLoadBalancer; + readonly albListener: elbv2.ApplicationListener; + readonly certificate: acm.ICertificate; + constructor(scope: Construct, id: string, props: AlbStackProps) { + super(scope, id, props); + + // New internet-facing application load balancer, import vpc from the VpcStack + this.alb = new elbv2.ApplicationLoadBalancer(this, 'alb', { + vpc: props.vpc, + vpcSubnets: { + onePerAz: true, + subnetType: ec2.SubnetType.PUBLIC + }, + 
internetFacing: true, + http2Enabled: true, + securityGroup: ec2.SecurityGroup.fromSecurityGroupId( + this, + "ImmutableSecurityGroup", + props.securityGroupId, + { mutable: false } + ) + }); + // Use this when an actual domain is available for ACM certs + // new alb listener + this.certificate = new acm.Certificate(this, 'Certificate', { + domainName: `${props.domain}`, + validation: acm.CertificateValidation.fromDns(props.publicHostedZone), + subjectAlternativeNames: [`alb.${props.domain}`], + }); + + + // route53 alias record for cloudfront + new route53.ARecord(this, 'AliasRecord', { + zone: props.publicHostedZone, + recordName: `alb.${props.domain}`, + target: route53.RecordTarget.fromAlias(new targets.LoadBalancerTarget(this.alb)), + }); + + this.albListener = this.alb.addListener('alb-listener', { + port: 443, + protocol: elbv2.ApplicationProtocol.HTTPS, + open: false, + certificates: [this.certificate], + sslPolicy: elbv2.SslPolicy.TLS12, + }); + + + // create ALB default target group + const albDefaultTargetGroup = new elbv2.ApplicationTargetGroup(this, 'alb-target-group', { + vpc: props.vpc, + port: 80, + protocol: elbv2.ApplicationProtocol.HTTP, + targetType: elbv2.TargetType.IP, + healthCheck: { + healthyThresholdCount: 5, + interval: cdk.Duration.seconds(30), + path: '/', + protocol: elbv2.Protocol.HTTP, + timeout: cdk.Duration.seconds(5), + unhealthyThresholdCount: 2 + }, + }); + + this.albListener.addTargetGroups('dummyTargetGroup', { + targetGroups: [albDefaultTargetGroup] + }); + + } +} diff --git a/aoe-infra/lib/aurora-serverless-common.ts b/aoe-infra/lib/aurora-serverless-common.ts new file mode 100644 index 000000000..f79f0340a --- /dev/null +++ b/aoe-infra/lib/aurora-serverless-common.ts @@ -0,0 +1,32 @@ +import * as cdk from 'aws-cdk-lib/core'; +import { StackProps } from 'aws-cdk-lib'; +import { Construct } from 'constructs'; +import { IVpc, SubnetType } from 'aws-cdk-lib/aws-ec2'; +import { SubnetGroup } from 'aws-cdk-lib/aws-rds'; + +/* + 
Resources that are shared between all Aurora Databases +*/ + +interface AuroraCommonStackProps extends StackProps { + vpc: IVpc +} + +export class AuroraCommonStack extends cdk.Stack { + public readonly auroraSubnetGroup: SubnetGroup; + + + + constructor(scope: Construct, id: string, props: AuroraCommonStackProps) { + super(scope, id, props); + + this.auroraSubnetGroup = new SubnetGroup(this, 'AuroraSubnetGroup', { + description: 'Aurora Subnet Group', + vpc: props.vpc, + vpcSubnets: { + onePerAz: false, + subnetType: SubnetType.PRIVATE_ISOLATED, + }, + }) + } +} \ No newline at end of file diff --git a/aoe-infra/infra/lib/aurora-serverless-database.ts b/aoe-infra/lib/aurora-serverless-database.ts similarity index 62% rename from aoe-infra/infra/lib/aurora-serverless-database.ts rename to aoe-infra/lib/aurora-serverless-database.ts index 64eb7a0c8..76fb3b2e4 100644 --- a/aoe-infra/infra/lib/aurora-serverless-database.ts +++ b/aoe-infra/lib/aurora-serverless-database.ts @@ -1,32 +1,31 @@ -import { Stack, StackProps, Fn, Duration, SecretValue } from 'aws-cdk-lib'; +import { Stack, StackProps } from 'aws-cdk-lib'; import { Construct } from 'constructs'; -import { AuroraPostgresEngineVersion, DatabaseCluster, DatabaseClusterEngine, ClusterInstance, DBClusterStorageType, SubnetGroup, ParameterGroup, CaCertificate } from 'aws-cdk-lib/aws-rds'; -import { HostedZone, CnameRecord } from 'aws-cdk-lib/aws-route53' -import { IVpc, ISecurityGroup, SubnetType } from 'aws-cdk-lib/aws-ec2'; +import { AuroraPostgresEngineVersion, DatabaseCluster, DatabaseClusterEngine, ClusterInstance, DBClusterStorageType, SubnetGroup, ParameterGroup, CaCertificate, Endpoint } from 'aws-cdk-lib/aws-rds'; +import { IVpc, ISecurityGroup } from 'aws-cdk-lib/aws-ec2'; import { Key } from 'aws-cdk-lib/aws-kms'; +import { Secret } from 'aws-cdk-lib/aws-secretsmanager'; interface AuroraDatabaseProps extends StackProps { environment: string; auroraVersion: string; - route53HostedZone: string; clusterName: 
string; minSizeAcu: number; maxSizeAcu: number; performanceInsights: boolean; - domainNames: string[]; vpc: IVpc, securityGroup: ISecurityGroup, kmsKey: Key, -}; + auroraDbPassword: Secret; + subnetGroup: SubnetGroup; +} export class AuroraDatabaseStack extends Stack { + public readonly endPoint: Endpoint; constructor(scope: Construct, id: string, props: AuroraDatabaseProps) { super(scope, id, props); - const hostedZone = HostedZone.fromLookup(this, "HostedZone", { - domainName: props.route53HostedZone, - }) + const parameterGroup = new ParameterGroup(this, 'parameterGroup', { - engine: DatabaseClusterEngine.auroraPostgres({version: AuroraPostgresEngineVersion.of(props.auroraVersion, props.auroraVersion.split('.')[0])}), + engine: DatabaseClusterEngine.auroraPostgres({ version: AuroraPostgresEngineVersion.of(props.auroraVersion, props.auroraVersion.split('.')[0]) }), parameters: { 'shared_preload_libraries': 'pg_stat_statements,pg_hint_plan,auto_explain,pg_cron', 'auto_explain.log_analyze': '1', @@ -47,18 +46,18 @@ export class AuroraDatabaseStack extends Stack { const auroraCluster = new DatabaseCluster(this, `${props.environment}-${props.clusterName}`, { vpc: props.vpc, - engine: DatabaseClusterEngine.auroraPostgres({version: AuroraPostgresEngineVersion.of(props.auroraVersion, props.auroraVersion.split('.')[0])}), + engine: DatabaseClusterEngine.auroraPostgres({ version: AuroraPostgresEngineVersion.of(props.auroraVersion, props.auroraVersion.split('.')[0]) }), writer: ClusterInstance.serverlessV2('writer', { enablePerformanceInsights: props.performanceInsights, caCertificate: CaCertificate.RDS_CA_RSA4096_G1 }), - readers: props.environment === 'prod'? [ + readers: props.environment === 'prod' ? 
[ ClusterInstance.serverlessV2('reader', { enablePerformanceInsights: props.performanceInsights, caCertificate: CaCertificate.RDS_CA_RSA4096_G1, scaleWithWriter: true }), - ]: [], + ] : [], clusterIdentifier: `${props.environment}-${props.clusterName}`, serverlessV2MinCapacity: props.minSizeAcu, serverlessV2MaxCapacity: props.maxSizeAcu, @@ -69,19 +68,13 @@ export class AuroraDatabaseStack extends Stack { storageEncryptionKey: props.kmsKey, deletionProtection: true, securityGroups: [props.securityGroup], - vpcSubnets: SubnetType.PRIVATE_ISOLATED, + subnetGroup: props.subnetGroup, credentials: { - username: SecretValue.ssmSecure(`/auroradbs/${props.clusterName}/master-user-username`) - password: SecretValue.ssmSecure(`/auroradbs/${props.clusterName}/master-user-password`) + username: 'aoe_db_admin', + password: props.auroraDbPassword.secretValueFromJson('password') } }) - for (let i in props.domainNames) { - new CnameRecord(this, `${props.environment}-cname-${props.domainNames[i].replace('.', '-')}`, { - domainName: auroraCluster.clusterEndpoint.hostname, - zone: hostedZone, - recordName: `${props.domainNames[i]}.db.${props.route53HostedZone}`, - ttl: Duration.minutes(1) - }) - } + + this.endPoint = auroraCluster.clusterEndpoint } -} \ No newline at end of file +} diff --git a/aoe-infra/infra/lib/bastion-stack.ts b/aoe-infra/lib/bastion-stack.ts similarity index 81% rename from aoe-infra/infra/lib/bastion-stack.ts rename to aoe-infra/lib/bastion-stack.ts index f96102fc7..7c4f6ef19 100644 --- a/aoe-infra/infra/lib/bastion-stack.ts +++ b/aoe-infra/lib/bastion-stack.ts @@ -30,7 +30,20 @@ export class BastionStack extends Stack { 'ssm:GetDeployablePatchSnapshotForInstance', 'ssm:GetDocument', 'ssm:UpdateInstanceAssociationStatus', - 'ssm:PutInventory' + 'ssm:PutInventory', + 'aoss:DescribeCollectionItems', + 'aoss:ListCollections', + 'aoss:CreateIndex', + 'aoss:DeleteIndex', + 'aoss:UpdateIndex', + 'aoss:DescribeIndex', + 'aoss:ReadDocument', + 'aoss:WriteDocument', + 
'kafka:GetBootstrapBrokers', + 'elasticfilesystem:DescribeFileSystems', + 'elasticfilesystem:ClientWrite', + 'elasticfilesystem:ClientMount', + 'elasticfilesystem:DescribeMountTargets' ], resources: ['*'] }) @@ -61,7 +74,6 @@ export class BastionStack extends Stack { const userDataScript = readFileSync('./scripts/bastion_userdata.sh', 'utf8'); -// bastionHost.instance.addUserData('yum install -y mariadb105 mongodb-mongosh-shared-openssl3 mongodb-database-tools postgresql15 redis7 htop python3-pip python3-wheel jq bash tmux nohup'); bastionHost.instance.addUserData(userDataScript); bastionHost.role.addToPrincipalPolicy(allowSessionManager) bastionHost.role.addToPrincipalPolicy(allowKmsDecrypt(props.kmsKey.keyArn)) diff --git a/aoe-infra/lib/cloudfront-certificate-stack.ts b/aoe-infra/lib/cloudfront-certificate-stack.ts new file mode 100644 index 000000000..2860042d5 --- /dev/null +++ b/aoe-infra/lib/cloudfront-certificate-stack.ts @@ -0,0 +1,23 @@ +import * as acm from 'aws-cdk-lib/aws-certificatemanager'; +import * as cdk from 'aws-cdk-lib'; +import * as route53 from 'aws-cdk-lib/aws-route53'; +import { Construct } from 'constructs'; + +interface CloudFrontCertificateStackProps extends cdk.StackProps { + domain: string + hostedZone: route53.IHostedZone +} + +export class CloudFrontCertificateStack extends cdk.Stack { + readonly certificate: acm.ICertificate; + constructor(scope: Construct, id: string, props: CloudFrontCertificateStackProps) { + super(scope, id, props); + + + this.certificate = new acm.Certificate(this, 'CloudfrontCertificate', { + domainName: props.domain, + validation: acm.CertificateValidation.fromDns(props.hostedZone), + }); + + } +} diff --git a/aoe-infra/infra/lib/cloudfront-stack.ts b/aoe-infra/lib/cloudfront-stack.ts similarity index 57% rename from aoe-infra/infra/lib/cloudfront-stack.ts rename to aoe-infra/lib/cloudfront-stack.ts index 772d69591..48e4eae20 100644 --- a/aoe-infra/infra/lib/cloudfront-stack.ts +++ 
b/aoe-infra/lib/cloudfront-stack.ts @@ -4,44 +4,52 @@ import { Construct } from 'constructs'; import * as cloudfront from 'aws-cdk-lib/aws-cloudfront'; import * as origins from 'aws-cdk-lib/aws-cloudfront-origins'; import * as elbv2 from 'aws-cdk-lib/aws-elasticloadbalancingv2'; -// import * as route53 from 'aws-cdk-lib/aws-route53'; -// import * as targets from 'aws-cdk-lib/aws-route53-targets'; +import * as route53 from 'aws-cdk-lib/aws-route53'; +import * as targets from 'aws-cdk-lib/aws-route53-targets'; +import * as acm from 'aws-cdk-lib/aws-certificatemanager'; interface CloudfrontStackProps extends StackProps { -// domain: string + domain: string + publicHostedZone: route53.IHostedZone // environment: string, alb: elbv2.ILoadBalancerV2 + certificate: acm.ICertificate // bucket: s3.Bucket, } export class CloudfrontStack extends Stack { readonly distribution: cloudfront.Distribution; + readonly certificate: acm.Certificate; constructor(scope: Construct, id: string, props: CloudfrontStackProps) { super(scope, id, props); -// // new certificate - // const certificate = new acm.Certificate(this, 'Certificate', { - // domainName: props.domain, - // validation: acm.CertificateValidation.fromDns(), - // }); - + // Cloudfront Distribution this.distribution = new cloudfront.Distribution(this, 'Distribution', { + domainNames: [props.domain], + certificate: props.certificate, defaultBehavior: { - origin: new origins.LoadBalancerV2Origin(props.alb), + origin: new origins.LoadBalancerV2Origin(props.alb, { + protocolPolicy: cloudfront.OriginProtocolPolicy.HTTPS_ONLY + }), viewerProtocolPolicy: cloudfront.ViewerProtocolPolicy.REDIRECT_TO_HTTPS, + allowedMethods: cloudfront.AllowedMethods.ALLOW_ALL, + cachePolicy: cloudfront.CachePolicy.CACHING_DISABLED, + originRequestPolicy: cloudfront.OriginRequestPolicy.ALL_VIEWER, }, }) + // route53 alias record for cloudfront + new route53.ARecord(this, 'AliasRecord', { + zone: props.publicHostedZone, + recordName: props.domain, + 
target: route53.RecordTarget.fromAlias(new targets.CloudFrontTarget(this.distribution)), + }); + // this.distribution.addBehavior('/static/*', s3origin, { viewerProtocolPolicy: cloudfront.ViewerProtocolPolicy.REDIRECT_TO_HTTPS}); - // route53 alias record for cloudfront - // new route53.ARecord(this, 'AliasRecord', { - // zone: route53.HostedZone.fromLookup(this, 'Domain', { domainName: props.domain }), - // target: route53.RecordTarget.fromAlias(new targets.CloudFrontTarget(distribution)), - // }); // Add s3 bucket as a new origin for the CloudFront distribution // this.distribution.addBehavior('/static/*', new origins.S3Origin(props.bucket ),{ diff --git a/aoe-infra/lib/documentdb-stack.ts b/aoe-infra/lib/documentdb-stack.ts new file mode 100644 index 000000000..8ac85851c --- /dev/null +++ b/aoe-infra/lib/documentdb-stack.ts @@ -0,0 +1,42 @@ +import * as cdk from "aws-cdk-lib"; +import { Construct } from "constructs"; +import { InstanceType, IVpc, SecurityGroup } from "aws-cdk-lib/aws-ec2"; +import { DatabaseCluster, Endpoint } from "aws-cdk-lib/aws-docdb"; +import { Secret } from "aws-cdk-lib/aws-secretsmanager"; +import { Key } from "aws-cdk-lib/aws-kms"; + +interface DocumentDbStackProps extends cdk.StackProps { + instances: number; + engineVersion: string; + vpc: IVpc; + securityGroup: SecurityGroup; + env: { region: string }; + user: Secret, + kmsKey: Key, + instanceType: InstanceType +} +export class DocumentdbStack extends cdk.Stack { + private docdbcluster: DatabaseCluster; + public readonly clusterEndpoint: Endpoint; + + constructor(scope: Construct, id: string, props: DocumentDbStackProps) { + super(scope, id, props) + this.docdbcluster = new DatabaseCluster(this, 'AoeDocumentDB', { + masterUser: { + username: 'docdbuser', + password: props.user.secretValueFromJson('password') + }, + engineVersion: props.engineVersion, + instances: props.instances, + instanceType: props.instanceType, + vpc: props.vpc, + vpcSubnets: {subnets: props.vpc.isolatedSubnets }, 
+ securityGroup: props.securityGroup, + deletionProtection: true, + kmsKey: props.kmsKey + }) + + this.clusterEndpoint = this.docdbcluster.clusterEndpoint + + } +} \ No newline at end of file diff --git a/aoe-infra/infra/lib/ecr-stack.ts b/aoe-infra/lib/ecr-stack.ts similarity index 100% rename from aoe-infra/infra/lib/ecr-stack.ts rename to aoe-infra/lib/ecr-stack.ts diff --git a/aoe-infra/infra/lib/ecs-service.ts b/aoe-infra/lib/ecs-service.ts similarity index 59% rename from aoe-infra/infra/lib/ecs-service.ts rename to aoe-infra/lib/ecs-service.ts index dcc66cbbc..06d387cfa 100644 --- a/aoe-infra/infra/lib/ecs-service.ts +++ b/aoe-infra/lib/ecs-service.ts @@ -1,19 +1,26 @@ import * as _ from "lodash" -import { Stack, StackProps, Duration, Fn, CfnOutput, SecretValue, RemovalPolicy } from "aws-cdk-lib" +import { Stack, StackProps, Duration, CfnOutput, RemovalPolicy } from "aws-cdk-lib" import { Construct } from "constructs" import { LogGroup } from "aws-cdk-lib/aws-logs" import { ICluster, ContainerImage, AwsLogDriver, Secret, FargatePlatformVersion, CpuArchitecture, OperatingSystemFamily, UlimitName, FargateService, TaskDefinition, Compatibility } from "aws-cdk-lib/aws-ecs" import { IVpc, ISecurityGroup } from "aws-cdk-lib/aws-ec2" import { ApplicationListenerRule, ApplicationProtocol, ApplicationTargetGroup, IApplicationListener, IApplicationLoadBalancer, ListenerCondition, TargetGroupLoadBalancingAlgorithmType } from "aws-cdk-lib/aws-elasticloadbalancingv2" import { Repository } from "aws-cdk-lib/aws-ecr" -import { StringParameter } from "aws-cdk-lib/aws-ssm" import { AdjustmentType } from "aws-cdk-lib/aws-autoscaling" import * as ssm from "aws-cdk-lib/aws-ssm" import * as secretsmanager from "aws-cdk-lib/aws-secretsmanager" +import * as iam from "aws-cdk-lib/aws-iam"; +import * as servicediscovery from 'aws-cdk-lib/aws-servicediscovery'; +import { PrivateDnsNamespace } from "aws-cdk-lib/aws-servicediscovery"; +import { Volume } from 
"aws-cdk-lib/aws-ecs/lib/base/task-definition"; +import { MountPoint } from "aws-cdk-lib/aws-ecs/lib/container-definition"; +import { SecretEntry } from "./secrets-manager-stack"; interface EcsServiceStackProps extends StackProps { environment: string + // Allow any in this case, since we don't want to explicitely type json data + /* eslint-disable @typescript-eslint/no-explicit-any */ env_vars: any cluster: ICluster vpc: IVpc @@ -29,8 +36,7 @@ interface EcsServiceStackProps extends StackProps { imageTag: string allowEcsExec: boolean parameter_store_secrets: string[] - secrets_manager_secrets: any -// parameter_store_parameters: string[] + secrets_manager_secrets: SecretEntry[] cpuArchitecture: CpuArchitecture minimumCount: number maximumCount: number @@ -38,26 +44,17 @@ interface EcsServiceStackProps extends StackProps { healthCheckInterval: number healthCheckTimeout: number securityGroup: ISecurityGroup + iAmPolicyStatements?: iam.PolicyStatement[] + privateDnsNamespace: PrivateDnsNamespace + efs?: { + mountPoint: MountPoint + volume: Volume + } } export class EcsServiceStack extends Stack { constructor(scope: Construct, id: string, props: EcsServiceStackProps) { super(scope, id, props) - // if (this.node.tryGetContext("ImageTag") == undefined) { - // console.error("You must define an ImageTag (ga-xxx), latest will not exist") - // process.exit(1) - // } - - // const utilityAccountId = ssm.StringParameter.fromSecureStringParameterAttributes( - // this, - // 'UtilityAccountIdParameter', - // { - // version: 0, - // parameterName: `/aoe/utility_account_id`, - // }) - - // const utilityAccountId = SecretValue.ssmSecure('/aoe/utility_account_id').unsafeUnwrap() - const ImageRepository = Repository.fromRepositoryAttributes( this, @@ -67,87 +64,58 @@ export class EcsServiceStack extends Stack { }) - const ServiceLogGroup = new LogGroup( this, "LogGroup", { + const ServiceLogGroup = new LogGroup(this, "LogGroup", { logGroupName: `/service/${props.serviceName}`, 
removalPolicy: RemovalPolicy.DESTROY, }) - // const secretsManagerSecrets = apikey: ecs.Secret.fromSecretsManager(secret), - - // const parameterStoreSecrets = props.parameter_store_secrets.reduce( - // (secrets, secretName) => - // Object.assign(secrets, { - // [secretName]: Secret.fromSsmParameter( - // StringParameter.fromSecureStringParameterAttributes( - // this, - // `${_.upperFirst(_.camelCase(secretName))}Parameter`, - // { - // version: 0, - // parameterName: `/service/${props.serviceName}/${secretName}`, - // } - // ) - // ), - // }), - // {} - // ), - const secrets = { // SSM Parameter Store secure strings ...props.parameter_store_secrets.reduce((secretsAcc, secretName) => { - const ssmParameter = ssm.StringParameter.fromSecureStringParameterAttributes( - this, - `${_.upperFirst(_.camelCase(secretName))}Parameter`, - { - version: 0, - parameterName: `/service/${props.serviceName}/${secretName}`, - } - ); - return Object.assign(secretsAcc, { - [secretName]: Secret.fromSsmParameter(ssmParameter), - }); + const ssmParameter = ssm.StringParameter.fromSecureStringParameterAttributes( + this, + `${_.upperFirst(_.camelCase(secretName))}Parameter`, + { + version: 0, + parameterName: `/service/${props.serviceName}/${secretName}`, + } + ); + return Object.assign(secretsAcc, { + [secretName]: Secret.fromSsmParameter(ssmParameter), + }); }, {}), - ...props.secrets_manager_secrets.reduce((secretsAcc, secretName) => { + ...(props.secrets_manager_secrets || []).reduce((secretsAcc, se) => { + const secret = secretsmanager.Secret.fromSecretNameV2( - this, - `${_.upperFirst(_.camelCase(secretName))}Secret`, - `/service/${props.serviceName}/${secretName}` + this, + `${_.upperFirst(_.camelCase(se.path))}Secret`, + se.path ); + return Object.assign(secretsAcc, { - [secretName]: Secret.fromSecretsManager(secret, "secretkey"), + [se.envVarName]: Secret.fromSecretsManager(secret, se.secretKey), }); - }, {}), - }; - - - // const env_vars = { - // // SSM Parameter Store plain 
text parameters - // ...props.parameter_store_parameters.reduce((parameterAcc, parameterName) => { - // // const ssmParameter = ssm.StringParameter.fromStringParameterAttributes( - // // this, - // // `${_.upperFirst(_.camelCase(parameterName))}Parameter`, - // // { - // // version: 0, - // // parameterName: `/service/${props.serviceName}/${parameterName}`, - // // } - // // ); - // return Object.assign(parameterAcc, { - // [parameterName]: ssm.StringParameter.valueForStringParameter( - // this, `/service/${props.serviceName}/${parameterName}`), - // }); - // }, {}), - // }; - + }, {}) + }; const taskDefinition = new TaskDefinition(this, `${props.serviceName}`, { cpu: props.taskCpu, memoryMiB: props.taskMemory, compatibility: Compatibility.FARGATE, runtimePlatform: { - cpuArchitecture:props.cpuArchitecture, + cpuArchitecture: props.cpuArchitecture, operatingSystemFamily: OperatingSystemFamily.LINUX - }, + } }) - taskDefinition.addContainer(`${props.serviceName}`, { + + if (props.iAmPolicyStatements && Array.isArray(props.iAmPolicyStatements)) { + props.iAmPolicyStatements.forEach(statement => { + taskDefinition.addToTaskRolePolicy(statement); + }); + } + + const container = taskDefinition.addContainer(`${props.serviceName}`, { + image: ContainerImage.fromEcrRepository( ImageRepository, props.imageTag @@ -168,6 +136,12 @@ export class EcsServiceStack extends Stack { } ] }) + + if (props.efs) { + taskDefinition.addVolume(props.efs.volume); + container.addMountPoints(props.efs.mountPoint); + } + const ecsService = new FargateService( this, "EcsFargateService", @@ -177,9 +151,15 @@ export class EcsServiceStack extends Stack { taskDefinition, platformVersion: FargatePlatformVersion.LATEST, healthCheckGracePeriod: Duration.seconds(props.healthCheckGracePeriod), - enableExecuteCommand: props.allowEcsExec? 
true: false, + enableExecuteCommand: props.allowEcsExec, circuitBreaker: { rollback: true }, - securityGroups: [props.securityGroup] + securityGroups: [props.securityGroup], + cloudMapOptions: { + name: props.serviceName, + cloudMapNamespace: props.privateDnsNamespace, + dnsRecordType: servicediscovery.DnsRecordType.A, + dnsTtl: Duration.seconds(15), + }, } ) @@ -189,7 +169,7 @@ export class EcsServiceStack extends Stack { conditions: [ ListenerCondition.pathPatterns(props.listenerPathPatterns) ], - targetGroups: [ new ApplicationTargetGroup(this, `${props.serviceName}TargetGroup`, { + targetGroups: [new ApplicationTargetGroup(this, `${props.serviceName}TargetGroup`, { targets: [ecsService], vpc: props.vpc, healthCheck: { @@ -221,5 +201,9 @@ export class EcsServiceStack extends Stack { adjustmentType: AdjustmentType.CHANGE_IN_CAPACITY, cooldown: Duration.minutes(3), }) + + new CfnOutput(this, 'ServiceDiscoveryName', { + value: `${props.serviceName}.${props.privateDnsNamespace.namespaceName}`, + }); } -} \ No newline at end of file +} diff --git a/aoe-infra/lib/efs-stack.ts b/aoe-infra/lib/efs-stack.ts new file mode 100644 index 000000000..dd53fecc7 --- /dev/null +++ b/aoe-infra/lib/efs-stack.ts @@ -0,0 +1,50 @@ +import { CfnOutput, Stack, StackProps } from "aws-cdk-lib"; +import * as efs from 'aws-cdk-lib/aws-efs'; +import { Construct } from "constructs"; +import { ISecurityGroup, IVpc } from "aws-cdk-lib/aws-ec2"; +import { AccessPoint, ThroughputMode } from "aws-cdk-lib/aws-efs"; + +interface EfsStackProps extends StackProps { + securityGroup: ISecurityGroup; + vpc: IVpc + accessPointPath: string, + throughputMode: ThroughputMode +} + +export class EfsStack extends Stack { + public readonly fileSystem: efs.FileSystem; + public readonly fileSystemId: string; + public readonly accessPoint: AccessPoint; + + constructor(scope: Construct, id: string, props: EfsStackProps) { + super(scope, id, props); + + this.fileSystem = new efs.FileSystem(this, 'AOEEfs', { + vpc: 
props.vpc, + lifecyclePolicy: efs.LifecyclePolicy.AFTER_30_DAYS, + performanceMode: efs.PerformanceMode.GENERAL_PURPOSE, + throughputMode: props.throughputMode, + securityGroup: props.securityGroup, + }); + + this.accessPoint = new AccessPoint(this, 'AccessPoint', { + fileSystem: this.fileSystem, + path: props.accessPointPath, + createAcl: { + ownerGid: "1000", + ownerUid: "1000", + permissions: "755" + }, + posixUser: { + uid: "1000", + gid: "1000" + } + }); + + this.fileSystemId = this.fileSystem.fileSystemId + + new CfnOutput(this, 'FileSystemId', { + value: this.fileSystem.fileSystemId, + }); + } +} \ No newline at end of file diff --git a/aoe-infra/infra/lib/fargate-cluster-stack.ts b/aoe-infra/lib/fargate-cluster-stack.ts similarity index 83% rename from aoe-infra/infra/lib/fargate-cluster-stack.ts rename to aoe-infra/lib/fargate-cluster-stack.ts index 87da34687..b9140b208 100644 --- a/aoe-infra/infra/lib/fargate-cluster-stack.ts +++ b/aoe-infra/lib/fargate-cluster-stack.ts @@ -1,6 +1,6 @@ -import { Stack, StackProps, RemovalPolicy } from 'aws-cdk-lib'; -import { IVpc, Vpc } from 'aws-cdk-lib/aws-ec2'; -import { IKey, Key } from 'aws-cdk-lib/aws-kms'; +import { Stack, StackProps } from 'aws-cdk-lib'; +import { IVpc } from 'aws-cdk-lib/aws-ec2'; +import { Key } from 'aws-cdk-lib/aws-kms'; import { Cluster, ExecuteCommandLogging, ICluster } from 'aws-cdk-lib/aws-ecs'; import { LogGroup } from 'aws-cdk-lib/aws-logs'; import { Construct } from 'constructs'; @@ -22,7 +22,7 @@ export class FargateClusterStack extends Stack { // }); // add a new log group - const EcsExecLogGroup = new LogGroup(this, "EcsExecLogGroup", { + new LogGroup(this, "EcsExecLogGroup", { logGroupName: `${props.environment}-ecs-exec-audit`, encryptionKey: props.logGroupKmsKey, }); @@ -41,4 +41,4 @@ export class FargateClusterStack extends Stack { } }); } -} \ No newline at end of file +} diff --git a/aoe-infra/lib/front-end-bucket-stack.ts b/aoe-infra/lib/front-end-bucket-stack.ts new file 
mode 100644 index 000000000..663d98027 --- /dev/null +++ b/aoe-infra/lib/front-end-bucket-stack.ts @@ -0,0 +1,35 @@ +import { Stack, StackProps } from 'aws-cdk-lib'; +import { Construct } from 'constructs'; +import * as s3 from 'aws-cdk-lib/aws-s3'; +import * as cloudfront from 'aws-cdk-lib/aws-cloudfront'; +import * as origins from 'aws-cdk-lib/aws-cloudfront-origins'; + + +interface FrontendBucketStackProps extends StackProps { + // domain: string + environment: string, + cloudFrontDistribution: cloudfront.Distribution, +} + +export class FrontendBucketStack extends Stack { + readonly bucket: s3.Bucket; + constructor(scope: Construct, id: string, props: FrontendBucketStackProps) { + super(scope, id, props); + + // FrontEnd S3 bucket - OAI does not support KMS - encryption + this.bucket = new s3.Bucket(this, 'FrontEndBucket', { + bucketName: `aoe-static-content-${props.environment}`, + enforceSSL: true, + // encryption: s3.BucketEncryption.KMS, + // encryptionKey: props.s3KmsKey, + }); + + // CloudFront OAI, Origin & behaviour + const s3oai = new cloudfront.OriginAccessIdentity(this, 'OAI'); + const s3origin = new origins.S3Origin(this.bucket, { originAccessIdentity: s3oai }); + + props.cloudFrontDistribution.addBehavior('/static/*', s3origin, { + viewerProtocolPolicy: cloudfront.ViewerProtocolPolicy.REDIRECT_TO_HTTPS + }); + } +} \ No newline at end of file diff --git a/aoe-infra/infra/lib/front-end-content-deployment-stack.ts b/aoe-infra/lib/front-end-content-deployment-stack.ts similarity index 52% rename from aoe-infra/infra/lib/front-end-content-deployment-stack.ts rename to aoe-infra/lib/front-end-content-deployment-stack.ts index 0ef90ea24..8226f895a 100644 --- a/aoe-infra/infra/lib/front-end-content-deployment-stack.ts +++ b/aoe-infra/lib/front-end-content-deployment-stack.ts @@ -8,21 +8,21 @@ import * as path from 'path'; interface FrontendStaticContentDeploymentStackProps extends StackProps { // domain: string - environment: string, - 
cloudFrontDistribution: cloudfront.Distribution, - bucket: s3.Bucket, - } - + environment: string, + cloudFrontDistribution: cloudfront.Distribution, + bucket: s3.Bucket, +} + export class FrontendStaticContentDeploymentStack extends Stack { constructor(scope: Construct, id: string, props: FrontendStaticContentDeploymentStackProps) { - super(scope, id, props); + super(scope, id, props); - new s3deploy.BucketDeployment(this, 'XXXXXX', { - sources: [s3deploy.Source.asset(path.join(__dirname, '../frontend/dist'))], - destinationBucket: props.bucket, - destinationKeyPrefix: 'static', - distribution: props.cloudFrontDistribution, - distributionPaths: ['/static/*'], - }); + new s3deploy.BucketDeployment(this, 'XXXXXX', { + sources: [s3deploy.Source.asset(path.join(__dirname, '../frontend/dist'))], + destinationBucket: props.bucket, + destinationKeyPrefix: 'static', + distribution: props.cloudFrontDistribution, + distributionPaths: ['/static/*'], + }); } -} \ No newline at end of file +} diff --git a/aoe-infra/lib/githubActionsStack.ts b/aoe-infra/lib/githubActionsStack.ts new file mode 100644 index 000000000..26db26926 --- /dev/null +++ b/aoe-infra/lib/githubActionsStack.ts @@ -0,0 +1,51 @@ +import * as cdk from 'aws-cdk-lib' +import { Construct } from 'constructs' +import * as iam from 'aws-cdk-lib/aws-iam' +import { StackProps } from "aws-cdk-lib"; + +interface CommonStackProps extends StackProps { + environment: string +} + +export class GithubActionsStack extends cdk.Stack { + public githubActionsRole: iam.Role + + constructor(scope: Construct, id: string, props: CommonStackProps) { + super(scope, id, props) + + const githubOidcProvider = new iam.OpenIdConnectProvider(this, `GithubOidcProvider`, { + url: 'https://token.actions.githubusercontent.com', + thumbprints: [ + '6938fd4d98bab03faadb97b34396831e3780aea1', + '1c58a3a8518e8759bf075b76b750d4f2df264fcd' + ], + clientIds: ['sts.amazonaws.com'], + }); + + this.githubActionsRole = new iam.Role(this, 
`AoeGithubActionsUser`, { + assumedBy: new iam.WebIdentityPrincipal( + githubOidcProvider.openIdConnectProviderArn, + { + StringLike: { + 'token.actions.githubusercontent.com:sub': 'repo:Opetushallitus/aoe:*', + 'token.actions.githubusercontent.com:aud': 'sts.amazonaws.com', + }, + }, + ), + roleName: `aoe-github-actions-deployment-role-${props.environment}`, + }); + + const cdkPolicyStatement = new iam.PolicyStatement({ + actions: [ 'sts:AssumeRole', 'iam:PassRole'], + resources: [ + "arn:aws:iam::*:role/cdk-hnb659fds-deploy-role-*", + "arn:aws:iam::*:role/cdk-hnb659fds-file-publishing-*", + "arn:aws:iam::*:role/cdk-hnb659fds-image-publishing-*", + "arn:aws:iam::*:role/cdk-hnb659fds-lookup-*", + "arn:aws:iam::*:role/cdk-hnb659fds-cfn-exec-*" + ] + }) + this.githubActionsRole.addToPolicy(cdkPolicyStatement) + + } +} diff --git a/aoe-infra/lib/hosted-zone-stack.ts b/aoe-infra/lib/hosted-zone-stack.ts new file mode 100644 index 000000000..20dac7e88 --- /dev/null +++ b/aoe-infra/lib/hosted-zone-stack.ts @@ -0,0 +1,31 @@ +import * as cdk from 'aws-cdk-lib/core'; +import { StackProps } from 'aws-cdk-lib'; +import { Construct } from 'constructs'; +import * as route53 from 'aws-cdk-lib/aws-route53'; +import { IVpc } from 'aws-cdk-lib/aws-ec2'; + + +interface HostedZoneStackProps extends StackProps { + domain: string, + vpc: IVpc, +} + +export class HostedZoneStack extends cdk.Stack { + public readonly publicHostedZone: route53.HostedZone; + public readonly privateHostedZone: route53.HostedZone; + + + constructor(scope: Construct, id: string, props: HostedZoneStackProps) { + super(scope, id, props); + + this.publicHostedZone = new route53.PublicHostedZone(this, 'PublicHostedZone', { + zoneName: props.domain + }); + + this.privateHostedZone = new route53.PrivateHostedZone(this, 'PrivateHostedZone', { + zoneName: props.domain, + vpc: props.vpc + }); + + } +} \ No newline at end of file diff --git a/aoe-infra/infra/lib/kms-stack.ts b/aoe-infra/lib/kms-stack.ts similarity 
index 73% rename from aoe-infra/infra/lib/kms-stack.ts rename to aoe-infra/lib/kms-stack.ts index 4dd8aa999..4dcfabddc 100644 --- a/aoe-infra/infra/lib/kms-stack.ts +++ b/aoe-infra/lib/kms-stack.ts @@ -16,10 +16,28 @@ export class KmsStack extends Stack { readonly ebsKmsKey: Key; readonly parameterStoreKey: Key; readonly secretsManagerKey: Key; + readonly openSearchKmsKey: Key; + readonly documentDbKmsKey: Key; + readonly mskKmsKey: Key; + constructor(scope: Construct, id: string, props: KmsStackProps) { super(scope, id, props); -// Change removal policies to something sensible when stacks are more mature + // Change removal policies to something sensible when stacks are more mature + this.mskKmsKey = new Key(this, 'mskKmsKey', { + alias: `alias/${props.environment}-msk-aoe-key`, + removalPolicy: RemovalPolicy.DESTROY, + }); + + this.openSearchKmsKey = new Key(this, 'openSearchKmsKey', { + alias: `alias/${props.environment}-opensearch-aoe-key`, + removalPolicy: RemovalPolicy.DESTROY, + }); + + this.documentDbKmsKey = new Key(this, 'documentDbKmsKey', { + alias: `alias/${props.environment}-documentdb-aoe-key`, + removalPolicy: RemovalPolicy.DESTROY, + }); this.rdsKmsKey = new Key(this, 'rdsKmsKey', { alias: `alias/${props.environment}-rds-aoe-key`, @@ -43,7 +61,7 @@ export class KmsStack extends Stack { this.cloudwatchLogsKmsKey.grantEncryptDecrypt( new aws_iam.ServicePrincipal(`logs.${this.region}.amazonaws.com`) - ); + ); this.ebsKmsKey = new Key(this, 'ebsKmsKey', { alias: `alias/${props.environment}-ebs-aoe-key`, @@ -60,4 +78,4 @@ export class KmsStack extends Stack { removalPolicy: RemovalPolicy.DESTROY, }); } -} \ No newline at end of file +} diff --git a/aoe-infra/lib/msk-stack.ts b/aoe-infra/lib/msk-stack.ts new file mode 100644 index 000000000..733fb650a --- /dev/null +++ b/aoe-infra/lib/msk-stack.ts @@ -0,0 +1,120 @@ +import * as cdk from "aws-cdk-lib"; +import { Construct } from "constructs"; +import { IVpc, SecurityGroup } from "aws-cdk-lib/aws-ec2"; 
+import * as msk from "aws-cdk-lib/aws-msk"; +import { Key } from "aws-cdk-lib/aws-kms"; +import * as iam from 'aws-cdk-lib/aws-iam'; +import * as cr from 'aws-cdk-lib/custom-resources'; +import * as lambda from 'aws-cdk-lib/aws-lambda'; +import { Duration } from "aws-cdk-lib"; + +interface MskStackProps extends cdk.StackProps { + volumeSize: number; + instanceType: string; + numberOfBrokerNodes: number; + vpc: IVpc; + securityGroup: SecurityGroup; + env: { region: string }; + kmsKey: Key, + clusterName: string, + version: string +} + +export class MskStack extends cdk.Stack { + + public readonly kafkaCluster: msk.CfnCluster; + public readonly bootstrapBrokers: string; + + constructor(scope: Construct, id: string, props: MskStackProps) { + super(scope, id, props); + + this.kafkaCluster = new msk.CfnCluster(this, "AOEKafkaCluster", { + + brokerNodeGroupInfo: { + securityGroups: [ props.securityGroup.securityGroupId ], + clientSubnets: props.vpc.privateSubnets.map(subnet => subnet.subnetId), + instanceType: props.instanceType, + storageInfo: { + ebsStorageInfo: { + volumeSize: props.volumeSize + } + } + }, + clusterName: props.clusterName, + kafkaVersion: props.version, + numberOfBrokerNodes: props.numberOfBrokerNodes, + clientAuthentication: { + sasl: { + iam: { + enabled: true + } + } + }, + encryptionInfo: { + encryptionInTransit: { + inCluster: true, + clientBroker: "TLS", + }, + encryptionAtRest: { + dataVolumeKmsKeyId: props.kmsKey.keyId, + }, + }, + }); + + const getBootstrapBrokersLambda = new lambda.Function(this, 'GetBootstrapBrokersLambda', { + runtime: lambda.Runtime.NODEJS_20_X, + handler: 'index.handler', + code: lambda.Code.fromInline(` + exports.handler = async (event) => { + const { KafkaClient, GetBootstrapBrokersCommand } = require("@aws-sdk/client-kafka"); // CommonJS import + const client = new KafkaClient(); + const clusterArn = event.ResourceProperties.ClusterArn; + + if (event.RequestType === 'Delete') { + return { PhysicalResourceId: 
clusterArn }; + } + + try { + const command = new GetBootstrapBrokersCommand({ ClusterArn: clusterArn }); + const response = await client.send(command); + + return { + PhysicalResourceId: clusterArn, + Data: { + BootstrapBrokerStringSaslIam: response.BootstrapBrokerStringSaslIam, + }, + }; + } catch (error) { + console.error(error); + throw new Error('Failed to retrieve bootstrap brokers: ' + error.message); + } + }; + `), + timeout: Duration.minutes(2), + initialPolicy: [ + new iam.PolicyStatement({ + actions: ['kafka:GetBootstrapBrokers'], + resources: [this.kafkaCluster.attrArn], + }), + ], + }); + + const customResourceProvider = new cr.Provider(this, 'CustomResourceProvider', { + onEventHandler: getBootstrapBrokersLambda, + }); + + const bootstrapBrokersResource = new cdk.CustomResource(this, 'BootstrapBrokersResource', { + serviceToken: customResourceProvider.serviceToken, + properties: { + ClusterArn: this.kafkaCluster.attrArn, + }, + }); + + this.bootstrapBrokers = bootstrapBrokersResource.getAttString('BootstrapBrokerStringSaslIam') + + new cdk.CfnOutput(this, 'BootstrapServers', { + value: this.bootstrapBrokers, + }); + + } +} diff --git a/aoe-infra/lib/namespaceStack.ts b/aoe-infra/lib/namespaceStack.ts new file mode 100644 index 000000000..7135dcca9 --- /dev/null +++ b/aoe-infra/lib/namespaceStack.ts @@ -0,0 +1,22 @@ +import * as cdk from 'aws-cdk-lib'; +import * as ec2 from 'aws-cdk-lib/aws-ec2'; +import * as servicediscovery from 'aws-cdk-lib/aws-servicediscovery'; +import { StackProps } from "aws-cdk-lib"; + +interface NamespaceStackProps extends StackProps { + environment: string +} + +export class NamespaceStack extends cdk.Stack { + public readonly privateDnsNamespace: servicediscovery.PrivateDnsNamespace; + + constructor(scope: cdk.App, id: string, vpc: ec2.IVpc, props: NamespaceStackProps) { + super(scope, id, props); + + this.privateDnsNamespace = new servicediscovery.PrivateDnsNamespace(this, 'Namespace', { + name: 
`${props.environment}.aoe.local`, + vpc, + description: 'Shared service discovery namespace', + }); + } +} \ No newline at end of file diff --git a/aoe-infra/lib/opensearch-stack.ts b/aoe-infra/lib/opensearch-stack.ts new file mode 100644 index 000000000..d00d8e31d --- /dev/null +++ b/aoe-infra/lib/opensearch-stack.ts @@ -0,0 +1,110 @@ +import * as cdk from 'aws-cdk-lib'; +import * as ec2 from 'aws-cdk-lib/aws-ec2'; +import * as ops from 'aws-cdk-lib/aws-opensearchserverless'; +import {Key} from "aws-cdk-lib/aws-kms"; + +interface OpenSearchServerlessStackProps extends cdk.StackProps { + collectionName: string; + description: string; + vpc: ec2.IVpc; + securityGroupIds: string[]; + kmsKey: Key; + standbyReplicas: 'DISABLED' | 'ENABLED', +} + +export class OpenSearchServerlessStack extends cdk.Stack { + public readonly collectionArn: string + public readonly collectionEndpoint: string; + constructor(scope: cdk.App, id: string, props: OpenSearchServerlessStackProps) { + super(scope, id, props); + + const vpce = new ops.CfnVpcEndpoint(this, 'VpcEndPoint,', { + name: 'aoe-opensearch-endpoint', + subnetIds: props.vpc.isolatedSubnets.map(x => x.subnetId), + vpcId: props.vpc.vpcId, + securityGroupIds: props.securityGroupIds, + }); + + const collection = new ops.CfnCollection(this, 'AOECollection', { + name: props.collectionName, + description: props.description, + type: 'SEARCH', + standbyReplicas: props.standbyReplicas + }); + + const encryptionPolicy = new ops.CfnSecurityPolicy(this, 'EncryptionPolicy', { + name: 'aoe-serverless-encryption-policy', + type: 'encryption', + policy: JSON.stringify({ + Rules: [ + { + ResourceType: 'collection', + Resource: [`collection/${props.collectionName}`], + }, + ], + AWSOwnedKey:false, + KmsARN: props.kmsKey.keyArn, + + }), + }); + + const networkPolicy = new ops.CfnSecurityPolicy(this, 'NetworkPolicy', { + name: 'aoe-serverless-network-policy', + type: 'network', + policy: JSON.stringify([{ + Rules: [ + { + ResourceType: 
'collection', + Resource: [`collection/${props.collectionName}`], + } + ], + SourceVPCEs: [vpce.ref], + + }]), + }); + + const dataAccessPolicy = new ops.CfnAccessPolicy(this, 'DataAccessPolicy', { + name: `${props.collectionName}-dap`, + description: `Data access policy for: ${props.collectionName}`, + type: "data", + + policy: JSON.stringify([{ + Principal: [`arn:aws:iam::${this.account}:root`], + Rules: [ + + { + ResourceType: 'collection', + Resource: [`collection/${props.collectionName}`], + Permission: [ + "aoss:*" + ] + }, + { + ResourceType: 'index', + Resource: [ + `index/${props.collectionName}/*`, + ], + Permission: [ + 'aoss:*' + ] + } + ] + }]) + + }) + + collection.addDependency(encryptionPolicy); + collection.addDependency(networkPolicy); + collection.addDependency(dataAccessPolicy); + + this.collectionArn = collection.attrArn + this.collectionEndpoint = collection.attrCollectionEndpoint + + new cdk.CfnOutput(this, 'CollectionArn', { + value: collection.attrArn, + }); + new cdk.CfnOutput(this, 'CollectionEndpoint', { + value: collection.attrCollectionEndpoint, + }); + } +} diff --git a/aoe-infra/lib/redis-stack.ts b/aoe-infra/lib/redis-stack.ts new file mode 100644 index 000000000..376ea9757 --- /dev/null +++ b/aoe-infra/lib/redis-stack.ts @@ -0,0 +1,80 @@ +import * as cdk from "aws-cdk-lib"; +import { Construct } from "constructs"; +import { aws_elasticache as ElastiCache } from "aws-cdk-lib"; +import * as ec2 from "aws-cdk-lib/aws-ec2"; +import * as secretsmanager from "aws-cdk-lib/aws-secretsmanager"; +import { IKey } from "aws-cdk-lib/aws-kms"; + +interface ElastiCacheServerlessStackProps extends cdk.StackProps { + vpc: ec2.IVpc; + elasticacheName: string; + secret: secretsmanager.Secret; + consumingServiceName: string; + redisKmsKeyId: string; + secretsManagerKmsKeyId: IKey; + securityGroupId: string; + redisMajorVersion: string; + storageMin: number; + storageMax: number; + minEcpuPerSecond: number; + maxEcpuPerSecond: number; +} + +export 
class ElasticacheServerlessStack extends cdk.Stack { + public readonly endpointAddress: string; + public readonly endpointPort: string; + constructor(scope: Construct, id: string, props: ElastiCacheServerlessStackProps) { + super(scope, id, props); + + const redisUserName = "app" + + const cfnUser = new ElastiCache.CfnUser(this, 'MyCfnUser', { + engine: "redis", + userId: redisUserName, + userName: redisUserName, + noPasswordRequired: false, + passwords: [ props.secret.secretValueFromJson('secretkey').unsafeUnwrap() ], + accessString: "on ~* +@all", + }); + + const cfnUserGroup = new ElastiCache.CfnUserGroup(this, 'MyCfnUserGroup', { + engine: 'redis', + userGroupId: `${props.elasticacheName}-userGroupId`.toLowerCase(), + userIds: [ cfnUser.userId, "default" ], + }); + + cfnUserGroup.node.addDependency(cfnUser); + + const elastiCacheSubnetIds: string[] = []; + for (const subnet of props.vpc.isolatedSubnets) { + elastiCacheSubnetIds.push(subnet.subnetId); + } + + const elastiCacheServlerless = new ElastiCache.CfnServerlessCache(this, "ServerlessCache", { + engine: "redis", + serverlessCacheName: props.elasticacheName.toLowerCase(), + cacheUsageLimits: { + dataStorage: { + unit: "GB", + + maximum: props.storageMax, + minimum: props.storageMin, + }, + ecpuPerSecond: { + maximum: props.maxEcpuPerSecond, + minimum: props.minEcpuPerSecond, + }, + }, + kmsKeyId: props.redisKmsKeyId, + majorEngineVersion: props.redisMajorVersion, + securityGroupIds: [ props.securityGroupId ], + subnetIds: elastiCacheSubnetIds, + userGroupId: `${props.elasticacheName}-userGroupId`.toLowerCase(), + }); + + elastiCacheServlerless.node.addDependency(cfnUserGroup) + + this.endpointAddress = elastiCacheServlerless.attrEndpointAddress + this.endpointPort = elastiCacheServlerless.attrEndpointPort + } +} \ No newline at end of file diff --git a/aoe-infra/lib/secrets-manager-stack.ts b/aoe-infra/lib/secrets-manager-stack.ts new file mode 100644 index 000000000..9d62b02cd --- /dev/null +++ 
b/aoe-infra/lib/secrets-manager-stack.ts @@ -0,0 +1,86 @@ +import * as cdk from "aws-cdk-lib"; +import { Construct } from "constructs"; +import * as secretsmanager from "aws-cdk-lib/aws-secretsmanager"; +import { IKey } from "aws-cdk-lib/aws-kms"; + +interface SecretManagerStackProps extends cdk.StackProps { + kmsKey: IKey; +} + + +export type SecretEntry = { + secretKey: string; + envVarName: string; + path: string; +}; + +export type Secrets = { + [key: string]: SecretEntry; +}; + +// Stack for secrets generated on the fly (mostly resources that don't support Parameter Store) +export class SecretManagerStack extends cdk.Stack { + public readonly semanticApisPassword: secretsmanager.Secret; + public readonly webBackendAuroraPassword: secretsmanager.Secret; + public readonly webBackendPassportSessionSecret: secretsmanager.Secret + public readonly documentDbPassword: secretsmanager.Secret; + + public readonly secrets: Secrets = { + CLIENT_ID: {envVarName: 'CLIENT_ID', path: '/service/web-backend/CLIENT_ID', secretKey: 'secretkey'}, + PROXY_URI: {envVarName: 'PROXY_URI', path: '/service/web-backend/PROXY_URI', secretKey: 'secretkey'}, + REDIS_PASS: { envVarName: 'REDIS_PASS', path: '/service/semantic-apis/REDIS_PASS', secretKey: 'secretkey' }, + PG_PASS: { envVarName: 'PG_PASS', path: '/service/web-backend/PG_PASS', secretKey: 'secretkey' }, + SESSION_SECRET: { envVarName: 'SESSION_SECRET', path: '/service/web-backend/SESSION_SECRET', secretKey: 'secretkey' }, + CLIENT_SECRET: { envVarName: 'CLIENT_SECRET', path: '/service/web-backend/CLIENT_SECRET', secretKey: 'secretkey' }, + JWT_SECRET: { envVarName: 'JWT_SECRET', path: '/service/web-backend/JWT_SECRET', secretKey: 'secretkey' }, + ANALYTICS_PG_PASS: {envVarName: 'SPRING_DATASOURCE_PRIMARY_PASSWORD', path: '/auroradbs/web-backend/dev/reporter', secretKey: 'password' }, + ANALYTICS_DOCDB_PASSWORD: {envVarName: 'MONGODB_PRIMARY_PASSWORD', path: '/service/data-analytics/DOCDB_PASS', secretKey: 'secretkey' }, + 
ANALYTICS_TRUST_STORE_PASSWORD: {envVarName: 'TRUST_STORE_PASS', path: '/service/data-analytics/TRUST_STORE_PASS', secretKey: 'secretkey' }, + } + + constructor(scope: Construct, id: string, props: SecretManagerStackProps) { + super(scope, id, props); + + this.semanticApisPassword = new secretsmanager.Secret(this, 'secret', { + secretName: '/service/semantic-apis/REDIS_PASS', + generateSecretString: { + secretStringTemplate: JSON.stringify({}), + generateStringKey: 'secretkey', + passwordLength: 16, + excludeCharacters: '@%*()_+=`~{}|[]\\:";\'?,./' + }, + }); + + this.webBackendPassportSessionSecret = new secretsmanager.Secret(this, 'PassportSessionSecret', { + secretName: '/service/web-backend/SESSION_SECRET', + generateSecretString: { + secretStringTemplate: JSON.stringify({}), + passwordLength: 32, + generateStringKey: 'secretkey', + excludeCharacters: '@%*()_+=`~{}|[]\\:";\'?,./' + }, + }); + + this.webBackendAuroraPassword = new secretsmanager.Secret(this, 'WebBackendAuroraPassword', { + secretName: '/auroradbs/web-backend/master-user-password', + generateSecretString: { + secretStringTemplate: JSON.stringify({ username: "aoe_db_admin", }), + generateStringKey: 'password', + passwordLength: 24, + excludeCharacters: '@%*()_+=`~{}|[]\\:";\'?,./' + }, + }); + + this.documentDbPassword = new secretsmanager.Secret(this, 'DocumentDbSecret', { + generateSecretString: { + secretStringTemplate: JSON.stringify({ username: 'docdbuser' }), + generateStringKey: 'password', + excludeCharacters: '/@" ', + }, + }); + + } +} + + + diff --git a/aoe-infra/lib/security-groups.ts b/aoe-infra/lib/security-groups.ts new file mode 100644 index 000000000..fa7ff762c --- /dev/null +++ b/aoe-infra/lib/security-groups.ts @@ -0,0 +1,283 @@ +import * as cdk from 'aws-cdk-lib/core'; +import * as ec2 from 'aws-cdk-lib/aws-ec2'; +import { StackProps } from 'aws-cdk-lib'; +import { Construct } from 'constructs'; + +/* +Security Groups for resources are defined here. 
+Security Groups defined in a centralized manner like this won't generate +an "order of creation" - dependency between the services / databases, +but adds a little operational complexity (when creating a new +service / database, Security Groups and SG rules must first be defined here) +*/ + +interface SecurityGroupStackProps extends StackProps { + vpc: ec2.IVpc +} + +export class SecurityGroupStack extends cdk.Stack { + public readonly semanticApisServiceSecurityGroup: ec2.SecurityGroup; + public readonly albSecurityGroup: ec2.SecurityGroup; + public readonly webBackendAuroraSecurityGroup: ec2.SecurityGroup; + public readonly semanticApisRedisSecurityGroup: ec2.SecurityGroup; + public readonly bastionSecurityGroup: ec2.SecurityGroup; + public readonly openSearchSecurityGroup: ec2.SecurityGroup; + public readonly dataAnalyticsServiceSecurityGroup: ec2.SecurityGroup; + public readonly streamingServiceSecurityGroup: ec2.SecurityGroup; + public readonly dataServicesSecurityGroup: ec2.SecurityGroup + public readonly webBackendsServiceSecurityGroup: ec2.SecurityGroup; + public readonly efsSecurityGroup: ec2.SecurityGroup; + public readonly documentDbSecurityGroup: ec2.SecurityGroup; + public readonly mskSecurityGroup: ec2.SecurityGroup; + public readonly webFrontendServiceSecurityGroup: ec2.SecurityGroup; + + constructor(scope: Construct, id: string, props: SecurityGroupStackProps) { + super(scope, id, props); + + // Security Groups + this.documentDbSecurityGroup = new ec2.SecurityGroup(this, 'DocumentDbSecurityGroup', { + vpc: props.vpc, + description: 'Allow access to DocumentDB cluster', + allowAllOutbound: true, + }); + + this.mskSecurityGroup = new ec2.SecurityGroup(this, 'MskSecurityGroup', { + vpc: props.vpc, + description: 'Allow access to MSK kafka', + allowAllOutbound: true, + }); + + this.efsSecurityGroup = new ec2.SecurityGroup(this, 'EfsSecurityGroup', { + vpc: props.vpc, + allowAllOutbound: true, + }) + + this.bastionSecurityGroup = new 
ec2.SecurityGroup(this, 'BastionSecurityGroup', { + vpc: props.vpc, + allowAllOutbound: true, + }); + + this.albSecurityGroup = new ec2.SecurityGroup(this, 'AlbSecurityGroup', { + vpc: props.vpc, + allowAllOutbound: true, + }); + + this.dataAnalyticsServiceSecurityGroup = new ec2.SecurityGroup(this, 'DataAnalyticsServiceSecurityGroupSecurityGroup', { + vpc: props.vpc, + allowAllOutbound: true, + }); + + this.webBackendsServiceSecurityGroup = new ec2.SecurityGroup(this, 'WebBackendServiceSecurityGroupSecurityGroup', { + vpc: props.vpc, + allowAllOutbound: true, + }); + + this.webFrontendServiceSecurityGroup = new ec2.SecurityGroup(this, 'WebFrontendServiceSecurityGroupSecurityGroup', { + vpc: props.vpc, + allowAllOutbound: true, + }); + + this.semanticApisServiceSecurityGroup = new ec2.SecurityGroup(this, 'SemanticApisServiceSecurityGroup', { + vpc: props.vpc, + allowAllOutbound: true, + }); + + this.webBackendAuroraSecurityGroup = new ec2.SecurityGroup(this, 'WebBackendAuroraSecurityGroupSecurityGroup', { + vpc: props.vpc, + allowAllOutbound: true, + }); + + this.semanticApisRedisSecurityGroup = new ec2.SecurityGroup(this, 'SemanticApisRedisSecurityGroup', { + vpc: props.vpc, + allowAllOutbound: true, + }); + + this.openSearchSecurityGroup = new ec2.SecurityGroup(this, 'openSearchSecurityGroup', { + vpc: props.vpc, + allowAllOutbound: true + }) + + this.streamingServiceSecurityGroup = new ec2.SecurityGroup(this, 'StreamingServiceSecurityGroupSecurityGroup', { + vpc: props.vpc, + allowAllOutbound: true, + }); + + this.dataServicesSecurityGroup = new ec2.SecurityGroup(this, 'DataServicesSecurityGroup', { + vpc: props.vpc, + allowAllOutbound: true, + }) + + // Security Group rules + this.mskSecurityGroup.addIngressRule( + this.dataAnalyticsServiceSecurityGroup, + ec2.Port.tcp(9098) + ) + + this.mskSecurityGroup.addIngressRule( + this.webBackendsServiceSecurityGroup, + ec2.Port.tcp(9098) + ) + + this.mskSecurityGroup.addIngressRule( + this.bastionSecurityGroup, + 
ec2.Port.tcp(9098) + ); + + this.documentDbSecurityGroup.addIngressRule( + this.dataAnalyticsServiceSecurityGroup, + ec2.Port.tcp(27017) + ) + + this.documentDbSecurityGroup.addIngressRule( + this.bastionSecurityGroup, + ec2.Port.tcp(27017) + ) + + this.efsSecurityGroup.addIngressRule( + this.webBackendsServiceSecurityGroup, + ec2.Port.tcp(2049) + ) + + this.efsSecurityGroup.addIngressRule( + this.bastionSecurityGroup, + ec2.Port.tcp(2049) + ) + + this.openSearchSecurityGroup.addIngressRule( + this.bastionSecurityGroup, + ec2.Port.tcp(443) + ) + + this.openSearchSecurityGroup.addIngressRule( + this.webBackendsServiceSecurityGroup, + ec2.Port.tcp(443) + ) + + this.semanticApisRedisSecurityGroup.addIngressRule( + this.semanticApisServiceSecurityGroup, + ec2.Port.tcp(6379) + ); + + this.semanticApisRedisSecurityGroup.addIngressRule( + this.bastionSecurityGroup, + ec2.Port.tcp(6379) + ); + + this.dataAnalyticsServiceSecurityGroup.addIngressRule( + this.albSecurityGroup, + ec2.Port.tcp(8080) + ) + + this.dataAnalyticsServiceSecurityGroup.addIngressRule( + this.bastionSecurityGroup, + ec2.Port.tcp(8080) + ); + + this.semanticApisRedisSecurityGroup.addIngressRule( + this.webBackendsServiceSecurityGroup, + ec2.Port.tcp(6379) + ); + + this.semanticApisServiceSecurityGroup.addIngressRule( + this.albSecurityGroup, + ec2.Port.tcp(8080) + ); + + this.semanticApisServiceSecurityGroup.addIngressRule( + this.bastionSecurityGroup, + ec2.Port.tcp(8080) + ); + + this.dataServicesSecurityGroup.addIngressRule( + this.albSecurityGroup, + ec2.Port.tcp(8080) + ); + + this.dataServicesSecurityGroup.addIngressRule( + this.bastionSecurityGroup, + ec2.Port.tcp(8080) + ); + + this.webBackendsServiceSecurityGroup.addIngressRule( + this.albSecurityGroup, + ec2.Port.tcp(8080) + ); + + this.webBackendsServiceSecurityGroup.addIngressRule( + this.bastionSecurityGroup, + ec2.Port.tcp(8080) + ); + + this.webBackendsServiceSecurityGroup.addIngressRule( + this.dataServicesSecurityGroup, + 
ec2.Port.tcp(8080) + ); + + this.streamingServiceSecurityGroup.addIngressRule( + this.albSecurityGroup, + ec2.Port.tcp(8080) + ); + + this.streamingServiceSecurityGroup.addIngressRule( + this.bastionSecurityGroup, + ec2.Port.tcp(8080) + ); + + this.streamingServiceSecurityGroup.addIngressRule( + this.webBackendsServiceSecurityGroup, + ec2.Port.tcp(8080) + ) + + this.dataAnalyticsServiceSecurityGroup.addIngressRule( + this.webBackendsServiceSecurityGroup, + ec2.Port.tcp(8080) + ) + + // allow port 80 to alb albSecuritygroup from Internet + this.albSecurityGroup.addIngressRule( + ec2.Peer.anyIpv4(), + ec2.Port.tcp(443) + ); + + this.webBackendAuroraSecurityGroup.addIngressRule( + this.webBackendsServiceSecurityGroup, + ec2.Port.tcp(5432) + ); + + this.webBackendAuroraSecurityGroup.addIngressRule( + this.dataAnalyticsServiceSecurityGroup, + ec2.Port.tcp(5432) + ) + + this.webBackendAuroraSecurityGroup.addIngressRule( + this.bastionSecurityGroup, + ec2.Port.tcp(5432) + ); + + this.webBackendsServiceSecurityGroup.addIngressRule( + this.bastionSecurityGroup, + ec2.Port.tcp(8080) + ) + + this.webFrontendServiceSecurityGroup.addIngressRule( + this.albSecurityGroup, + ec2.Port.tcp(8080) + ) + + this.webFrontendServiceSecurityGroup.addIngressRule( + this.bastionSecurityGroup, + ec2.Port.tcp(8080) + ) + + this.webBackendAuroraSecurityGroup.addIngressRule( + this.bastionSecurityGroup, + ec2.Port.tcp(5432) + ); + + // Output all security group IDs so that they can be consumed from ECS service/RDS stacks + // new CfnOutput(this, 'albSecurityGroupId', { value: this.albSecurityGroup.securityGroupId }); + // new CfnOutput(this, 'semanticApisServiceSecurityGroup', { value: this.semanticApisServiceSecurityGroup.securityGroupId }); + // new CfnOutput(this, 'testAuroraSecurityGroup2Id', { value: this.testAuroraSecurityGroup.securityGroupId }); + } + +} diff --git a/aoe-infra/infra/lib/subnet-groups.ts b/aoe-infra/lib/subnet-groups.ts similarity index 100% rename from 
aoe-infra/infra/lib/subnet-groups.ts rename to aoe-infra/lib/subnet-groups.ts diff --git a/aoe-infra/lib/utility-stack.ts b/aoe-infra/lib/utility-stack.ts new file mode 100644 index 000000000..9f3794820 --- /dev/null +++ b/aoe-infra/lib/utility-stack.ts @@ -0,0 +1,36 @@ +import * as iam from 'aws-cdk-lib/aws-iam'; +import * as cdk from 'aws-cdk-lib'; +import { Construct } from 'constructs'; + +export class UtilityStack extends cdk.Stack { + public readonly githubActionsDeploymentRole: iam.Role; + constructor(scope: Construct, id: string, props?: cdk.StackProps) { + super(scope, id, props); + + + // Github Actions OIDC role + const githubOidcProvider = new iam.OpenIdConnectProvider(this, `OvaraUtilityGithubOidcProvider`, { + url: 'https://token.actions.githubusercontent.com', + thumbprints: [ + '6938fd4d98bab03faadb97b34396831e3780aea1', + '1c58a3a8518e8759bf075b76b750d4f2df264fcd' + ], + clientIds: ['sts.amazonaws.com'], + }); + + this.githubActionsDeploymentRole = new iam.Role(this, `AoeUtilityGithubActionsUser`, { + assumedBy: new iam.WebIdentityPrincipal( + githubOidcProvider.openIdConnectProviderArn, + { + StringLike: { + 'token.actions.githubusercontent.com:sub': 'repo:Opetushallitus/aoe:*', + 'token.actions.githubusercontent.com:aud': 'sts.amazonaws.com', + }, + }, + ), + roleName: 'aoe-utility-github-actions-deployment-role', + }); + + } +} + diff --git a/aoe-infra/infra/lib/vpc-stack.ts b/aoe-infra/lib/vpc-stack.ts similarity index 100% rename from aoe-infra/infra/lib/vpc-stack.ts rename to aoe-infra/lib/vpc-stack.ts diff --git a/aoe-infra/infra/package-lock.json b/aoe-infra/package-lock.json similarity index 64% rename from aoe-infra/infra/package-lock.json rename to aoe-infra/package-lock.json index b8e8795a7..ab046036d 100644 --- a/aoe-infra/infra/package-lock.json +++ b/aoe-infra/package-lock.json @@ -8,6 +8,7 @@ "name": "infra", "version": "0.1.0", "dependencies": { + "@aws-cdk/aws-msk": "^1.203.0", "aws-cdk-lib": "2.150.0", "constructs": "^10.0.0", 
"lodash": "^4.17.21", @@ -17,13 +18,22 @@ "infra": "bin/infra.js" }, "devDependencies": { + "@eslint/eslintrc": "^3.2.0", + "@eslint/js": "^9.17.0", "@types/jest": "^29.5.12", "@types/node": "20.14.9", + "@typescript-eslint/parser": "^8.18.0", "aws-cdk": "2.150.0", + "eslint": "^9.17.0", + "eslint-config-prettier": "^9.1.0", + "eslint-plugin-prettier": "^5.2.1", + "globals": "^15.13.0", "jest": "^29.7.0", + "prettier": "^3.4.2", "ts-jest": "^29.1.5", "ts-node": "^10.9.2", - "typescript": "~5.5.3" + "typescript": "~5.5.3", + "typescript-eslint": "^8.18.0" } }, "node_modules/@ampproject/remapping": { @@ -54,6 +64,1159 @@ "resolved": "https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v6/-/asset-node-proxy-agent-v6-2.0.3.tgz", "integrity": "sha512-twhuEG+JPOYCYPx/xy5uH2+VUsIEhPTzDY0F1KuB+ocjWWB/KEDiOVL19nHvbPCB6fhWnkykXEMJ4HHcKvjtvg==" }, + "node_modules/@aws-cdk/assets": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/assets/-/assets-1.203.0.tgz", + "integrity": "sha512-aEexr1PEZPqTcBXiKLwmiWYtpoC6vPpsGAqe39U6lovXP675qO6/VTpE26AUtS0sDkuhCBnXDQR5H49e0+A+jQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/assets/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-acmpca": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-acmpca/-/aws-acmpca-1.203.0.tgz", + "integrity": 
"sha512-0+7de1E0JxTjPe8pm73Io3WAEC9pvEjdKmNYwXApYCslqlFqBOuHjU9pQh++6cYcUWCMmav1iwM9zqCB6ZVRSQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-acmpca/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-applicationautoscaling": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-applicationautoscaling/-/aws-applicationautoscaling-1.203.0.tgz", + "integrity": "sha512-tz/BzZ++hd+T5s4AQo4l7fKaK6+xeGEbR7fGulgz8xByW5V46/krXvpTWVvM7VV2WSigVx1QeS9vcT3MOdsYBQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-autoscaling-common": "1.203.0", + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-autoscaling-common": "1.203.0", + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-applicationautoscaling/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-autoscaling-common": { + "version": "1.203.0", + "resolved": 
"https://registry.npmjs.org/@aws-cdk/aws-autoscaling-common/-/aws-autoscaling-common-1.203.0.tgz", + "integrity": "sha512-ET3whArynAzRMxrnlkgAdWWzdaRg2QJWcepjSr615pwF0cOFjTW2scpPWIpcaugN2z+wFPlylP5zloA16FM0Tg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-autoscaling-common/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-cloudformation": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-cloudformation/-/aws-cloudformation-1.203.0.tgz", + "integrity": "sha512-vwNrk5KT0TgIKy+hinjWu/CBEet394mK8o2eFzQG5G82An0O3n/ILdaBrXZut0tyHxJSYQ90+Zaor6Ndld49jA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-lambda": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/aws-sns": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-lambda": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/aws-sns": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-cloudformation/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": 
"sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-cloudwatch": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-cloudwatch/-/aws-cloudwatch-1.203.0.tgz", + "integrity": "sha512-RJfUHAtCPefcgWlH6Nqcv0f1svPfxbESUC52tK6yKqhmzo+57ePLkONsn88x22TuWo/hXOpjxYqrUECr0S5YqA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-cloudwatch/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-codeguruprofiler": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-codeguruprofiler/-/aws-codeguruprofiler-1.203.0.tgz", + "integrity": "sha512-Tl9DGRY/BN615J8H4SUQxQ0Zb2TzkaBYHFx4F1lt4NYdWHYh6oALxLEBSkXD5txPvvtH4MouDENuqMBV/fZI3w==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-codeguruprofiler/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": 
"sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-codestarnotifications": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-codestarnotifications/-/aws-codestarnotifications-1.203.0.tgz", + "integrity": "sha512-Qd3iJiFx5Nt+rZRNnmMUmknNetq4069pUhOTv0NTxB+ZmWQ1leQ4iSrhQ5YOANWBK0+EtPMz9YBjKjE4zBPf+Q==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-codestarnotifications/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-ec2": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-ec2/-/aws-ec2-1.203.0.tgz", + "integrity": "sha512-6Q/kvAKsprESeGCYMYS9KiChDyhzMk6NerSreVD83UbcvUJU9EjGUJF+HJfRQXn66YjdOsXKcm89Ni31ftP5PQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-logs": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/aws-s3-assets": "1.203.0", + "@aws-cdk/aws-ssm": "1.203.0", + "@aws-cdk/cloud-assembly-schema": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "@aws-cdk/region-info": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + 
"@aws-cdk/aws-logs": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/aws-s3-assets": "1.203.0", + "@aws-cdk/aws-ssm": "1.203.0", + "@aws-cdk/cloud-assembly-schema": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "@aws-cdk/region-info": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-ec2/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-ecr": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-ecr/-/aws-ecr-1.203.0.tgz", + "integrity": "sha512-pMijGTZzncb10zJLuqDzFtHpVxpK2dQdD/1Md7FYF1Mm7/r2CFx4Q2H3f1fOi4sBXk3ftypV8X7HFzIrWPwCSg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-events": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-events": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-ecr-assets": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-ecr-assets/-/aws-ecr-assets-1.203.0.tgz", + "integrity": "sha512-wPAjllyLs+TvdakhwATjIyc5mBuNJFurPM2S1mo7YC4fURp2NOb9vHpIFgV9ZC+r8xadBFWt13de72cyl+h2iw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/assets": "1.203.0", + "@aws-cdk/aws-ecr": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + 
"@aws-cdk/assets": "1.203.0", + "@aws-cdk/aws-ecr": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-ecr-assets/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-ecr/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-efs": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-efs/-/aws-efs-1.203.0.tgz", + "integrity": "sha512-JYy8N7GwKCXjFpsBZZqy9VbS/zJGDSq8etXAvBsrscDveDzzQr4/6CiXYU70aL5lLV34ex07A5lKBmII9cqq0Q==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-ec2": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/cloud-assembly-schema": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-ec2": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/cloud-assembly-schema": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-efs/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": 
"sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-events": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-events/-/aws-events-1.203.0.tgz", + "integrity": "sha512-LYjKGqqosXG3HLAmvSAS/N5OSxH0uqHmDlisapwl4kEBPfSOvzzVF9TTuMulDYfSBV0Esspb3RIkXc/ja7f9JQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-events/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-iam": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-iam/-/aws-iam-1.203.0.tgz", + "integrity": "sha512-8qXLtOzkaLlk7WlssocExMYruOb59irhspTvHvf/kpcMBmWQoyNRC80Ab3QtXkZNBppfrMkROGZvqzW2GlxAwQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "@aws-cdk/region-info": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/core": "1.203.0", + "@aws-cdk/region-info": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-iam/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": 
"Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-kms": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-kms/-/aws-kms-1.203.0.tgz", + "integrity": "sha512-TlG5Td7pRrgo5xta0ePIp6x6+4uq0P+1K3tQtpjEFbD2E21krujXdWNdHmuvRoNsY5NbLhOCF5g6PNCdJdvLJA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/cloud-assembly-schema": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/cloud-assembly-schema": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-kms/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-lambda": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-lambda/-/aws-lambda-1.203.0.tgz", + "integrity": "sha512-O88yvNpxi4tcizHhwRkGfEBg+WDoagxgwr2TUNFWkKQVEaL/by9BeZk84LNXLC6OO5WrnC+0qgSz1KHyDXMrqw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-applicationautoscaling": "1.203.0", + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-codeguruprofiler": "1.203.0", + "@aws-cdk/aws-ec2": "1.203.0", + "@aws-cdk/aws-ecr": "1.203.0", + "@aws-cdk/aws-ecr-assets": "1.203.0", + "@aws-cdk/aws-efs": "1.203.0", + "@aws-cdk/aws-events": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-logs": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/aws-s3-assets": "1.203.0", + "@aws-cdk/aws-signer": "1.203.0", + "@aws-cdk/aws-sns": 
"1.203.0", + "@aws-cdk/aws-sqs": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "@aws-cdk/region-info": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-applicationautoscaling": "1.203.0", + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-codeguruprofiler": "1.203.0", + "@aws-cdk/aws-ec2": "1.203.0", + "@aws-cdk/aws-ecr": "1.203.0", + "@aws-cdk/aws-ecr-assets": "1.203.0", + "@aws-cdk/aws-efs": "1.203.0", + "@aws-cdk/aws-events": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-logs": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/aws-s3-assets": "1.203.0", + "@aws-cdk/aws-signer": "1.203.0", + "@aws-cdk/aws-sns": "1.203.0", + "@aws-cdk/aws-sqs": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "@aws-cdk/region-info": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-lambda/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-logs": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-logs/-/aws-logs-1.203.0.tgz", + "integrity": "sha512-I5/0+NuPdTRSq0cK8W1w8hHyGGj4mqTXFou0sHed3BUN4zYim2cB88uosOVMOhkpGRr19+FEwEThvyntRLcTOQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-s3-assets": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-iam": 
"1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-s3-assets": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-logs/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-msk": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-msk/-/aws-msk-1.203.0.tgz", + "integrity": "sha512-t/ShG04fCFMDaCaSA697vNCH63e27kFi+V1zRsbMPomYsNLtHR+wihSorT6v2yFRtDsLBKbrWO/Hishmbbr9lw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-acmpca": "1.203.0", + "@aws-cdk/aws-ec2": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-logs": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/aws-secretsmanager": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/custom-resources": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-acmpca": "1.203.0", + "@aws-cdk/aws-ec2": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-logs": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/aws-secretsmanager": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/custom-resources": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-msk/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-s3": { + 
"version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-s3/-/aws-s3-1.203.0.tgz", + "integrity": "sha512-xPxCv7l6zh7nGoFwLNzHmP07ULtWx/ubDE6l2ZqkRF4ZHVWoyTk7zVs4Q+BmRM1mUn0yDHyMTKKf887lQu9eVg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-events": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-events": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-s3-assets": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-s3-assets/-/aws-s3-assets-1.203.0.tgz", + "integrity": "sha512-BXvIWRdHmwNkpDbQXsB0hZWGJ3szB264Dq14CzWu6+6J8beaUNrT+1/z1mglr6S3UraHMsOhhSO48zT9k+MzhA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/assets": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/assets": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-s3-assets/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-s3/node_modules/constructs": { + "version": 
"3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-sam": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-sam/-/aws-sam-1.203.0.tgz", + "integrity": "sha512-bTq1mCd2MPIcLlh2etbubVPBNVBKVE/JTfcmXWF0YPFpPKbOmNobgp6APyJdwZDrpBzqNRRCoP1np9s2TUZyvA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-sam/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-secretsmanager": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-secretsmanager/-/aws-secretsmanager-1.203.0.tgz", + "integrity": "sha512-B3oIHys70F7OtEDe6pqPm4BWYk+M1T6WMmzTMyJsgomXSzqAlo9MdzuzLY4yOIvFDEHZNet05om40aXEWi5omw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-ec2": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-lambda": "1.203.0", + "@aws-cdk/aws-sam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-ec2": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-lambda": "1.203.0", + "@aws-cdk/aws-sam": "1.203.0", + "@aws-cdk/core": "1.203.0", + 
"@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-secretsmanager/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-signer": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-signer/-/aws-signer-1.203.0.tgz", + "integrity": "sha512-czHs8DIIA2wxYW2iFTEWAVydFURMvyZO+rtv7V4LXV9G18qRdBVxyx7VU2Lhr8nd5sg+uPV4Vonjt8OvUfO12A==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-signer/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-sns": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-sns/-/aws-sns-1.203.0.tgz", + "integrity": "sha512-9HBgItNq7zM+ElEUEnomHjAgYsZWOjHaafv1x+Gbk/UH4xKzoM7t6kdlRa0qCVeyWhaClGCyRAJtOP42UiGmSg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-codestarnotifications": "1.203.0", + "@aws-cdk/aws-events": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-sqs": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-cloudwatch": "1.203.0", + 
"@aws-cdk/aws-codestarnotifications": "1.203.0", + "@aws-cdk/aws-events": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-sqs": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-sns/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-sqs": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-sqs/-/aws-sqs-1.203.0.tgz", + "integrity": "sha512-GDSWc/O9HcxjP4pbCYuZjqE2GXymzvv6rM8Q17NtERACpUMf/eAuNITLCulE1Y6ufE9A9/lgkT96I3QLd9HBNw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-sqs/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/aws-ssm": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-ssm/-/aws-ssm-1.203.0.tgz", + "integrity": "sha512-zR/eLVPO+O3MMbnMgWSLybrj7VxIt1d90QPHPZMV92hYdzWDKBfVZ09P82muNILRF+SMcK7TrFLCDi5Nos0S2g==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-iam": 
"1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/cloud-assembly-schema": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/cloud-assembly-schema": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/aws-ssm/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/cloud-assembly-schema": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/cloud-assembly-schema/-/cloud-assembly-schema-1.203.0.tgz", + "integrity": "sha512-r252InZ8Oh7q7ztriaA3n6F48QOFVfNcT/KO4XOlYyt1xDWRMENDYf+D+DVr6O5klcaa3ivvvDT7DRuW3xdVOQ==", + "bundleDependencies": [ + "jsonschema", + "semver" + ], + "license": "Apache-2.0", + "dependencies": { + "jsonschema": "^1.4.1", + "semver": "^7.3.8" + }, + "engines": { + "node": ">= 14.15.0" + } + }, + "node_modules/@aws-cdk/cloud-assembly-schema/node_modules/jsonschema": { + "version": "1.4.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/@aws-cdk/cloud-assembly-schema/node_modules/lru-cache": { + "version": "6.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@aws-cdk/cloud-assembly-schema/node_modules/semver": { + "version": "7.3.8", + "inBundle": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@aws-cdk/cloud-assembly-schema/node_modules/yallist": { + 
"version": "4.0.0", + "inBundle": true, + "license": "ISC" + }, + "node_modules/@aws-cdk/core": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/core/-/core-1.203.0.tgz", + "integrity": "sha512-3/quPwnGWKHm/Bzna/du5WP5a/Wp/NYqDyL1rJ1A3EPpsRQYywJPj77+M8nG5sD5qNIoFbhN7Q5aee+bcS7GGA==", + "bundleDependencies": [ + "fs-extra", + "minimatch", + "@balena/dockerignore", + "ignore" + ], + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/cloud-assembly-schema": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "@aws-cdk/region-info": "1.203.0", + "@balena/dockerignore": "^1.0.2", + "constructs": "^3.3.69", + "fs-extra": "^9.1.0", + "ignore": "^5.2.4", + "minimatch": "^3.1.2" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/cloud-assembly-schema": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "@aws-cdk/region-info": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/core/node_modules/@balena/dockerignore": { + "version": "1.0.2", + "inBundle": true, + "license": "Apache-2.0" + }, + "node_modules/@aws-cdk/core/node_modules/at-least-node": { + "version": "1.0.0", + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/@aws-cdk/core/node_modules/balanced-match": { + "version": "1.0.2", + "inBundle": true, + "license": "MIT" + }, + "node_modules/@aws-cdk/core/node_modules/brace-expansion": { + "version": "1.1.11", + "inBundle": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@aws-cdk/core/node_modules/concat-map": { + "version": "0.0.1", + "inBundle": true, + "license": "MIT" + }, + "node_modules/@aws-cdk/core/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": 
"Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/core/node_modules/fs-extra": { + "version": "9.1.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "at-least-node": "^1.0.0", + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@aws-cdk/core/node_modules/graceful-fs": { + "version": "4.2.10", + "inBundle": true, + "license": "ISC" + }, + "node_modules/@aws-cdk/core/node_modules/ignore": { + "version": "5.2.4", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/@aws-cdk/core/node_modules/jsonfile": { + "version": "6.1.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/@aws-cdk/core/node_modules/minimatch": { + "version": "3.1.2", + "inBundle": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@aws-cdk/core/node_modules/universalify": { + "version": "2.0.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/@aws-cdk/custom-resources": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/custom-resources/-/custom-resources-1.203.0.tgz", + "integrity": "sha512-JpUH3d5gG/2MrWCbnFC96FgRGUqcecY84xZHsfTKUlPQ2bUzElHih2tvbR21DQ5ZIKvFznol3DheduDSD/TgpQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/aws-cloudformation": "1.203.0", + "@aws-cdk/aws-ec2": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-lambda": "1.203.0", + "@aws-cdk/aws-logs": "1.203.0", + "@aws-cdk/aws-sns": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/aws-cloudformation": "1.203.0", + "@aws-cdk/aws-ec2": "1.203.0", + 
"@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-lambda": "1.203.0", + "@aws-cdk/aws-logs": "1.203.0", + "@aws-cdk/aws-sns": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + } + }, + "node_modules/@aws-cdk/custom-resources/node_modules/constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 16.14.0" + } + }, + "node_modules/@aws-cdk/cx-api": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/cx-api/-/cx-api-1.203.0.tgz", + "integrity": "sha512-W2flnJFGytifPw2ojEsh9l8MAI4UANaUcMKr+qt4eJmFwrtVcS7nasdJQGSatQdxkAwd2pX4x10brAHYoAqjjQ==", + "bundleDependencies": [ + "semver" + ], + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/cloud-assembly-schema": "1.203.0", + "semver": "^7.3.8" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@aws-cdk/cloud-assembly-schema": "1.203.0" + } + }, + "node_modules/@aws-cdk/cx-api/node_modules/lru-cache": { + "version": "6.0.0", + "inBundle": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@aws-cdk/cx-api/node_modules/semver": { + "version": "7.3.8", + "inBundle": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@aws-cdk/cx-api/node_modules/yallist": { + "version": "4.0.0", + "inBundle": true, + "license": "ISC" + }, + "node_modules/@aws-cdk/region-info": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/region-info/-/region-info-1.203.0.tgz", + "integrity": "sha512-3GzFYrdUO2NGcOrlIJ1TvjRxB0/ntBEyQgwFtVJQSvt3msCznE/w1n6pZS+oDF12NWtIPFbsJ5zTGdJ+PLMJhg==", + "license": "Apache-2.0", + "engines": { + 
"node": ">= 14.15.0" + } + }, "node_modules/@babel/code-frame": { "version": "7.24.7", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz", @@ -521,115 +1684,351 @@ "dependencies": { "@babel/helper-plugin-utils": "^7.8.0" }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.24.7.tgz", + "integrity": "sha512-c/+fVeJBB0FeKsFvwytYiUD+LBvhHjGSI0g446PRGdSVGZLRNArBUno2PETbAly3tpiNAQR5XaZ+JslxkotsbA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.7.tgz", + "integrity": "sha512-jYqfPrU9JTF0PmPy1tLYHW4Mp4KlgxJD9l2nP9fD6yT/ICi554DmrWBAEYpIelzjHf1msDP3PxJIRt/nFNfBig==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.24.7", + "@babel/parser": "^7.24.7", + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.8.tgz", + "integrity": 
"sha512-t0P1xxAPzEDcEPmjprAQq19NWum4K0EQPjMwZQZbHt+GiZqvjCHjj755Weq1YRPVzBI+3zSfvScfpnuIecVFJQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.24.7", + "@babel/generator": "^7.24.8", + "@babel/helper-environment-visitor": "^7.24.7", + "@babel/helper-function-name": "^7.24.7", + "@babel/helper-hoist-variables": "^7.24.7", + "@babel/helper-split-export-declaration": "^7.24.7", + "@babel/parser": "^7.24.8", + "@babel/types": "^7.24.8", + "debug": "^4.3.1", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse/node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/types": { + "version": "7.24.9", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.9.tgz", + "integrity": "sha512-xm8XrMKz0IlUdocVbYJe0Z9xEgidU7msskG8BbhnTPK/HZ2z/7FP7ykqPgrUH+C+r414mNfNWam1f2vqOjqjYQ==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.24.8", + "@babel/helper-validator-identifier": "^7.24.7", + "to-fast-properties": "^2.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, 
+ "engines": { + "node": ">=12" + } + }, + "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.1.tgz", + "integrity": "sha512-s3O3waFUrMV8P/XaF/+ZTp1X9XBZW1a4B97ZnjQF2KYWaFD2A8KyFBsrsfSjEmjn3RGWAIuvlneuZm3CUK3jbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", + "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/config-array": { + "version": 
"0.19.1", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.19.1.tgz", + "integrity": "sha512-fo6Mtm5mWyKjA/Chy1BYTdn5mGJoDNjC7C64ug20ADsRDGrA85bN3uK3MaKbeRkRuuIEAR5N33Jr1pbm411/PA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/object-schema": "^2.1.5", + "debug": "^4.3.1", + "minimatch": "^3.1.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/core": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.9.1.tgz", + "integrity": "sha512-GuUdqkyyzQI5RMIWkHhvTWLCyLo1jNK3vzkSyaExH5kHPDHcuL2VOpHjmMY+y3+NC69qAKToBqldTBgYeLSr9Q==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.2.0.tgz", + "integrity": "sha512-grOjVNN8P3hjJn/eIETF1wwd12DdnwFDoyceUJLYYdkpbwq3nLi+4fqrTAONx7XDALqlL220wC/RHSC/QTI/0w==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^10.0.1", + "globals": "^14.0.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/@eslint/eslintrc/node_modules/globals": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": 
"sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@eslint/eslintrc/node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" } }, - "node_modules/@babel/plugin-syntax-top-level-await": { - "version": "7.14.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", - "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "node_modules/@eslint/js": { + "version": "9.17.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.17.0.tgz", + "integrity": "sha512-Sxc4hqcs1kTu0iID3kcZDW3JHq2a77HO9P8CP6YEA/FpH3Ll8UXE2r/86Rz9YJLKme39S9vU5OWNjC6Xl0Cr3w==", "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.14.5" - }, + "license": "MIT", "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, - "node_modules/@babel/plugin-syntax-typescript": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.24.7.tgz", - "integrity": "sha512-c/+fVeJBB0FeKsFvwytYiUD+LBvhHjGSI0g446PRGdSVGZLRNArBUno2PETbAly3tpiNAQR5XaZ+JslxkotsbA==", + "node_modules/@eslint/object-schema": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.5.tgz", + "integrity": 
"sha512-o0bhxnL89h5Bae5T318nFoFzGy+YE5i/gGkoPAgkmTVdRKTiv3p8JHevPiPaMwoloKfEiiaHlawCqaZMqRm+XQ==", "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" - }, + "license": "Apache-2.0", "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, - "node_modules/@babel/template": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.7.tgz", - "integrity": "sha512-jYqfPrU9JTF0PmPy1tLYHW4Mp4KlgxJD9l2nP9fD6yT/ICi554DmrWBAEYpIelzjHf1msDP3PxJIRt/nFNfBig==", + "node_modules/@eslint/plugin-kit": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.4.tgz", + "integrity": "sha512-zSkKow6H5Kdm0ZUQUB2kV5JIXqoG0+uH5YADhaEHswm664N9Db8dXSi0nMJpacpMf+MyyglF1vnZohpEg5yUtg==", "dev": true, + "license": "Apache-2.0", "dependencies": { - "@babel/code-frame": "^7.24.7", - "@babel/parser": "^7.24.7", - "@babel/types": "^7.24.7" + "levn": "^0.4.1" }, "engines": { - "node": ">=6.9.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, - "node_modules/@babel/traverse": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.8.tgz", - "integrity": "sha512-t0P1xxAPzEDcEPmjprAQq19NWum4K0EQPjMwZQZbHt+GiZqvjCHjj755Weq1YRPVzBI+3zSfvScfpnuIecVFJQ==", + "node_modules/@humanfs/core": { + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", + "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", "dev": true, - "dependencies": { - "@babel/code-frame": "^7.24.7", - "@babel/generator": "^7.24.8", - "@babel/helper-environment-visitor": "^7.24.7", - "@babel/helper-function-name": "^7.24.7", - "@babel/helper-hoist-variables": "^7.24.7", - "@babel/helper-split-export-declaration": "^7.24.7", - "@babel/parser": "^7.24.8", - "@babel/types": "^7.24.8", - "debug": "^4.3.1", - 
"globals": "^11.1.0" - }, + "license": "Apache-2.0", "engines": { - "node": ">=6.9.0" + "node": ">=18.18.0" } }, - "node_modules/@babel/types": { - "version": "7.24.9", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.9.tgz", - "integrity": "sha512-xm8XrMKz0IlUdocVbYJe0Z9xEgidU7msskG8BbhnTPK/HZ2z/7FP7ykqPgrUH+C+r414mNfNWam1f2vqOjqjYQ==", + "node_modules/@humanfs/node": { + "version": "0.16.6", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz", + "integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==", "dev": true, + "license": "Apache-2.0", "dependencies": { - "@babel/helper-string-parser": "^7.24.8", - "@babel/helper-validator-identifier": "^7.24.7", - "to-fast-properties": "^2.0.0" + "@humanfs/core": "^0.19.1", + "@humanwhocodes/retry": "^0.3.0" }, "engines": { - "node": ">=6.9.0" + "node": ">=18.18.0" } }, - "node_modules/@bcoe/v8-coverage": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", - "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", - "dev": true - }, - "node_modules/@cspotcode/source-map-support": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", - "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "node_modules/@humanfs/node/node_modules/@humanwhocodes/retry": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", + "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", "dev": true, - "dependencies": { - "@jridgewell/trace-mapping": "0.3.9" + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } 
+ }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "license": "Apache-2.0", "engines": { - "node": ">=12" + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" } }, - "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { - "version": "0.3.9", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", - "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "node_modules/@humanwhocodes/retry": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.1.tgz", + "integrity": "sha512-c7hNEllBlenFTHBky65mhq8WD2kbN9Q6gk0bTk8lSBvc554jpXSkST1iePudpt7+A/AQvuHs9EMqjHDXMY1lrA==", "dev": true, - "dependencies": { - "@jridgewell/resolve-uri": "^3.0.3", - "@jridgewell/sourcemap-codec": "^1.4.10" + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" } }, "node_modules/@istanbuljs/load-nyc-config": { @@ -983,6 +2382,57 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": 
"https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@pkgr/core": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.1.1.tgz", + "integrity": "sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts" + } + }, "node_modules/@sinclair/typebox": { "version": "0.27.8", "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", @@ -1053,103 +2503,337 @@ "@babel/types": "^7.0.0" } }, - "node_modules/@types/babel__template": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", - "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.20.6", + "resolved": 
"https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.6.tgz", + "integrity": "sha512-r1bzfrm0tomOI8g1SzvCaQHo6Lcv6zu0EA+W2kHrt8dyrHQxGzBBL4kdkzIS+jBMV+EYcMAEAqXqYaLJq5rOZg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.20.7" + } + }, + "node_modules/@types/estree": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", + "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/graceful-fs": { + "version": "4.1.9", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", + "integrity": "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", + "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", + "dev": true + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", + "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", + "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/jest": { + "version": "29.5.12", + "resolved": 
"https://registry.npmjs.org/@types/jest/-/jest-29.5.12.tgz", + "integrity": "sha512-eDC8bTvT/QhYdxJAulQikueigY5AsdBRH2yDKW3yveW7svY3+DzN84/2NUgkw10RTiJbWqZrTtoGVdYlvFJdLw==", + "dev": true, + "dependencies": { + "expect": "^29.0.0", + "pretty-format": "^29.0.0" + } + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "20.14.9", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.9.tgz", + "integrity": "sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==", + "dev": true, + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/stack-utils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", + "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", + "dev": true + }, + "node_modules/@types/yargs": { + "version": "17.0.32", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.32.tgz", + "integrity": "sha512-xQ67Yc/laOG5uMfX/093MRlGGCIBzZMarVa+gfNKJxWAIgykYpVGkBdbqEzGDDfCrVUj6Hiff4mTZ5BA6TmAog==", + "dev": true, + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.3", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", + "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", + "dev": true + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.18.0.tgz", + "integrity": 
"sha512-NR2yS7qUqCL7AIxdJUQf2MKKNDVNaig/dEB0GBLU7D+ZdHgK1NoH/3wsgO3OnPVipn51tG3MAwaODEGil70WEw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "8.18.0", + "@typescript-eslint/type-utils": "8.18.0", + "@typescript-eslint/utils": "8.18.0", + "@typescript-eslint/visitor-keys": "8.18.0", + "graphemer": "^1.4.0", + "ignore": "^5.3.1", + "natural-compare": "^1.4.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.8.0" + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.18.0.tgz", + "integrity": "sha512-hgUZ3kTEpVzKaK3uNibExUYm6SKKOmTU2BOxBSvOYwtJEPdVQ70kZJpPjstlnhCHcuc2WGfSbpKlb/69ttyN5Q==", + "dev": true, + "license": "MITClause", + "dependencies": { + "@typescript-eslint/scope-manager": "8.18.0", + "@typescript-eslint/types": "8.18.0", + "@typescript-eslint/typescript-estree": "8.18.0", + "@typescript-eslint/visitor-keys": "8.18.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.8.0" + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.18.0.tgz", + "integrity": "sha512-PNGcHop0jkK2WVYGotk/hxj+UFLhXtGPiGtiaWgVBVP1jhMoMCHlTyJA+hEj4rszoSdLTK3fN4oOatrL0Cp+Xw==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@typescript-eslint/types": "8.18.0", + "@typescript-eslint/visitor-keys": "8.18.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.18.0.tgz", + "integrity": "sha512-er224jRepVAVLnMF2Q7MZJCq5CsdH2oqjP4dT7K6ij09Kyd+R21r7UVJrF0buMVdZS5QRhDzpvzAxHxabQadow==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/parser": "^7.1.0", - "@babel/types": "^7.0.0" + "@typescript-eslint/typescript-estree": "8.18.0", + "@typescript-eslint/utils": "8.18.0", + "debug": "^4.3.4", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.8.0" } }, - "node_modules/@types/babel__traverse": { - "version": "7.20.6", - "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.6.tgz", - "integrity": "sha512-r1bzfrm0tomOI8g1SzvCaQHo6Lcv6zu0EA+W2kHrt8dyrHQxGzBBL4kdkzIS+jBMV+EYcMAEAqXqYaLJq5rOZg==", + "node_modules/@typescript-eslint/types": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.18.0.tgz", + "integrity": "sha512-FNYxgyTCAnFwTrzpBGq+zrnoTO4x0c1CKYY5MuUTzpScqmY5fmsh2o3+57lqdI3NZucBDCzDgdEbIaNfAjAHQA==", "dev": true, - "dependencies": { - "@babel/types": "^7.20.7" + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@types/graceful-fs": { - "version": "4.1.9", - "resolved": 
"https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", - "integrity": "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", + "node_modules/@typescript-eslint/typescript-estree": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.18.0.tgz", + "integrity": "sha512-rqQgFRu6yPkauz+ms3nQpohwejS8bvgbPyIDq13cgEDbkXt4LH4OkDMT0/fN1RUtzG8e8AKJyDBoocuQh8qNeg==", "dev": true, + "license": "MIT", "dependencies": { - "@types/node": "*" + "@typescript-eslint/types": "8.18.0", + "@typescript-eslint/visitor-keys": "8.18.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.8.0" } }, - "node_modules/@types/istanbul-lib-coverage": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", - "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", - "dev": true - }, - "node_modules/@types/istanbul-lib-report": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", - "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", + "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", "dev": true, + "license": "MIT", "dependencies": { - 
"@types/istanbul-lib-coverage": "*" + "balanced-match": "^1.0.0" } }, - "node_modules/@types/istanbul-reports": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", - "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", "dev": true, + "license": "ISC", "dependencies": { - "@types/istanbul-lib-report": "*" + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/@types/jest": { - "version": "29.5.12", - "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.12.tgz", - "integrity": "sha512-eDC8bTvT/QhYdxJAulQikueigY5AsdBRH2yDKW3yveW7svY3+DzN84/2NUgkw10RTiJbWqZrTtoGVdYlvFJdLw==", + "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "dev": true, - "dependencies": { - "expect": "^29.0.0", - "pretty-format": "^29.0.0" + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" } }, - "node_modules/@types/node": { - "version": "20.14.9", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.9.tgz", - "integrity": "sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==", + "node_modules/@typescript-eslint/utils": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.18.0.tgz", + 
"integrity": "sha512-p6GLdY383i7h5b0Qrfbix3Vc3+J2k6QWw6UMUeY5JGfm3C5LbZ4QIZzJNoNOfgyRe0uuYKjvVOsO/jD4SJO+xg==", "dev": true, + "license": "MIT", "dependencies": { - "undici-types": "~5.26.4" + "@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "8.18.0", + "@typescript-eslint/types": "8.18.0", + "@typescript-eslint/typescript-estree": "8.18.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.8.0" } }, - "node_modules/@types/stack-utils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", - "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", - "dev": true - }, - "node_modules/@types/yargs": { - "version": "17.0.32", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.32.tgz", - "integrity": "sha512-xQ67Yc/laOG5uMfX/093MRlGGCIBzZMarVa+gfNKJxWAIgykYpVGkBdbqEzGDDfCrVUj6Hiff4mTZ5BA6TmAog==", + "node_modules/@typescript-eslint/visitor-keys": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.18.0.tgz", + "integrity": "sha512-pCh/qEA8Lb1wVIqNvBke8UaRjJ6wrAWkJO5yyIbs8Yx6TNGYyfNjOo61tLv+WwLvoLPp4BQ8B7AHKijl8NGUfw==", "dev": true, + "license": "MIT", "dependencies": { - "@types/yargs-parser": "*" + "@typescript-eslint/types": "8.18.0", + "eslint-visitor-keys": "^4.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@types/yargs-parser": { - "version": "21.0.3", - "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", - "integrity": 
"sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", - "dev": true - }, "node_modules/acorn": { - "version": "8.12.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", - "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", + "version": "8.14.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", + "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", "dev": true, + "license": "MIT", "bin": { "acorn": "bin/acorn" }, @@ -1157,6 +2841,16 @@ "node": ">=0.4.0" } }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, "node_modules/acorn-walk": { "version": "8.3.3", "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.3.tgz", @@ -1169,6 +2863,23 @@ "node": ">=0.4.0" } }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, "node_modules/ansi-escapes": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", @@ -1972,10 +3683,11 @@ "dev": true }, "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", 
- "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", "dev": true, + "license": "MIT", "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", @@ -2016,6 +3728,13 @@ } } }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, "node_modules/deepmerge": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", @@ -2109,13 +3828,227 @@ "node": ">=6" } }, - "node_modules/escape-string-regexp": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", - "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/eslint": { + "version": "9.17.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.17.0.tgz", + "integrity": "sha512-evtlNcpJg+cZLcnVKwsai8fExnqjGPicK7gnUtlNuzu+Fv9bI0aLpND5T44VLQtoMEnI57LoXO9XAkIXwohKrA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.12.1", + "@eslint/config-array": "^0.19.0", + "@eslint/core": "^0.9.0", + "@eslint/eslintrc": "^3.2.0", + "@eslint/js": 
"9.17.0", + "@eslint/plugin-kit": "^0.2.3", + "@humanfs/node": "^0.16.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.4.1", + "@types/estree": "^1.0.6", + "@types/json-schema": "^7.0.15", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.6", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.2.0", + "eslint-visitor-keys": "^4.2.0", + "espree": "^10.3.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } + } + }, + "node_modules/eslint-config-prettier": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", + "integrity": "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", + "dev": true, + "license": "MIT", + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-plugin-prettier": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.2.1.tgz", + "integrity": "sha512-gH3iR3g4JfF+yYPaJYkN7jEl9QbweL/YfkoRlNnuIEHEz1vHVlCmWOS+eGGiRuzHQXdJFCOTxRgvju9b8VUmrw==", + "dev": true, + "license": "MIT", + "dependencies": { + "prettier-linter-helpers": "^1.0.0", + "synckit": "^0.9.1" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + 
"funding": { + "url": "https://opencollective.com/eslint-plugin-prettier" + }, + "peerDependencies": { + "@types/eslint": ">=8.0.0", + "eslint": ">=8.0.0", + "eslint-config-prettier": "*", + "prettier": ">=3.0.0" + }, + "peerDependenciesMeta": { + "@types/eslint": { + "optional": true + }, + "eslint-config-prettier": { + "optional": true + } + } + }, + "node_modules/eslint-scope": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.2.0.tgz", + "integrity": "sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", + "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint/node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": 
"sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint/node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint/node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/espree": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz", + "integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==", "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.14.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.0" + }, "engines": { - "node": ">=8" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, "node_modules/esprima": { @@ -2131,6 +4064,52 @@ "node": ">=4" } }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": 
"sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -2179,12 +4158,73 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-diff": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.3.0.tgz", + "integrity": "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==", + 
"dev": true, + "license": "Apache-2.0" + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", "dev": true }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fastq": { + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, "node_modules/fb-watchman": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", @@ -2194,6 +4234,19 @@ 
"bser": "2.1.1" } }, + "node_modules/file-entry-cache": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^4.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, "node_modules/filelist": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", @@ -2249,6 +4302,27 @@ "node": ">=8" } }, + "node_modules/flat-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.4" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/flatted": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.2.tgz", + "integrity": "sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA==", + "dev": true, + "license": "ISC" + }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -2338,13 +4412,30 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, "node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": 
"sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "version": "15.13.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-15.13.0.tgz", + "integrity": "sha512-49TewVEz0UxZjr1WYYsWpPrhyC/B/pA8Bq0fUmet2n+eR7yn0IvNzNaoBwnK6mdkzcN+se7Ez9zUgULTz2QH4g==", "dev": true, + "license": "MIT", "engines": { - "node": ">=4" + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/graceful-fs": { @@ -2353,6 +4444,13 @@ "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", "dev": true }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true, + "license": "MIT" + }, "node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -2389,6 +4487,43 @@ "node": ">=10.17.0" } }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/import-fresh/node_modules/resolve-from": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, "node_modules/import-local": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", @@ -2455,6 +4590,16 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -2473,6 +4618,19 @@ "node": ">=6" } }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -3204,12 +5362,33 @@ "node": ">=4" } }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, "node_modules/json-parse-even-better-errors": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", 
"integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", "dev": true }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" + }, "node_modules/json5": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", @@ -3222,6 +5401,16 @@ "node": ">=6" } }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, "node_modules/kleur": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", @@ -3240,6 +5429,20 @@ "node": ">=6" } }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/lines-and-columns": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", @@ -3269,6 +5472,13 @@ "integrity": 
"sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", "dev": true }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT" + }, "node_modules/lru-cache": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", @@ -3326,6 +5536,16 @@ "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", "dev": true }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, "node_modules/micromatch": { "version": "4.0.7", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", @@ -3429,6 +5649,24 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/p-limit": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", @@ -3480,6 +5718,19 @@ "node": ">=6" } }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + 
"integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/parse-json": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", @@ -3570,6 +5821,45 @@ "node": ">=8" } }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.4.2.tgz", + "integrity": "sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ==", + "dev": true, + "license": "MIT", + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/prettier-linter-helpers": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", + "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-diff": "^1.1.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, "node_modules/pretty-format": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", @@ -3609,6 +5899,16 @@ "node": ">= 6" } }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", 
+ "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/pure-rand": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", @@ -3625,6 +5925,27 @@ } ] }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, "node_modules/react-is": { "version": "18.3.1", "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", @@ -3683,8 +6004,43 @@ "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.2.tgz", "integrity": "sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg==", "dev": true, - "engines": { - "node": ">=10" + "engines": { + "node": ">=10" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": 
"https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" } }, "node_modules/semver": { @@ -3866,6 +6222,23 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/synckit": { + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.9.2.tgz", + "integrity": "sha512-vrozgXDQwYO72vHjUb/HnFbQx1exDjoKzqx23aXEg2a9VIg2TSFZ8FmeZpTjUCFMYw7mpX4BE2SFu8wI7asYsw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@pkgr/core": "^0.1.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts" + } + }, "node_modules/test-exclude": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", @@ -3907,6 +6280,19 @@ "node": ">=8.0" } }, + "node_modules/ts-api-utils": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.4.3.tgz", + "integrity": "sha512-i3eMG77UTMD0hZhgRS562pv83RC6ukSAC2GMNWc+9dieh/+jDM5u5YG+NHX6VNDRHQcHwmsTHctP9LhbC3WxVw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "typescript": ">=4.2.0" + } + }, "node_modules/ts-jest": { "version": "29.2.3", "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.2.3.tgz", @@ -4010,6 +6396,26 @@ } } }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true, + "license": "0BSD" + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + 
"node": ">= 0.8.0" + } + }, "node_modules/type-detect": { "version": "4.0.8", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", @@ -4044,6 +6450,29 @@ "node": ">=14.17" } }, + "node_modules/typescript-eslint": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.18.0.tgz", + "integrity": "sha512-Xq2rRjn6tzVpAyHr3+nmSg1/9k9aIHnJ2iZeOH7cfGOWqTkXTm3kwpQglEuLGdNrYvPF+2gtAs+/KF5rjVo+WQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/eslint-plugin": "8.18.0", + "@typescript-eslint/parser": "8.18.0", + "@typescript-eslint/utils": "8.18.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.8.0" + } + }, "node_modules/undici-types": { "version": "5.26.5", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", @@ -4080,6 +6509,16 @@ "browserslist": ">= 4.21.0" } }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, "node_modules/v8-compile-cache-lib": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", @@ -4124,6 +6563,16 @@ "node": ">= 8" } }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/wrap-ansi": { 
"version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -4211,44 +6660,731 @@ "node": ">=6" } }, - "node_modules/yocto-queue": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "dev": true, - "engines": { - "node": ">=10" + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + }, + "dependencies": { + "@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dev": true, + "requires": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "@aws-cdk/asset-awscli-v1": { + "version": "2.2.202", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.202.tgz", + "integrity": "sha512-JqlF0D4+EVugnG5dAsNZMqhu3HW7ehOXm5SDMxMbXNDMdsF0pxtQKNHRl52z1U9igsHmaFpUgSGjbhAJ+0JONg==" + }, + "@aws-cdk/asset-kubectl-v20": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-kubectl-v20/-/asset-kubectl-v20-2.1.2.tgz", + "integrity": "sha512-3M2tELJOxQv0apCIiuKQ4pAbncz9GuLwnKFqxifWfe77wuMxyTRPmxssYHs42ePqzap1LT6GDcPygGs+hHstLg==" + }, + "@aws-cdk/asset-node-proxy-agent-v6": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v6/-/asset-node-proxy-agent-v6-2.0.3.tgz", + "integrity": 
"sha512-twhuEG+JPOYCYPx/xy5uH2+VUsIEhPTzDY0F1KuB+ocjWWB/KEDiOVL19nHvbPCB6fhWnkykXEMJ4HHcKvjtvg==" + }, + "@aws-cdk/assets": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/assets/-/assets-1.203.0.tgz", + "integrity": "sha512-aEexr1PEZPqTcBXiKLwmiWYtpoC6vPpsGAqe39U6lovXP675qO6/VTpE26AUtS0sDkuhCBnXDQR5H49e0+A+jQ==", + "requires": { + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-acmpca": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-acmpca/-/aws-acmpca-1.203.0.tgz", + "integrity": "sha512-0+7de1E0JxTjPe8pm73Io3WAEC9pvEjdKmNYwXApYCslqlFqBOuHjU9pQh++6cYcUWCMmav1iwM9zqCB6ZVRSQ==", + "requires": { + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-applicationautoscaling": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-applicationautoscaling/-/aws-applicationautoscaling-1.203.0.tgz", + "integrity": "sha512-tz/BzZ++hd+T5s4AQo4l7fKaK6+xeGEbR7fGulgz8xByW5V46/krXvpTWVvM7VV2WSigVx1QeS9vcT3MOdsYBQ==", + "requires": { + "@aws-cdk/aws-autoscaling-common": "1.203.0", + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": 
"sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-autoscaling-common": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-autoscaling-common/-/aws-autoscaling-common-1.203.0.tgz", + "integrity": "sha512-ET3whArynAzRMxrnlkgAdWWzdaRg2QJWcepjSr615pwF0cOFjTW2scpPWIpcaugN2z+wFPlylP5zloA16FM0Tg==", + "requires": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-cloudformation": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-cloudformation/-/aws-cloudformation-1.203.0.tgz", + "integrity": "sha512-vwNrk5KT0TgIKy+hinjWu/CBEet394mK8o2eFzQG5G82An0O3n/ILdaBrXZut0tyHxJSYQ90+Zaor6Ndld49jA==", + "requires": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-lambda": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/aws-sns": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-cloudwatch": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-cloudwatch/-/aws-cloudwatch-1.203.0.tgz", + "integrity": "sha512-RJfUHAtCPefcgWlH6Nqcv0f1svPfxbESUC52tK6yKqhmzo+57ePLkONsn88x22TuWo/hXOpjxYqrUECr0S5YqA==", + "requires": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": 
"3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-codeguruprofiler": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-codeguruprofiler/-/aws-codeguruprofiler-1.203.0.tgz", + "integrity": "sha512-Tl9DGRY/BN615J8H4SUQxQ0Zb2TzkaBYHFx4F1lt4NYdWHYh6oALxLEBSkXD5txPvvtH4MouDENuqMBV/fZI3w==", + "requires": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-codestarnotifications": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-codestarnotifications/-/aws-codestarnotifications-1.203.0.tgz", + "integrity": "sha512-Qd3iJiFx5Nt+rZRNnmMUmknNetq4069pUhOTv0NTxB+ZmWQ1leQ4iSrhQ5YOANWBK0+EtPMz9YBjKjE4zBPf+Q==", + "requires": { + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-ec2": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-ec2/-/aws-ec2-1.203.0.tgz", + "integrity": "sha512-6Q/kvAKsprESeGCYMYS9KiChDyhzMk6NerSreVD83UbcvUJU9EjGUJF+HJfRQXn66YjdOsXKcm89Ni31ftP5PQ==", + "requires": { + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-logs": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/aws-s3-assets": "1.203.0", + 
"@aws-cdk/aws-ssm": "1.203.0", + "@aws-cdk/cloud-assembly-schema": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "@aws-cdk/region-info": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-ecr": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-ecr/-/aws-ecr-1.203.0.tgz", + "integrity": "sha512-pMijGTZzncb10zJLuqDzFtHpVxpK2dQdD/1Md7FYF1Mm7/r2CFx4Q2H3f1fOi4sBXk3ftypV8X7HFzIrWPwCSg==", + "requires": { + "@aws-cdk/aws-events": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-ecr-assets": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-ecr-assets/-/aws-ecr-assets-1.203.0.tgz", + "integrity": "sha512-wPAjllyLs+TvdakhwATjIyc5mBuNJFurPM2S1mo7YC4fURp2NOb9vHpIFgV9ZC+r8xadBFWt13de72cyl+h2iw==", + "requires": { + "@aws-cdk/assets": "1.203.0", + "@aws-cdk/aws-ecr": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-efs": { + "version": "1.203.0", + 
"resolved": "https://registry.npmjs.org/@aws-cdk/aws-efs/-/aws-efs-1.203.0.tgz", + "integrity": "sha512-JYy8N7GwKCXjFpsBZZqy9VbS/zJGDSq8etXAvBsrscDveDzzQr4/6CiXYU70aL5lLV34ex07A5lKBmII9cqq0Q==", + "requires": { + "@aws-cdk/aws-ec2": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/cloud-assembly-schema": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-events": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-events/-/aws-events-1.203.0.tgz", + "integrity": "sha512-LYjKGqqosXG3HLAmvSAS/N5OSxH0uqHmDlisapwl4kEBPfSOvzzVF9TTuMulDYfSBV0Esspb3RIkXc/ja7f9JQ==", + "requires": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-iam": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-iam/-/aws-iam-1.203.0.tgz", + "integrity": "sha512-8qXLtOzkaLlk7WlssocExMYruOb59irhspTvHvf/kpcMBmWQoyNRC80Ab3QtXkZNBppfrMkROGZvqzW2GlxAwQ==", + "requires": { + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "@aws-cdk/region-info": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } 
+ } + }, + "@aws-cdk/aws-kms": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-kms/-/aws-kms-1.203.0.tgz", + "integrity": "sha512-TlG5Td7pRrgo5xta0ePIp6x6+4uq0P+1K3tQtpjEFbD2E21krujXdWNdHmuvRoNsY5NbLhOCF5g6PNCdJdvLJA==", + "requires": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/cloud-assembly-schema": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-lambda": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-lambda/-/aws-lambda-1.203.0.tgz", + "integrity": "sha512-O88yvNpxi4tcizHhwRkGfEBg+WDoagxgwr2TUNFWkKQVEaL/by9BeZk84LNXLC6OO5WrnC+0qgSz1KHyDXMrqw==", + "requires": { + "@aws-cdk/aws-applicationautoscaling": "1.203.0", + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-codeguruprofiler": "1.203.0", + "@aws-cdk/aws-ec2": "1.203.0", + "@aws-cdk/aws-ecr": "1.203.0", + "@aws-cdk/aws-ecr-assets": "1.203.0", + "@aws-cdk/aws-efs": "1.203.0", + "@aws-cdk/aws-events": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-logs": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/aws-s3-assets": "1.203.0", + "@aws-cdk/aws-signer": "1.203.0", + "@aws-cdk/aws-sns": "1.203.0", + "@aws-cdk/aws-sqs": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "@aws-cdk/region-info": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-logs": { + 
"version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-logs/-/aws-logs-1.203.0.tgz", + "integrity": "sha512-I5/0+NuPdTRSq0cK8W1w8hHyGGj4mqTXFou0sHed3BUN4zYim2cB88uosOVMOhkpGRr19+FEwEThvyntRLcTOQ==", + "requires": { + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-s3-assets": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-msk": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-msk/-/aws-msk-1.203.0.tgz", + "integrity": "sha512-t/ShG04fCFMDaCaSA697vNCH63e27kFi+V1zRsbMPomYsNLtHR+wihSorT6v2yFRtDsLBKbrWO/Hishmbbr9lw==", + "requires": { + "@aws-cdk/aws-acmpca": "1.203.0", + "@aws-cdk/aws-ec2": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-logs": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/aws-secretsmanager": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/custom-resources": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-s3": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-s3/-/aws-s3-1.203.0.tgz", + "integrity": "sha512-xPxCv7l6zh7nGoFwLNzHmP07ULtWx/ubDE6l2ZqkRF4ZHVWoyTk7zVs4Q+BmRM1mUn0yDHyMTKKf887lQu9eVg==", + "requires": { + "@aws-cdk/aws-events": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/core": 
"1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-s3-assets": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-s3-assets/-/aws-s3-assets-1.203.0.tgz", + "integrity": "sha512-BXvIWRdHmwNkpDbQXsB0hZWGJ3szB264Dq14CzWu6+6J8beaUNrT+1/z1mglr6S3UraHMsOhhSO48zT9k+MzhA==", + "requires": { + "@aws-cdk/assets": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-s3": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-sam": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-sam/-/aws-sam-1.203.0.tgz", + "integrity": "sha512-bTq1mCd2MPIcLlh2etbubVPBNVBKVE/JTfcmXWF0YPFpPKbOmNobgp6APyJdwZDrpBzqNRRCoP1np9s2TUZyvA==", + "requires": { + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-secretsmanager": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-secretsmanager/-/aws-secretsmanager-1.203.0.tgz", + "integrity": "sha512-B3oIHys70F7OtEDe6pqPm4BWYk+M1T6WMmzTMyJsgomXSzqAlo9MdzuzLY4yOIvFDEHZNet05om40aXEWi5omw==", + "requires": { + 
"@aws-cdk/aws-ec2": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-lambda": "1.203.0", + "@aws-cdk/aws-sam": "1.203.0", + "@aws-cdk/core": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-signer": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-signer/-/aws-signer-1.203.0.tgz", + "integrity": "sha512-czHs8DIIA2wxYW2iFTEWAVydFURMvyZO+rtv7V4LXV9G18qRdBVxyx7VU2Lhr8nd5sg+uPV4Vonjt8OvUfO12A==", + "requires": { + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-sns": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-sns/-/aws-sns-1.203.0.tgz", + "integrity": "sha512-9HBgItNq7zM+ElEUEnomHjAgYsZWOjHaafv1x+Gbk/UH4xKzoM7t6kdlRa0qCVeyWhaClGCyRAJtOP42UiGmSg==", + "requires": { + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-codestarnotifications": "1.203.0", + "@aws-cdk/aws-events": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/aws-sqs": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-sqs": { + "version": "1.203.0", + 
"resolved": "https://registry.npmjs.org/@aws-cdk/aws-sqs/-/aws-sqs-1.203.0.tgz", + "integrity": "sha512-GDSWc/O9HcxjP4pbCYuZjqE2GXymzvv6rM8Q17NtERACpUMf/eAuNITLCulE1Y6ufE9A9/lgkT96I3QLd9HBNw==", + "requires": { + "@aws-cdk/aws-cloudwatch": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/aws-ssm": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-ssm/-/aws-ssm-1.203.0.tgz", + "integrity": "sha512-zR/eLVPO+O3MMbnMgWSLybrj7VxIt1d90QPHPZMV92hYdzWDKBfVZ09P82muNILRF+SMcK7TrFLCDi5Nos0S2g==", + "requires": { + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-kms": "1.203.0", + "@aws-cdk/cloud-assembly-schema": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } + }, + "@aws-cdk/cloud-assembly-schema": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/cloud-assembly-schema/-/cloud-assembly-schema-1.203.0.tgz", + "integrity": "sha512-r252InZ8Oh7q7ztriaA3n6F48QOFVfNcT/KO4XOlYyt1xDWRMENDYf+D+DVr6O5klcaa3ivvvDT7DRuW3xdVOQ==", + "requires": { + "jsonschema": "^1.4.1", + "semver": "^7.3.8" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "dependencies": { + "jsonschema": { + "version": "1.4.1", + "bundled": true + }, + "lru-cache": { + "version": "6.0.0", + "bundled": true, + "requires": { + "yallist": "^4.0.0" + } + }, + "semver": { + "version": "7.3.8", + 
"bundled": true, + "requires": { + "lru-cache": "^6.0.0" + } + }, + "yallist": { + "version": "4.0.0", + "bundled": true + } } - } - }, - "dependencies": { - "@ampproject/remapping": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", - "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", - "dev": true, + }, + "@aws-cdk/core": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/core/-/core-1.203.0.tgz", + "integrity": "sha512-3/quPwnGWKHm/Bzna/du5WP5a/Wp/NYqDyL1rJ1A3EPpsRQYywJPj77+M8nG5sD5qNIoFbhN7Q5aee+bcS7GGA==", "requires": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.24" + "@aws-cdk/cloud-assembly-schema": "1.203.0", + "@aws-cdk/cx-api": "1.203.0", + "@aws-cdk/region-info": "1.203.0", + "@balena/dockerignore": "^1.0.2", + "constructs": "^3.3.69", + "fs-extra": "^9.1.0", + "ignore": "^5.2.4", + "minimatch": "^3.1.2" + }, + "dependencies": { + "@balena/dockerignore": { + "version": "1.0.2", + "bundled": true + }, + "at-least-node": { + "version": "1.0.0", + "bundled": true + }, + "balanced-match": { + "version": "1.0.2", + "bundled": true + }, + "brace-expansion": { + "version": "1.1.11", + "bundled": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "concat-map": { + "version": "0.0.1", + "bundled": true + }, + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + }, + "fs-extra": { + "version": "9.1.0", + "bundled": true, + "requires": { + "at-least-node": "^1.0.0", + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + } + }, + "graceful-fs": { + "version": "4.2.10", + "bundled": true + }, + "ignore": { + "version": "5.2.4", + "bundled": true + }, 
+ "jsonfile": { + "version": "6.1.0", + "bundled": true, + "requires": { + "graceful-fs": "^4.1.6", + "universalify": "^2.0.0" + } + }, + "minimatch": { + "version": "3.1.2", + "bundled": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "universalify": { + "version": "2.0.0", + "bundled": true + } } }, - "@aws-cdk/asset-awscli-v1": { - "version": "2.2.202", - "resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.202.tgz", - "integrity": "sha512-JqlF0D4+EVugnG5dAsNZMqhu3HW7ehOXm5SDMxMbXNDMdsF0pxtQKNHRl52z1U9igsHmaFpUgSGjbhAJ+0JONg==" + "@aws-cdk/custom-resources": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/custom-resources/-/custom-resources-1.203.0.tgz", + "integrity": "sha512-JpUH3d5gG/2MrWCbnFC96FgRGUqcecY84xZHsfTKUlPQ2bUzElHih2tvbR21DQ5ZIKvFznol3DheduDSD/TgpQ==", + "requires": { + "@aws-cdk/aws-cloudformation": "1.203.0", + "@aws-cdk/aws-ec2": "1.203.0", + "@aws-cdk/aws-iam": "1.203.0", + "@aws-cdk/aws-lambda": "1.203.0", + "@aws-cdk/aws-logs": "1.203.0", + "@aws-cdk/aws-sns": "1.203.0", + "@aws-cdk/core": "1.203.0", + "constructs": "^3.3.69" + }, + "dependencies": { + "constructs": { + "version": "3.4.344", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-3.4.344.tgz", + "integrity": "sha512-Qq3upn44oGdvgasHUKWVFsrynyYrtVRd9fd8ko9cJOrFzx9eCm3iI4bhBryQqaISdausbTYUOXmoEe/YSJ16Nw==" + } + } }, - "@aws-cdk/asset-kubectl-v20": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/@aws-cdk/asset-kubectl-v20/-/asset-kubectl-v20-2.1.2.tgz", - "integrity": "sha512-3M2tELJOxQv0apCIiuKQ4pAbncz9GuLwnKFqxifWfe77wuMxyTRPmxssYHs42ePqzap1LT6GDcPygGs+hHstLg==" + "@aws-cdk/cx-api": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/cx-api/-/cx-api-1.203.0.tgz", + "integrity": "sha512-W2flnJFGytifPw2ojEsh9l8MAI4UANaUcMKr+qt4eJmFwrtVcS7nasdJQGSatQdxkAwd2pX4x10brAHYoAqjjQ==", + "requires": { + "@aws-cdk/cloud-assembly-schema": "1.203.0", + 
"semver": "^7.3.8" + }, + "dependencies": { + "lru-cache": { + "version": "6.0.0", + "bundled": true, + "requires": { + "yallist": "^4.0.0" + } + }, + "semver": { + "version": "7.3.8", + "bundled": true, + "requires": { + "lru-cache": "^6.0.0" + } + }, + "yallist": { + "version": "4.0.0", + "bundled": true + } + } }, - "@aws-cdk/asset-node-proxy-agent-v6": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v6/-/asset-node-proxy-agent-v6-2.0.3.tgz", - "integrity": "sha512-twhuEG+JPOYCYPx/xy5uH2+VUsIEhPTzDY0F1KuB+ocjWWB/KEDiOVL19nHvbPCB6fhWnkykXEMJ4HHcKvjtvg==" + "@aws-cdk/region-info": { + "version": "1.203.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/region-info/-/region-info-1.203.0.tgz", + "integrity": "sha512-3GzFYrdUO2NGcOrlIJ1TvjRxB0/ntBEyQgwFtVJQSvt3msCznE/w1n6pZS+oDF12NWtIPFbsJ5zTGdJ+PLMJhg==" }, "@babel/code-frame": { "version": "7.24.7", @@ -4647,6 +7783,14 @@ "@babel/types": "^7.24.8", "debug": "^4.3.1", "globals": "^11.1.0" + }, + "dependencies": { + "globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true + } } }, "@babel/types": { @@ -4687,6 +7831,146 @@ } } }, + "@eslint-community/eslint-utils": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.1.tgz", + "integrity": "sha512-s3O3waFUrMV8P/XaF/+ZTp1X9XBZW1a4B97ZnjQF2KYWaFD2A8KyFBsrsfSjEmjn3RGWAIuvlneuZm3CUK3jbA==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^3.4.3" + }, + "dependencies": { + "eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true + } + } + }, + 
"@eslint-community/regexpp": { + "version": "4.12.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", + "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "dev": true + }, + "@eslint/config-array": { + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.19.1.tgz", + "integrity": "sha512-fo6Mtm5mWyKjA/Chy1BYTdn5mGJoDNjC7C64ug20ADsRDGrA85bN3uK3MaKbeRkRuuIEAR5N33Jr1pbm411/PA==", + "dev": true, + "requires": { + "@eslint/object-schema": "^2.1.5", + "debug": "^4.3.1", + "minimatch": "^3.1.2" + } + }, + "@eslint/core": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.9.1.tgz", + "integrity": "sha512-GuUdqkyyzQI5RMIWkHhvTWLCyLo1jNK3vzkSyaExH5kHPDHcuL2VOpHjmMY+y3+NC69qAKToBqldTBgYeLSr9Q==", + "dev": true, + "requires": { + "@types/json-schema": "^7.0.15" + } + }, + "@eslint/eslintrc": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.2.0.tgz", + "integrity": "sha512-grOjVNN8P3hjJn/eIETF1wwd12DdnwFDoyceUJLYYdkpbwq3nLi+4fqrTAONx7XDALqlL220wC/RHSC/QTI/0w==", + "dev": true, + "requires": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^10.0.1", + "globals": "^14.0.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "dependencies": { + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "globals": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "dev": true + }, + "js-yaml": { + "version": 
"4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "requires": { + "argparse": "^2.0.1" + } + } + } + }, + "@eslint/js": { + "version": "9.17.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.17.0.tgz", + "integrity": "sha512-Sxc4hqcs1kTu0iID3kcZDW3JHq2a77HO9P8CP6YEA/FpH3Ll8UXE2r/86Rz9YJLKme39S9vU5OWNjC6Xl0Cr3w==", + "dev": true + }, + "@eslint/object-schema": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.5.tgz", + "integrity": "sha512-o0bhxnL89h5Bae5T318nFoFzGy+YE5i/gGkoPAgkmTVdRKTiv3p8JHevPiPaMwoloKfEiiaHlawCqaZMqRm+XQ==", + "dev": true + }, + "@eslint/plugin-kit": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.4.tgz", + "integrity": "sha512-zSkKow6H5Kdm0ZUQUB2kV5JIXqoG0+uH5YADhaEHswm664N9Db8dXSi0nMJpacpMf+MyyglF1vnZohpEg5yUtg==", + "dev": true, + "requires": { + "levn": "^0.4.1" + } + }, + "@humanfs/core": { + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", + "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", + "dev": true + }, + "@humanfs/node": { + "version": "0.16.6", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz", + "integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==", + "dev": true, + "requires": { + "@humanfs/core": "^0.19.1", + "@humanwhocodes/retry": "^0.3.0" + }, + "dependencies": { + "@humanwhocodes/retry": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", + "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", + "dev": true + } + } + }, + 
"@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true + }, + "@humanwhocodes/retry": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.1.tgz", + "integrity": "sha512-c7hNEllBlenFTHBky65mhq8WD2kbN9Q6gk0bTk8lSBvc554jpXSkST1iePudpt7+A/AQvuHs9EMqjHDXMY1lrA==", + "dev": true + }, "@istanbuljs/load-nyc-config": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", @@ -4965,6 +8249,38 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "requires": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + } + }, + "@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true + }, + "@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "requires": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + } + }, + "@pkgr/core": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.1.1.tgz", + "integrity": "sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA==", + "dev": true + }, "@sinclair/typebox": { 
"version": "0.27.8", "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", @@ -5054,6 +8370,12 @@ "@babel/types": "^7.20.7" } }, + "@types/estree": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", + "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", + "dev": true + }, "@types/graceful-fs": { "version": "4.1.9", "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", @@ -5097,42 +8419,177 @@ "pretty-format": "^29.0.0" } }, + "@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true + }, "@types/node": { "version": "20.14.9", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.9.tgz", "integrity": "sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==", "dev": true, "requires": { - "undici-types": "~5.26.4" + "undici-types": "~5.26.4" + } + }, + "@types/stack-utils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", + "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", + "dev": true + }, + "@types/yargs": { + "version": "17.0.32", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.32.tgz", + "integrity": "sha512-xQ67Yc/laOG5uMfX/093MRlGGCIBzZMarVa+gfNKJxWAIgykYpVGkBdbqEzGDDfCrVUj6Hiff4mTZ5BA6TmAog==", + "dev": true, + "requires": { + "@types/yargs-parser": "*" + } + }, + "@types/yargs-parser": { + "version": "21.0.3", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", + "integrity": 
"sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", + "dev": true + }, + "@typescript-eslint/eslint-plugin": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.18.0.tgz", + "integrity": "sha512-NR2yS7qUqCL7AIxdJUQf2MKKNDVNaig/dEB0GBLU7D+ZdHgK1NoH/3wsgO3OnPVipn51tG3MAwaODEGil70WEw==", + "dev": true, + "requires": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "8.18.0", + "@typescript-eslint/type-utils": "8.18.0", + "@typescript-eslint/utils": "8.18.0", + "@typescript-eslint/visitor-keys": "8.18.0", + "graphemer": "^1.4.0", + "ignore": "^5.3.1", + "natural-compare": "^1.4.0", + "ts-api-utils": "^1.3.0" + } + }, + "@typescript-eslint/parser": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.18.0.tgz", + "integrity": "sha512-hgUZ3kTEpVzKaK3uNibExUYm6SKKOmTU2BOxBSvOYwtJEPdVQ70kZJpPjstlnhCHcuc2WGfSbpKlb/69ttyN5Q==", + "dev": true, + "requires": { + "@typescript-eslint/scope-manager": "8.18.0", + "@typescript-eslint/types": "8.18.0", + "@typescript-eslint/typescript-estree": "8.18.0", + "@typescript-eslint/visitor-keys": "8.18.0", + "debug": "^4.3.4" + } + }, + "@typescript-eslint/scope-manager": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.18.0.tgz", + "integrity": "sha512-PNGcHop0jkK2WVYGotk/hxj+UFLhXtGPiGtiaWgVBVP1jhMoMCHlTyJA+hEj4rszoSdLTK3fN4oOatrL0Cp+Xw==", + "dev": true, + "requires": { + "@typescript-eslint/types": "8.18.0", + "@typescript-eslint/visitor-keys": "8.18.0" + } + }, + "@typescript-eslint/type-utils": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.18.0.tgz", + "integrity": "sha512-er224jRepVAVLnMF2Q7MZJCq5CsdH2oqjP4dT7K6ij09Kyd+R21r7UVJrF0buMVdZS5QRhDzpvzAxHxabQadow==", + "dev": true, + "requires": { + 
"@typescript-eslint/typescript-estree": "8.18.0", + "@typescript-eslint/utils": "8.18.0", + "debug": "^4.3.4", + "ts-api-utils": "^1.3.0" } }, - "@types/stack-utils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", - "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", + "@typescript-eslint/types": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.18.0.tgz", + "integrity": "sha512-FNYxgyTCAnFwTrzpBGq+zrnoTO4x0c1CKYY5MuUTzpScqmY5fmsh2o3+57lqdI3NZucBDCzDgdEbIaNfAjAHQA==", "dev": true }, - "@types/yargs": { - "version": "17.0.32", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.32.tgz", - "integrity": "sha512-xQ67Yc/laOG5uMfX/093MRlGGCIBzZMarVa+gfNKJxWAIgykYpVGkBdbqEzGDDfCrVUj6Hiff4mTZ5BA6TmAog==", + "@typescript-eslint/typescript-estree": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.18.0.tgz", + "integrity": "sha512-rqQgFRu6yPkauz+ms3nQpohwejS8bvgbPyIDq13cgEDbkXt4LH4OkDMT0/fN1RUtzG8e8AKJyDBoocuQh8qNeg==", "dev": true, "requires": { - "@types/yargs-parser": "*" + "@typescript-eslint/types": "8.18.0", + "@typescript-eslint/visitor-keys": "8.18.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "dependencies": { + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0" + } + }, + "minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": 
"sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "requires": { + "brace-expansion": "^2.0.1" + } + }, + "semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true + } } }, - "@types/yargs-parser": { - "version": "21.0.3", - "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", - "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", - "dev": true + "@typescript-eslint/utils": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.18.0.tgz", + "integrity": "sha512-p6GLdY383i7h5b0Qrfbix3Vc3+J2k6QWw6UMUeY5JGfm3C5LbZ4QIZzJNoNOfgyRe0uuYKjvVOsO/jD4SJO+xg==", + "dev": true, + "requires": { + "@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "8.18.0", + "@typescript-eslint/types": "8.18.0", + "@typescript-eslint/typescript-estree": "8.18.0" + } + }, + "@typescript-eslint/visitor-keys": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.18.0.tgz", + "integrity": "sha512-pCh/qEA8Lb1wVIqNvBke8UaRjJ6wrAWkJO5yyIbs8Yx6TNGYyfNjOo61tLv+WwLvoLPp4BQ8B7AHKijl8NGUfw==", + "dev": true, + "requires": { + "@typescript-eslint/types": "8.18.0", + "eslint-visitor-keys": "^4.2.0" + } }, "acorn": { - "version": "8.12.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", - "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", + "version": "8.14.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", + "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", "dev": true }, + 
"acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "requires": {} + }, "acorn-walk": { "version": "8.3.3", "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.3.tgz", @@ -5142,6 +8599,18 @@ "acorn": "^8.11.0" } }, + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, "ansi-escapes": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", @@ -5687,9 +9156,9 @@ "dev": true }, "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", "dev": true, "requires": { "path-key": "^3.1.0", @@ -5713,6 +9182,12 @@ "dev": true, "requires": {} }, + "deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, "deepmerge": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", @@ -5785,12 +9260,164 @@ "integrity": 
"sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", "dev": true }, + "eslint": { + "version": "9.17.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.17.0.tgz", + "integrity": "sha512-evtlNcpJg+cZLcnVKwsai8fExnqjGPicK7gnUtlNuzu+Fv9bI0aLpND5T44VLQtoMEnI57LoXO9XAkIXwohKrA==", + "dev": true, + "requires": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.12.1", + "@eslint/config-array": "^0.19.0", + "@eslint/core": "^0.9.0", + "@eslint/eslintrc": "^3.2.0", + "@eslint/js": "9.17.0", + "@eslint/plugin-kit": "^0.2.3", + "@humanfs/node": "^0.16.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.4.1", + "@types/estree": "^1.0.6", + "@types/json-schema": "^7.0.15", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.6", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.2.0", + "eslint-visitor-keys": "^4.2.0", + "espree": "^10.3.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3" + }, + "dependencies": { + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true + }, + "find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "requires": { + "locate-path": "^6.0.0", + "path-exists": 
"^4.0.0" + } + }, + "locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "requires": { + "p-locate": "^5.0.0" + } + }, + "p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "requires": { + "p-limit": "^3.0.2" + } + } + } + }, + "eslint-config-prettier": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", + "integrity": "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", + "dev": true, + "requires": {} + }, + "eslint-plugin-prettier": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.2.1.tgz", + "integrity": "sha512-gH3iR3g4JfF+yYPaJYkN7jEl9QbweL/YfkoRlNnuIEHEz1vHVlCmWOS+eGGiRuzHQXdJFCOTxRgvju9b8VUmrw==", + "dev": true, + "requires": { + "prettier-linter-helpers": "^1.0.0", + "synckit": "^0.9.1" + } + }, + "eslint-scope": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.2.0.tgz", + "integrity": "sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A==", + "dev": true, + "requires": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + } + }, + "eslint-visitor-keys": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", + "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "dev": true + }, + "espree": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz", 
+ "integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==", + "dev": true, + "requires": { + "acorn": "^8.14.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.0" + } + }, "esprima": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", "dev": true }, + "esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "requires": { + "estraverse": "^5.1.0" + } + }, + "esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "requires": { + "estraverse": "^5.2.0" + } + }, + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true + }, + "esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true + }, "execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -5827,12 +9454,63 @@ "jest-util": "^29.7.0" } }, + "fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "fast-diff": { + "version": "1.3.0", + 
"resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.3.0.tgz", + "integrity": "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==", + "dev": true + }, + "fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "dev": true, + "requires": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "dependencies": { + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + } + } + }, "fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", "dev": true }, + "fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true + }, + "fastq": { + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", + "dev": true, + "requires": { + "reusify": "^1.0.4" + } + }, "fb-watchman": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", @@ -5842,6 +9520,15 @@ "bser": "2.1.1" } }, + "file-entry-cache": { + "version": "8.0.0", + "resolved": 
"https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "requires": { + "flat-cache": "^4.0.0" + } + }, "filelist": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", @@ -5890,6 +9577,22 @@ "path-exists": "^4.0.0" } }, + "flat-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + "requires": { + "flatted": "^3.2.9", + "keyv": "^4.5.4" + } + }, + "flatted": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.2.tgz", + "integrity": "sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA==", + "dev": true + }, "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -5947,10 +9650,19 @@ "path-is-absolute": "^1.0.0" } }, + "glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "requires": { + "is-glob": "^4.0.3" + } + }, "globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "version": "15.13.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-15.13.0.tgz", + "integrity": "sha512-49TewVEz0UxZjr1WYYsWpPrhyC/B/pA8Bq0fUmet2n+eR7yn0IvNzNaoBwnK6mdkzcN+se7Ez9zUgULTz2QH4g==", "dev": true }, "graceful-fs": { @@ -5959,6 +9671,12 @@ "integrity": 
"sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", "dev": true }, + "graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true + }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -5986,6 +9704,30 @@ "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", "dev": true }, + "ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true + }, + "import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "requires": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "dependencies": { + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true + } + } + }, "import-local": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", @@ -6033,6 +9775,12 @@ "hasown": "^2.0.2" } }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true + }, "is-fullwidth-code-point": { "version": "3.0.0", "resolved": 
"https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -6045,6 +9793,15 @@ "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", "dev": true }, + "is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, "is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -6602,18 +10359,45 @@ "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", "dev": true }, + "json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true + }, "json-parse-even-better-errors": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", "dev": true }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true + }, "json5": { "version": "2.2.3", "resolved": 
"https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", "dev": true }, + "keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "requires": { + "json-buffer": "3.0.1" + } + }, "kleur": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", @@ -6626,6 +10410,16 @@ "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", "dev": true }, + "levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "requires": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + } + }, "lines-and-columns": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", @@ -6652,6 +10446,12 @@ "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", "dev": true }, + "lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, "lru-cache": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", @@ -6699,6 +10499,12 @@ "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", "dev": true }, + "merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": 
"sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true + }, "micromatch": { "version": "4.0.7", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", @@ -6781,6 +10587,20 @@ "mimic-fn": "^2.1.0" } }, + "optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "requires": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + } + }, "p-limit": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", @@ -6816,6 +10636,15 @@ "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "dev": true }, + "parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "requires": { + "callsites": "^3.0.0" + } + }, "parse-json": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", @@ -6879,6 +10708,27 @@ "find-up": "^4.0.0" } }, + "prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true + }, + "prettier": { + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.4.2.tgz", + "integrity": "sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ==", + "dev": true + }, + "prettier-linter-helpers": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", + "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", + "dev": true, + "requires": { + "fast-diff": "^1.1.2" + } + }, "pretty-format": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", @@ -6908,12 +10758,24 @@ "sisteransi": "^1.0.5" } }, + "punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true + }, "pure-rand": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", "dev": true }, + "queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true + }, "react-is": { "version": "18.3.1", "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", @@ -6958,6 +10820,21 @@ "integrity": "sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg==", "dev": true }, + "reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true + }, + "run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "requires": { + "queue-microtask": "^1.2.2" + } + }, 
"semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", @@ -7089,6 +10966,16 @@ "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", "dev": true }, + "synckit": { + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.9.2.tgz", + "integrity": "sha512-vrozgXDQwYO72vHjUb/HnFbQx1exDjoKzqx23aXEg2a9VIg2TSFZ8FmeZpTjUCFMYw7mpX4BE2SFu8wI7asYsw==", + "dev": true, + "requires": { + "@pkgr/core": "^0.1.0", + "tslib": "^2.6.2" + } + }, "test-exclude": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", @@ -7121,6 +11008,13 @@ "is-number": "^7.0.0" } }, + "ts-api-utils": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.4.3.tgz", + "integrity": "sha512-i3eMG77UTMD0hZhgRS562pv83RC6ukSAC2GMNWc+9dieh/+jDM5u5YG+NHX6VNDRHQcHwmsTHctP9LhbC3WxVw==", + "dev": true, + "requires": {} + }, "ts-jest": { "version": "29.2.3", "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.2.3.tgz", @@ -7167,6 +11061,21 @@ "yn": "3.1.1" } }, + "tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true + }, + "type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "requires": { + "prelude-ls": "^1.2.1" + } + }, "type-detect": { "version": "4.0.8", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", @@ -7185,6 +11094,17 @@ "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", "dev": true }, + "typescript-eslint": { + "version": "8.18.0", 
+ "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.18.0.tgz", + "integrity": "sha512-Xq2rRjn6tzVpAyHr3+nmSg1/9k9aIHnJ2iZeOH7cfGOWqTkXTm3kwpQglEuLGdNrYvPF+2gtAs+/KF5rjVo+WQ==", + "dev": true, + "requires": { + "@typescript-eslint/eslint-plugin": "8.18.0", + "@typescript-eslint/parser": "8.18.0", + "@typescript-eslint/utils": "8.18.0" + } + }, "undici-types": { "version": "5.26.5", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", @@ -7201,6 +11121,15 @@ "picocolors": "^1.0.1" } }, + "uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "requires": { + "punycode": "^2.1.0" + } + }, "v8-compile-cache-lib": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", @@ -7236,6 +11165,12 @@ "isexe": "^2.0.0" } }, + "word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true + }, "wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", diff --git a/aoe-infra/infra/package.json b/aoe-infra/package.json similarity index 52% rename from aoe-infra/infra/package.json rename to aoe-infra/package.json index 73c5bdbc2..18d72629e 100644 --- a/aoe-infra/infra/package.json +++ b/aoe-infra/package.json @@ -7,19 +7,31 @@ "scripts": { "build": "tsc", "watch": "tsc -w", + "lint": "eslint lib/**/*.ts bin/**/*.ts", + "lint:fix": "eslint --fix lib/**/*.ts bin/**/*.ts", "test": "jest", "cdk": "cdk" }, "devDependencies": { + "@eslint/eslintrc": "^3.2.0", + "@eslint/js": "^9.17.0", "@types/jest": "^29.5.12", "@types/node": "20.14.9", + "@typescript-eslint/parser": 
"^8.18.0", "aws-cdk": "2.150.0", + "eslint": "^9.17.0", + "eslint-config-prettier": "^9.1.0", + "eslint-plugin-prettier": "^5.2.1", + "globals": "^15.13.0", "jest": "^29.7.0", + "prettier": "^3.4.2", "ts-jest": "^29.1.5", "ts-node": "^10.9.2", - "typescript": "~5.5.3" + "typescript": "~5.5.3", + "typescript-eslint": "^8.18.0" }, "dependencies": { + "@aws-cdk/aws-msk": "^1.203.0", "aws-cdk-lib": "2.150.0", "constructs": "^10.0.0", "lodash": "^4.17.21", diff --git a/aoe-infra/scripts/bastion_userdata.sh b/aoe-infra/scripts/bastion_userdata.sh new file mode 100644 index 000000000..8b9e6fe05 --- /dev/null +++ b/aoe-infra/scripts/bastion_userdata.sh @@ -0,0 +1,24 @@ +# Note: addUserData() automatically adds shebang at the start of the file. +mkfs -t xfs /dev/nvme1n1 +mkdir /data +mount /dev/nvme1n1 /data +chmod 770 /data + +echo -e "[mongodb-org-5.0] \nname=MongoDB Repository\nbaseurl=https://repo.mongodb.org/yum/amazon/2/mongodb-org/5.0/x86_64/\ngpgcheck=1 \nenabled=1 \ngpgkey=https://www.mongodb.org/static/pgp/server-5.0.asc" | sudo tee /etc/yum.repos.d/mongodb-org-5.0.repo + +sudo rpm --import https://www.mongodb.org/static/pgp/server-6.0.asc + +cat < /sys/kernel/mm/transparent_hugepage/enabled' as root, -# and add it to your /etc/rc.local in order to retain the setting after a reboot. -# Redis must be restarted after THP is disabled. - -echo never > /sys/kernel/mm/transparent_hugepage/enabled -echo never > /sys/kernel/mm/transparent_hugepage/defrag - -# WARNING: The TCP backlog setting of 511 cannot be enforced -# because /proc/sys/net/core/somaxconn is set to the lower value of 128. - -sysctl -w net.core.somaxconn=512 - -# WARNING overcommit_memory is set to 0! Background save may fail under low memory condition. -# To fix this issue add 'vm.overcommit_memory = 1' to /etc/sysctl.conf and then reboot -# or run the command 'sysctl vm.overcommit_memory=1' for this to take effect. -# The overcommit_memory has 3 options. 
-# 0, the system kernel check if there is enough memory to be allocated to the process or not, -# if not enough, it will return errors to the process. -# 1, the system kernel is allowed to allocate the whole memory to the process -# no matter what the status of memory is. -# 2, the system kernel is allowed to allocate a memory whose size could be bigger than -# the sum of the size of physical memory and the size of exchange workspace to the process. - -sysctl vm.overcommit_memory=1 - -# start redis server - -redis-server /usr/local/etc/redis/redis.conf --bind 0.0.0.0 --appendonly yes --requirepass dev_password diff --git a/aoe-semantic-apis/docker/redis-reference/redis.conf b/aoe-semantic-apis/docker/redis-reference/redis.conf deleted file mode 100644 index 860215443..000000000 --- a/aoe-semantic-apis/docker/redis-reference/redis.conf +++ /dev/null @@ -1,1317 +0,0 @@ -# Redis configuration file example. -# -# Note that in order to read the configuration file, Redis must be -# started with the file path as first argument: -# -# ./redis-server /path/to/redis.conf - -# Note on units: when memory size is needed, it is possible to specify -# it in the usual form of 1k 5GB 4M and so forth: -# -# 1k => 1000 bytes -# 1kb => 1024 bytes -# 1m => 1000000 bytes -# 1mb => 1024*1024 bytes -# 1g => 1000000000 bytes -# 1gb => 1024*1024*1024 bytes -# -# units are case insensitive so 1GB 1Gb 1gB are all the same. - -################################## INCLUDES ################################### - -# Include one or more other config files here. This is useful if you -# have a standard template that goes to all Redis servers but also need -# to customize a few per-server settings. Include files can include -# other files, so use this wisely. -# -# Notice option "include" won't be rewritten by command "CONFIG REWRITE" -# from admin or Redis Sentinel. 
Since Redis always uses the last processed -# line as value of a configuration directive, you'd better put includes -# at the beginning of this file to avoid overwriting config change at runtime. -# -# If instead you are interested in using includes to override configuration -# options, it is better to use include as the last line. -# -# include /path/to/local.conf -# include /path/to/other.conf - -################################## MODULES ##################################### - -# Load modules at startup. If the server is not able to load modules -# it will abort. It is possible to use multiple loadmodule directives. -# -# loadmodule /path/to/my_module.so -# loadmodule /path/to/other_module.so - -################################## NETWORK ##################################### - -# By default, if no "bind" configuration directive is specified, Redis listens -# for connections from all the network interfaces available on the server. -# It is possible to listen to just one or multiple selected interfaces using -# the "bind" configuration directive, followed by one or more IP addresses. -# -# Examples: -# -# bind 192.168.1.100 10.0.0.1 -# bind 127.0.0.1 ::1 -# -# ~~~ WARNING ~~~ If the computer running Redis is directly exposed to the -# internet, binding to all the interfaces is dangerous and will expose the -# instance to everybody on the internet. So by default we uncomment the -# following bind directive, that will force Redis to listen only into -# the IPv4 lookback interface address (this means Redis will be able to -# accept connections only from clients running into the same computer it -# is running). -# -# IF YOU ARE SURE YOU WANT YOUR INSTANCE TO LISTEN TO ALL THE INTERFACES -# JUST COMMENT THE FOLLOWING LINE. 
-# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# bind 127.0.0.1 -bind 0.0.0.0 - -# Protected mode is a layer of security protection, in order to avoid that -# Redis instances left open on the internet are accessed and exploited. -# -# When protected mode is on and if: -# -# 1) The server is not binding explicitly to a set of addresses using the -# "bind" directive. -# 2) No password is configured. -# -# The server only accepts connections from clients connecting from the -# IPv4 and IPv6 loopback addresses 127.0.0.1 and ::1, and from Unix domain -# sockets. -# -# By default protected mode is enabled. You should disable it only if -# you are sure you want clients from other hosts to connect to Redis -# even if no authentication is configured, nor a specific set of interfaces -# are explicitly listed using the "bind" directive. -protected-mode yes - -# Accept connections on the specified port, default is 6379 (IANA #815344). -# If port 0 is specified Redis will not listen on a TCP socket. -port 6379 - -# TCP listen() backlog. -# -# In high requests-per-second environments you need an high backlog in order -# to avoid slow clients connections issues. Note that the Linux kernel -# will silently truncate it to the value of /proc/sys/net/core/somaxconn so -# make sure to raise both the value of somaxconn and tcp_max_syn_backlog -# in order to get the desired effect. -tcp-backlog 511 - -# Unix socket. -# -# Specify the path for the Unix socket that will be used to listen for -# incoming connections. There is no default, so Redis will not listen -# on a unix socket when not specified. -# -# unixsocket /tmp/redis.sock -# unixsocketperm 700 - -# Close the connection after a client is idle for N seconds (0 to disable) -timeout 0 - -# TCP keepalive. -# -# If non-zero, use SO_KEEPALIVE to send TCP ACKs to clients in absence -# of communication. This is useful for two reasons: -# -# 1) Detect dead peers. 
-# 2) Take the connection alive from the point of view of network -# equipment in the middle. -# -# On Linux, the specified value (in seconds) is the period used to send ACKs. -# Note that to close the connection the double of the time is needed. -# On other kernels the period depends on the kernel configuration. -# -# A reasonable value for this option is 300 seconds, which is the new -# Redis default starting with Redis 3.2.1. -tcp-keepalive 300 - -################################# GENERAL ##################################### - -# By default Redis does not run as a daemon. Use 'yes' if you need it. -# Note that Redis will write a pid file in /var/run/redis.pid when daemonized. -daemonize no - -# If you run Redis from upstart or systemd, Redis can interact with your -# supervision tree. Options: -# supervised no - no supervision interaction -# supervised upstart - signal upstart by putting Redis into SIGSTOP mode -# supervised systemd - signal systemd by writing READY=1 to $NOTIFY_SOCKET -# supervised auto - detect upstart or systemd method based on -# UPSTART_JOB or NOTIFY_SOCKET environment variables -# Note: these supervision methods only signal "process is ready." -# They do not enable continuous liveness pings back to your supervisor. -supervised no - -# If a pid file is specified, Redis writes it where specified at startup -# and removes it at exit. -# -# When the server runs non daemonized, no pid file is created if none is -# specified in the configuration. When the server is daemonized, the pid file -# is used even if not specified, defaulting to "/var/run/redis.pid". -# -# Creating a pid file is best effort: if Redis is not able to create it -# nothing bad happens, the server will start and run normally. -pidfile /var/run/redis_6379.pid - -# Specify the server verbosity level. 
-# This can be one of: -# debug (a lot of information, useful for development/testing) -# verbose (many rarely useful info, but not a mess like the debug level) -# notice (moderately verbose, what you want in production probably) -# warning (only very important / critical messages are logged) -loglevel notice - -# Specify the log file name. Also the empty string can be used to force -# Redis to log on the standard output. Note that if you use standard -# output for logging but daemonize, logs will be sent to /dev/null -logfile "" - -# To enable logging to the system logger, just set 'syslog-enabled' to yes, -# and optionally update the other syslog parameters to suit your needs. -# syslog-enabled no - -# Specify the syslog identity. -# syslog-ident redis - -# Specify the syslog facility. Must be USER or between LOCAL0-LOCAL7. -# syslog-facility local0 - -# Set the number of databases. The default database is DB 0, you can select -# a different one on a per-connection basis using SELECT where -# dbid is a number between 0 and 'databases'-1 -databases 16 - -# By default Redis shows an ASCII art logo only when started to log to the -# standard output and if the standard output is a TTY. Basically this means -# that normally a logo is displayed only in interactive sessions. -# -# However it is possible to force the pre-4.0 behavior and always show a -# ASCII art logo in startup logs by setting the following option to yes. -always-show-logo yes - -################################ SNAPSHOTTING ################################ -# -# Save the DB on disk: -# -# save -# -# Will save the DB if both the given number of seconds and the given -# number of write operations against the DB occurred. 
-# -# In the example below the behaviour will be to save: -# after 900 sec (15 min) if at least 1 key changed -# after 300 sec (5 min) if at least 10 keys changed -# after 60 sec if at least 10000 keys changed -# -# Note: you can disable saving completely by commenting out all "save" lines. -# -# It is also possible to remove all the previously configured save -# points by adding a save directive with a single empty string argument -# like in the following example: -# -# save "" - -save 900 1 -save 300 10 -save 60 10000 - -# By default Redis will stop accepting writes if RDB snapshots are enabled -# (at least one save point) and the latest background save failed. -# This will make the user aware (in a hard way) that data is not persisting -# on disk properly, otherwise chances are that no one will notice and some -# disaster will happen. -# -# If the background saving process will start working again Redis will -# automatically allow writes again. -# -# However if you have setup your proper monitoring of the Redis server -# and persistence, you may want to disable this feature so that Redis will -# continue to work as usual even if there are problems with disk, -# permissions, and so forth. -stop-writes-on-bgsave-error yes - -# Compress string objects using LZF when dump .rdb databases? -# For default that's set to 'yes' as it's almost always a win. -# If you want to save some CPU in the saving child set it to 'no' but -# the dataset will likely be bigger if you have compressible values or keys. -rdbcompression yes - -# Since version 5 of RDB a CRC64 checksum is placed at the end of the file. -# This makes the format more resistant to corruption but there is a performance -# hit to pay (around 10%) when saving and loading RDB files, so you can disable it -# for maximum performances. -# -# RDB files created with checksum disabled have a checksum of zero that will -# tell the loading code to skip the check. 
-rdbchecksum yes - -# The filename where to dump the DB -dbfilename dump.rdb - -# The working directory. -# -# The DB will be written inside this directory, with the filename specified -# above using the 'dbfilename' configuration directive. -# -# The Append Only File will also be created inside this directory. -# -# Note that you must specify a directory here, not a file name. -dir ./ - -################################# REPLICATION ################################# - -# Master-Slave replication. Use slaveof to make a Redis instance a copy of -# another Redis server. A few things to understand ASAP about Redis replication. -# -# 1) Redis replication is asynchronous, but you can configure a master to -# stop accepting writes if it appears to be not connected with at least -# a given number of slaves. -# 2) Redis slaves are able to perform a partial resynchronization with the -# master if the replication link is lost for a relatively small amount of -# time. You may want to configure the replication backlog size (see the next -# sections of this file) with a sensible value depending on your needs. -# 3) Replication is automatic and does not need user intervention. After a -# network partition slaves automatically try to reconnect to masters -# and resynchronize with them. -# -# slaveof - -# If the master is password protected (using the "requirepass" configuration -# directive below) it is possible to tell the slave to authenticate before -# starting the replication synchronization process, otherwise the master will -# refuse the slave request. -# -# masterauth - -# When a slave loses its connection with the master, or when the replication -# is still in progress, the slave can act in two different ways: -# -# 1) if slave-serve-stale-data is set to 'yes' (the default) the slave will -# still reply to client requests, possibly with out of date data, or the -# data set may just be empty if this is the first synchronization. 
-# -# 2) if slave-serve-stale-data is set to 'no' the slave will reply with -# an error "SYNC with master in progress" to all the kind of commands -# but to INFO and SLAVEOF. -# -slave-serve-stale-data yes - -# You can configure a slave instance to accept writes or not. Writing against -# a slave instance may be useful to store some ephemeral data (because data -# written on a slave will be easily deleted after resync with the master) but -# may also cause problems if clients are writing to it because of a -# misconfiguration. -# -# Since Redis 2.6 by default slaves are read-only. -# -# Note: read only slaves are not designed to be exposed to untrusted clients -# on the internet. It's just a protection layer against misuse of the instance. -# Still a read only slave exports by default all the administrative commands -# such as CONFIG, DEBUG, and so forth. To a limited extent you can improve -# security of read only slaves using 'rename-command' to shadow all the -# administrative / dangerous commands. -slave-read-only yes - -# Replication SYNC strategy: disk or socket. -# -# ------------------------------------------------------- -# WARNING: DISKLESS REPLICATION IS EXPERIMENTAL CURRENTLY -# ------------------------------------------------------- -# -# New slaves and reconnecting slaves that are not able to continue the replication -# process just receiving differences, need to do what is called a "full -# synchronization". An RDB file is transmitted from the master to the slaves. -# The transmission can happen in two different ways: -# -# 1) Disk-backed: The Redis master creates a new process that writes the RDB -# file on disk. Later the file is transferred by the parent -# process to the slaves incrementally. -# 2) Diskless: The Redis master creates a new process that directly writes the -# RDB file to slave sockets, without touching the disk at all. 
-# -# With disk-backed replication, while the RDB file is generated, more slaves -# can be queued and served with the RDB file as soon as the current child producing -# the RDB file finishes its work. With diskless replication instead once -# the transfer starts, new slaves arriving will be queued and a new transfer -# will start when the current one terminates. -# -# When diskless replication is used, the master waits a configurable amount of -# time (in seconds) before starting the transfer in the hope that multiple slaves -# will arrive and the transfer can be parallelized. -# -# With slow disks and fast (large bandwidth) networks, diskless replication -# works better. -repl-diskless-sync no - -# When diskless replication is enabled, it is possible to configure the delay -# the server waits in order to spawn the child that transfers the RDB via socket -# to the slaves. -# -# This is important since once the transfer starts, it is not possible to serve -# new slaves arriving, that will be queued for the next RDB transfer, so the server -# waits a delay in order to let more slaves arrive. -# -# The delay is specified in seconds, and by default is 5 seconds. To disable -# it entirely just set it to 0 seconds and the transfer will start ASAP. -repl-diskless-sync-delay 5 - -# Slaves send PINGs to server in a predefined interval. It's possible to change -# this interval with the repl_ping_slave_period option. The default value is 10 -# seconds. -# -# repl-ping-slave-period 10 - -# The following option sets the replication timeout for: -# -# 1) Bulk transfer I/O during SYNC, from the point of view of slave. -# 2) Master timeout from the point of view of slaves (data, pings). -# 3) Slave timeout from the point of view of masters (REPLCONF ACK pings). -# -# It is important to make sure that this value is greater than the value -# specified for repl-ping-slave-period otherwise a timeout will be detected -# every time there is low traffic between the master and the slave. 
-# -# repl-timeout 60 - -# Disable TCP_NODELAY on the slave socket after SYNC? -# -# If you select "yes" Redis will use a smaller number of TCP packets and -# less bandwidth to send data to slaves. But this can add a delay for -# the data to appear on the slave side, up to 40 milliseconds with -# Linux kernels using a default configuration. -# -# If you select "no" the delay for data to appear on the slave side will -# be reduced but more bandwidth will be used for replication. -# -# By default we optimize for low latency, but in very high traffic conditions -# or when the master and slaves are many hops away, turning this to "yes" may -# be a good idea. -repl-disable-tcp-nodelay no - -# Set the replication backlog size. The backlog is a buffer that accumulates -# slave data when slaves are disconnected for some time, so that when a slave -# wants to reconnect again, often a full resync is not needed, but a partial -# resync is enough, just passing the portion of data the slave missed while -# disconnected. -# -# The bigger the replication backlog, the longer the time the slave can be -# disconnected and later be able to perform a partial resynchronization. -# -# The backlog is only allocated once there is at least a slave connected. -# -# repl-backlog-size 1mb - -# After a master has no longer connected slaves for some time, the backlog -# will be freed. The following option configures the amount of seconds that -# need to elapse, starting from the time the last slave disconnected, for -# the backlog buffer to be freed. -# -# Note that slaves never free the backlog for timeout, since they may be -# promoted to masters later, and should be able to correctly "partially -# resynchronize" with the slaves: hence they should always accumulate backlog. -# -# A value of 0 means to never release the backlog. -# -# repl-backlog-ttl 3600 - -# The slave priority is an integer number published by Redis in the INFO output. 
-# It is used by Redis Sentinel in order to select a slave to promote into a -# master if the master is no longer working correctly. -# -# A slave with a low priority number is considered better for promotion, so -# for instance if there are three slaves with priority 10, 100, 25 Sentinel will -# pick the one with priority 10, that is the lowest. -# -# However a special priority of 0 marks the slave as not able to perform the -# role of master, so a slave with priority of 0 will never be selected by -# Redis Sentinel for promotion. -# -# By default the priority is 100. -slave-priority 100 - -# It is possible for a master to stop accepting writes if there are less than -# N slaves connected, having a lag less or equal than M seconds. -# -# The N slaves need to be in "online" state. -# -# The lag in seconds, that must be <= the specified value, is calculated from -# the last ping received from the slave, that is usually sent every second. -# -# This option does not GUARANTEE that N replicas will accept the write, but -# will limit the window of exposure for lost writes in case not enough slaves -# are available, to the specified number of seconds. -# -# For example to require at least 3 slaves with a lag <= 10 seconds use: -# -# min-slaves-to-write 3 -# min-slaves-max-lag 10 -# -# Setting one or the other to 0 disables the feature. -# -# By default min-slaves-to-write is set to 0 (feature disabled) and -# min-slaves-max-lag is set to 10. - -# A Redis master is able to list the address and port of the attached -# slaves in different ways. For example the "INFO replication" section -# offers this information, which is used, among other tools, by -# Redis Sentinel in order to discover slave instances. -# Another place where this info is available is in the output of the -# "ROLE" command of a master. 
-# -# The listed IP and address normally reported by a slave is obtained -# in the following way: -# -# IP: The address is auto detected by checking the peer address -# of the socket used by the slave to connect with the master. -# -# Port: The port is communicated by the slave during the replication -# handshake, and is normally the port that the slave is using to -# list for connections. -# -# However when port forwarding or Network Address Translation (NAT) is -# used, the slave may be actually reachable via different IP and port -# pairs. The following two options can be used by a slave in order to -# report to its master a specific set of IP and port, so that both INFO -# and ROLE will report those values. -# -# There is no need to use both the options if you need to override just -# the port or the IP address. -# -# slave-announce-ip 5.5.5.5 -# slave-announce-port 1234 - -################################## SECURITY ################################### - -# Require clients to issue AUTH before processing any other -# commands. This might be useful in environments in which you do not trust -# others with access to the host running redis-server. -# -# This should stay commented out for backward compatibility and because most -# people do not need auth (e.g. they run their own servers). -# -# Warning: since Redis is pretty fast an outside user can try up to -# 150k passwords per second against a good box. This means that you should -# use a very strong password otherwise it will be very easy to break. -# -# requirepass foobared - -# Command renaming. -# -# It is possible to change the name of dangerous commands in a shared -# environment. For instance the CONFIG command may be renamed into something -# hard to guess so that it will still be available for internal-use tools -# but not available for general clients. 
-# -# Example: -# -# rename-command CONFIG b840fc02d524045429941cc15f59e41cb7be6c52 -# -# It is also possible to completely kill a command by renaming it into -# an empty string: -# -# rename-command CONFIG "" -# -# Please note that changing the name of commands that are logged into the -# AOF file or transmitted to slaves may cause problems. - -################################### CLIENTS #################################### - -# Set the max number of connected clients at the same time. By default -# this limit is set to 10000 clients, however if the Redis server is not -# able to configure the process file limit to allow for the specified limit -# the max number of allowed clients is set to the current file limit -# minus 32 (as Redis reserves a few file descriptors for internal uses). -# -# Once the limit is reached Redis will close all the new connections sending -# an error 'max number of clients reached'. -# -# maxclients 10000 - -############################## MEMORY MANAGEMENT ################################ - -# Set a memory usage limit to the specified amount of bytes. -# When the memory limit is reached Redis will try to remove keys -# according to the eviction policy selected (see maxmemory-policy). -# -# If Redis can't remove keys according to the policy, or if the policy is -# set to 'noeviction', Redis will start to reply with errors to commands -# that would use more memory, like SET, LPUSH, and so on, and will continue -# to reply to read-only commands like GET. -# -# This option is usually useful when using Redis as an LRU or LFU cache, or to -# set a hard memory limit for an instance (using the 'noeviction' policy). 
-# -# WARNING: If you have slaves attached to an instance with maxmemory on, -# the size of the output buffers needed to feed the slaves are subtracted -# from the used memory count, so that network problems / resyncs will -# not trigger a loop where keys are evicted, and in turn the output -# buffer of slaves is full with DELs of keys evicted triggering the deletion -# of more keys, and so forth until the database is completely emptied. -# -# In short... if you have slaves attached it is suggested that you set a lower -# limit for maxmemory so that there is some free RAM on the system for slave -# output buffers (but this is not needed if the policy is 'noeviction'). -# -# maxmemory - -# MAXMEMORY POLICY: how Redis will select what to remove when maxmemory -# is reached. You can select among five behaviors: -# -# volatile-lru -> Evict using approximated LRU among the keys with an expire set. -# allkeys-lru -> Evict any key using approximated LRU. -# volatile-lfu -> Evict using approximated LFU among the keys with an expire set. -# allkeys-lfu -> Evict any key using approximated LFU. -# volatile-random -> Remove a random key among the ones with an expire set. -# allkeys-random -> Remove a random key, any key. -# volatile-ttl -> Remove the key with the nearest expire time (minor TTL) -# noeviction -> Don't evict anything, just return an error on write operations. -# -# LRU means Least Recently Used -# LFU means Least Frequently Used -# -# Both LRU, LFU and volatile-ttl are implemented using approximated -# randomized algorithms. -# -# Note: with any of the above policies, Redis will return an error on write -# operations, when there are no suitable keys for eviction. 
-# -# At the date of writing these commands are: set setnx setex append -# incr decr rpush lpush rpushx lpushx linsert lset rpoplpush sadd -# sinter sinterstore sunion sunionstore sdiff sdiffstore zadd zincrby -# zunionstore zinterstore hset hsetnx hmset hincrby incrby decrby -# getset mset msetnx exec sort -# -# The default is: -# -# maxmemory-policy noeviction - -# LRU, LFU and minimal TTL algorithms are not precise algorithms but approximated -# algorithms (in order to save memory), so you can tune it for speed or -# accuracy. For default Redis will check five keys and pick the one that was -# used less recently, you can change the sample size using the following -# configuration directive. -# -# The default of 5 produces good enough results. 10 Approximates very closely -# true LRU but costs more CPU. 3 is faster but not very accurate. -# -# maxmemory-samples 5 - -############################# LAZY FREEING #################################### - -# Redis has two primitives to delete keys. One is called DEL and is a blocking -# deletion of the object. It means that the server stops processing new commands -# in order to reclaim all the memory associated with an object in a synchronous -# way. If the key deleted is associated with a small object, the time needed -# in order to execute the DEL command is very small and comparable to most other -# O(1) or O(log_N) commands in Redis. However if the key is associated with an -# aggregated value containing millions of elements, the server can block for -# a long time (even seconds) in order to complete the operation. -# -# For the above reasons Redis also offers non blocking deletion primitives -# such as UNLINK (non blocking DEL) and the ASYNC option of FLUSHALL and -# FLUSHDB commands, in order to reclaim memory in background. Those commands -# are executed in constant time. Another thread will incrementally free the -# object in the background as fast as possible. 
-# -# DEL, UNLINK and ASYNC option of FLUSHALL and FLUSHDB are user-controlled. -# It's up to the design of the application to understand when it is a good -# idea to use one or the other. However the Redis server sometimes has to -# delete keys or flush the whole database as a side effect of other operations. -# Specifically Redis deletes objects independently of a user call in the -# following scenarios: -# -# 1) On eviction, because of the maxmemory and maxmemory policy configurations, -# in order to make room for new data, without going over the specified -# memory limit. -# 2) Because of expire: when a key with an associated time to live (see the -# EXPIRE command) must be deleted from memory. -# 3) Because of a side effect of a command that stores data on a key that may -# already exist. For example the RENAME command may delete the old key -# content when it is replaced with another one. Similarly SUNIONSTORE -# or SORT with STORE option may delete existing keys. The SET command -# itself removes any old content of the specified key in order to replace -# it with the specified string. -# 4) During replication, when a slave performs a full resynchronization with -# its master, the content of the whole database is removed in order to -# load the RDB file just transfered. -# -# In all the above cases the default is to delete objects in a blocking way, -# like if DEL was called. However you can configure each case specifically -# in order to instead release memory in a non-blocking way like if UNLINK -# was called, using the following configuration directives: - -lazyfree-lazy-eviction no -lazyfree-lazy-expire no -lazyfree-lazy-server-del no -slave-lazy-flush no - -############################## APPEND ONLY MODE ############################### - -# By default Redis asynchronously dumps the dataset on disk. 
This mode is -# good enough in many applications, but an issue with the Redis process or -# a power outage may result into a few minutes of writes lost (depending on -# the configured save points). -# -# The Append Only File is an alternative persistence mode that provides -# much better durability. For instance using the default data fsync policy -# (see later in the config file) Redis can lose just one second of writes in a -# dramatic event like a server power outage, or a single write if something -# wrong with the Redis process itself happens, but the operating system is -# still running correctly. -# -# AOF and RDB persistence can be enabled at the same time without problems. -# If the AOF is enabled on startup Redis will load the AOF, that is the file -# with the better durability guarantees. -# -# Please check http://redis.io/topics/persistence for more information. - -appendonly no - -# The name of the append only file (default: "appendonly.aof") - -appendfilename "appendonly.aof" - -# The fsync() call tells the Operating System to actually write data on disk -# instead of waiting for more data in the output buffer. Some OS will really flush -# data on disk, some other OS will just try to do it ASAP. -# -# Redis supports three different modes: -# -# no: don't fsync, just let the OS flush the data when it wants. Faster. -# always: fsync after every write to the append only log. Slow, Safest. -# everysec: fsync only one time every second. Compromise. -# -# The default is "everysec", as that's usually the right compromise between -# speed and data safety. It's up to you to understand if you can relax this to -# "no" that will let the operating system flush the output buffer when -# it wants, for better performances (but if you can live with the idea of -# some data loss consider the default persistence mode that's snapshotting), -# or on the contrary, use "always" that's very slow but a bit safer than -# everysec. 
-# -# More details please check the following article: -# http://antirez.com/post/redis-persistence-demystified.html -# -# If unsure, use "everysec". - -# appendfsync always -appendfsync everysec -# appendfsync no - -# When the AOF fsync policy is set to always or everysec, and a background -# saving process (a background save or AOF log background rewriting) is -# performing a lot of I/O against the disk, in some Linux configurations -# Redis may block too long on the fsync() call. Note that there is no fix for -# this currently, as even performing fsync in a different thread will block -# our synchronous write(2) call. -# -# In order to mitigate this problem it's possible to use the following option -# that will prevent fsync() from being called in the main process while a -# BGSAVE or BGREWRITEAOF is in progress. -# -# This means that while another child is saving, the durability of Redis is -# the same as "appendfsync none". In practical terms, this means that it is -# possible to lose up to 30 seconds of log in the worst scenario (with the -# default Linux settings). -# -# If you have latency problems turn this to "yes". Otherwise leave it as -# "no" that is the safest pick from the point of view of durability. - -no-appendfsync-on-rewrite no - -# Automatic rewrite of the append only file. -# Redis is able to automatically rewrite the log file implicitly calling -# BGREWRITEAOF when the AOF log size grows by the specified percentage. -# -# This is how it works: Redis remembers the size of the AOF file after the -# latest rewrite (if no rewrite has happened since the restart, the size of -# the AOF at startup is used). -# -# This base size is compared to the current size. If the current size is -# bigger than the specified percentage, the rewrite is triggered. Also -# you need to specify a minimal size for the AOF file to be rewritten, this -# is useful to avoid rewriting the AOF file even if the percentage increase -# is reached but it is still pretty small. 
-# -# Specify a percentage of zero in order to disable the automatic AOF -# rewrite feature. - -auto-aof-rewrite-percentage 100 -auto-aof-rewrite-min-size 64mb - -# An AOF file may be found to be truncated at the end during the Redis -# startup process, when the AOF data gets loaded back into memory. -# This may happen when the system where Redis is running -# crashes, especially when an ext4 filesystem is mounted without the -# data=ordered option (however this can't happen when Redis itself -# crashes or aborts but the operating system still works correctly). -# -# Redis can either exit with an error when this happens, or load as much -# data as possible (the default now) and start if the AOF file is found -# to be truncated at the end. The following option controls this behavior. -# -# If aof-load-truncated is set to yes, a truncated AOF file is loaded and -# the Redis server starts emitting a log to inform the user of the event. -# Otherwise if the option is set to no, the server aborts with an error -# and refuses to start. When the option is set to no, the user requires -# to fix the AOF file using the "redis-check-aof" utility before to restart -# the server. -# -# Note that if the AOF file will be found to be corrupted in the middle -# the server will still exit with an error. This option only applies when -# Redis will try to read more data from the AOF file but not enough bytes -# will be found. -aof-load-truncated yes - -# When rewriting the AOF file, Redis is able to use an RDB preamble in the -# AOF file for faster rewrites and recoveries. When this option is turned -# on the rewritten AOF file is composed of two different stanzas: -# -# [RDB file][AOF tail] -# -# When loading Redis recognizes that the AOF file starts with the "REDIS" -# string and loads the prefixed RDB file, and continues loading the AOF -# tail. 
-# -# This is currently turned off by default in order to avoid the surprise -# of a format change, but will at some point be used as the default. -aof-use-rdb-preamble no - -################################ LUA SCRIPTING ############################### - -# Max execution time of a Lua script in milliseconds. -# -# If the maximum execution time is reached Redis will log that a script is -# still in execution after the maximum allowed time and will start to -# reply to queries with an error. -# -# When a long running script exceeds the maximum execution time only the -# SCRIPT KILL and SHUTDOWN NOSAVE commands are available. The first can be -# used to stop a script that did not yet called write commands. The second -# is the only way to shut down the server in the case a write command was -# already issued by the script but the user doesn't want to wait for the natural -# termination of the script. -# -# Set it to 0 or a negative value for unlimited execution without warnings. -lua-time-limit 5000 - -################################ REDIS CLUSTER ############################### -# -# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -# WARNING EXPERIMENTAL: Redis Cluster is considered to be stable code, however -# in order to mark it as "mature" we need to wait for a non trivial percentage -# of users to deploy it in production. -# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -# -# Normal Redis instances can't be part of a Redis Cluster; only nodes that are -# started as cluster nodes can. In order to start a Redis instance as a -# cluster node enable the cluster support uncommenting the following: -# -# cluster-enabled yes - -# Every cluster node has a cluster configuration file. This file is not -# intended to be edited by hand. It is created and updated by Redis nodes. -# Every Redis Cluster node requires a different cluster configuration file. 
-# Make sure that instances running in the same system do not have -# overlapping cluster configuration file names. -# -# cluster-config-file nodes-6379.conf - -# Cluster node timeout is the amount of milliseconds a node must be unreachable -# for it to be considered in failure state. -# Most other internal time limits are multiple of the node timeout. -# -# cluster-node-timeout 15000 - -# A slave of a failing master will avoid to start a failover if its data -# looks too old. -# -# There is no simple way for a slave to actually have an exact measure of -# its "data age", so the following two checks are performed: -# -# 1) If there are multiple slaves able to failover, they exchange messages -# in order to try to give an advantage to the slave with the best -# replication offset (more data from the master processed). -# Slaves will try to get their rank by offset, and apply to the start -# of the failover a delay proportional to their rank. -# -# 2) Every single slave computes the time of the last interaction with -# its master. This can be the last ping or command received (if the master -# is still in the "connected" state), or the time that elapsed since the -# disconnection with the master (if the replication link is currently down). -# If the last interaction is too old, the slave will not try to failover -# at all. -# -# The point "2" can be tuned by user. Specifically a slave will not perform -# the failover if, since the last interaction with the master, the time -# elapsed is greater than: -# -# (node-timeout * slave-validity-factor) + repl-ping-slave-period -# -# So for example if node-timeout is 30 seconds, and the slave-validity-factor -# is 10, and assuming a default repl-ping-slave-period of 10 seconds, the -# slave will not try to failover if it was not able to talk with the master -# for longer than 310 seconds. 
-# -# A large slave-validity-factor may allow slaves with too old data to failover -# a master, while a too small value may prevent the cluster from being able to -# elect a slave at all. -# -# For maximum availability, it is possible to set the slave-validity-factor -# to a value of 0, which means, that slaves will always try to failover the -# master regardless of the last time they interacted with the master. -# (However they'll always try to apply a delay proportional to their -# offset rank). -# -# Zero is the only value able to guarantee that when all the partitions heal -# the cluster will always be able to continue. -# -# cluster-slave-validity-factor 10 - -# Cluster slaves are able to migrate to orphaned masters, that are masters -# that are left without working slaves. This improves the cluster ability -# to resist to failures as otherwise an orphaned master can't be failed over -# in case of failure if it has no working slaves. -# -# Slaves migrate to orphaned masters only if there are still at least a -# given number of other working slaves for their old master. This number -# is the "migration barrier". A migration barrier of 1 means that a slave -# will migrate only if there is at least 1 other working slave for its master -# and so forth. It usually reflects the number of slaves you want for every -# master in your cluster. -# -# Default is 1 (slaves migrate only if their masters remain with at least -# one slave). To disable migration just set it to a very large value. -# A value of 0 can be set but is useful only for debugging and dangerous -# in production. -# -# cluster-migration-barrier 1 - -# By default Redis Cluster nodes stop accepting queries if they detect there -# is at least an hash slot uncovered (no available node is serving it). -# This way if the cluster is partially down (for example a range of hash slots -# are no longer covered) all the cluster becomes, eventually, unavailable. 
-# It automatically returns available as soon as all the slots are covered again. -# -# However sometimes you want the subset of the cluster which is working, -# to continue to accept queries for the part of the key space that is still -# covered. In order to do so, just set the cluster-require-full-coverage -# option to no. -# -# cluster-require-full-coverage yes - -# This option, when set to yes, prevents slaves from trying to failover its -# master during master failures. However the master can still perform a -# manual failover, if forced to do so. -# -# This is useful in different scenarios, especially in the case of multiple -# data center operations, where we want one side to never be promoted if not -# in the case of a total DC failure. -# -# cluster-slave-no-failover no - -# In order to setup your cluster make sure to read the documentation -# available at http://redis.io web site. - -########################## CLUSTER DOCKER/NAT support ######################## - -# In certain deployments, Redis Cluster nodes address discovery fails, because -# addresses are NAT-ted or because ports are forwarded (the typical case is -# Docker and other containers). -# -# In order to make Redis Cluster working in such environments, a static -# configuration where each node knows its public address is needed. The -# following two options are used for this scope, and are: -# -# * cluster-announce-ip -# * cluster-announce-port -# * cluster-announce-bus-port -# -# Each instruct the node about its address, client port, and cluster message -# bus port. The information is then published in the header of the bus packets -# so that other nodes will be able to correctly map the address of the node -# publishing the information. -# -# If the above options are not used, the normal Redis Cluster auto-detection -# will be used instead. 
-# -# Note that when remapped, the bus port may not be at the fixed offset of -# clients port + 10000, so you can specify any port and bus-port depending -# on how they get remapped. If the bus-port is not set, a fixed offset of -# 10000 will be used as usually. -# -# Example: -# -# cluster-announce-ip 10.1.1.5 -# cluster-announce-port 6379 -# cluster-announce-bus-port 6380 - -################################## SLOW LOG ################################### - -# The Redis Slow Log is a system to log queries that exceeded a specified -# execution time. The execution time does not include the I/O operations -# like talking with the client, sending the reply and so forth, -# but just the time needed to actually execute the command (this is the only -# stage of command execution where the thread is blocked and can not serve -# other requests in the meantime). -# -# You can configure the slow log with two parameters: one tells Redis -# what is the execution time, in microseconds, to exceed in order for the -# command to get logged, and the other parameter is the length of the -# slow log. When a new command is logged the oldest one is removed from the -# queue of logged commands. - -# The following time is expressed in microseconds, so 1000000 is equivalent -# to one second. Note that a negative number disables the slow log, while -# a value of zero forces the logging of every command. -slowlog-log-slower-than 10000 - -# There is no limit to this length. Just be aware that it will consume memory. -# You can reclaim memory used by the slow log with SLOWLOG RESET. -slowlog-max-len 128 - -################################ LATENCY MONITOR ############################## - -# The Redis latency monitoring subsystem samples different operations -# at runtime in order to collect data related to possible sources of -# latency of a Redis instance. -# -# Via the LATENCY command this information is available to the user that can -# print graphs and obtain reports. 
-# -# The system only logs operations that were performed in a time equal or -# greater than the amount of milliseconds specified via the -# latency-monitor-threshold configuration directive. When its value is set -# to zero, the latency monitor is turned off. -# -# By default latency monitoring is disabled since it is mostly not needed -# if you don't have latency issues, and collecting data has a performance -# impact, that while very small, can be measured under big load. Latency -# monitoring can easily be enabled at runtime using the command -# "CONFIG SET latency-monitor-threshold " if needed. -latency-monitor-threshold 0 - -############################# EVENT NOTIFICATION ############################## - -# Redis can notify Pub/Sub clients about events happening in the key space. -# This feature is documented at http://redis.io/topics/notifications -# -# For instance if keyspace events notification is enabled, and a client -# performs a DEL operation on key "foo" stored in the Database 0, two -# messages will be published via Pub/Sub: -# -# PUBLISH __keyspace@0__:foo del -# PUBLISH __keyevent@0__:del foo -# -# It is possible to select the events that Redis will notify among a set -# of classes. Every class is identified by a single character: -# -# K Keyspace events, published with __keyspace@__ prefix. -# E Keyevent events, published with __keyevent@__ prefix. -# g Generic commands (non-type specific) like DEL, EXPIRE, RENAME, ... -# $ String commands -# l List commands -# s Set commands -# h Hash commands -# z Sorted set commands -# x Expired events (events generated every time a key expires) -# e Evicted events (events generated when a key is evicted for maxmemory) -# A Alias for g$lshzxe, so that the "AKE" string means all the events. -# -# The "notify-keyspace-events" takes as argument a string that is composed -# of zero or multiple characters. The empty string means that notifications -# are disabled. 
-# -# Example: to enable list and generic events, from the point of view of the -# event name, use: -# -# notify-keyspace-events Elg -# -# Example 2: to get the stream of the expired keys subscribing to channel -# name __keyevent@0__:expired use: -# -# notify-keyspace-events Ex -# -# By default all notifications are disabled because most users don't need -# this feature and the feature has some overhead. Note that if you don't -# specify at least one of K or E, no events will be delivered. -notify-keyspace-events "" - -############################### ADVANCED CONFIG ############################### - -# Hashes are encoded using a memory efficient data structure when they have a -# small number of entries, and the biggest entry does not exceed a given -# threshold. These thresholds can be configured using the following directives. -hash-max-ziplist-entries 512 -hash-max-ziplist-value 64 - -# Lists are also encoded in a special way to save a lot of space. -# The number of entries allowed per internal list node can be specified -# as a fixed maximum size or a maximum number of elements. -# For a fixed maximum size, use -5 through -1, meaning: -# -5: max size: 64 Kb <-- not recommended for normal workloads -# -4: max size: 32 Kb <-- not recommended -# -3: max size: 16 Kb <-- probably not recommended -# -2: max size: 8 Kb <-- good -# -1: max size: 4 Kb <-- good -# Positive numbers mean store up to _exactly_ that number of elements -# per list node. -# The highest performing option is usually -2 (8 Kb size) or -1 (4 Kb size), -# but if your use case is unique, adjust the settings as necessary. -list-max-ziplist-size -2 - -# Lists may also be compressed. -# Compress depth is the number of quicklist ziplist nodes from *each* side of -# the list to *exclude* from compression. The head and tail of the list -# are always uncompressed for fast push/pop operations. 
Settings are: -# 0: disable all list compression -# 1: depth 1 means "don't start compressing until after 1 node into the list, -# going from either the head or tail" -# So: [head]->node->node->...->node->[tail] -# [head], [tail] will always be uncompressed; inner nodes will compress. -# 2: [head]->[next]->node->node->...->node->[prev]->[tail] -# 2 here means: don't compress head or head->next or tail->prev or tail, -# but compress all nodes between them. -# 3: [head]->[next]->[next]->node->node->...->node->[prev]->[prev]->[tail] -# etc. -list-compress-depth 0 - -# Sets have a special encoding in just one case: when a set is composed -# of just strings that happen to be integers in radix 10 in the range -# of 64 bit signed integers. -# The following configuration setting sets the limit in the size of the -# set in order to use this special memory saving encoding. -set-max-intset-entries 512 - -# Similarly to hashes and lists, sorted sets are also specially encoded in -# order to save a lot of space. This encoding is only used when the length and -# elements of a sorted set are below the following limits: -zset-max-ziplist-entries 128 -zset-max-ziplist-value 64 - -# HyperLogLog sparse representation bytes limit. The limit includes the -# 16 bytes header. When an HyperLogLog using the sparse representation crosses -# this limit, it is converted into the dense representation. -# -# A value greater than 16000 is totally useless, since at that point the -# dense representation is more memory efficient. -# -# The suggested value is ~ 3000 in order to have the benefits of -# the space efficient encoding without slowing down too much PFADD, -# which is O(N) with the sparse encoding. The value can be raised to -# ~ 10000 when CPU is not a concern, but space is, and the data set is -# composed of many HyperLogLogs with cardinality in the 0 - 15000 range. 
-hll-sparse-max-bytes 3000 - -# Active rehashing uses 1 millisecond every 100 milliseconds of CPU time in -# order to help rehashing the main Redis hash table (the one mapping top-level -# keys to values). The hash table implementation Redis uses (see dict.c) -# performs a lazy rehashing: the more operation you run into a hash table -# that is rehashing, the more rehashing "steps" are performed, so if the -# server is idle the rehashing is never complete and some more memory is used -# by the hash table. -# -# The default is to use this millisecond 10 times every second in order to -# actively rehash the main dictionaries, freeing memory when possible. -# -# If unsure: -# use "activerehashing no" if you have hard latency requirements and it is -# not a good thing in your environment that Redis can reply from time to time -# to queries with 2 milliseconds delay. -# -# use "activerehashing yes" if you don't have such hard requirements but -# want to free memory asap when possible. -activerehashing yes - -# The client output buffer limits can be used to force disconnection of clients -# that are not reading data from the server fast enough for some reason (a -# common reason is that a Pub/Sub client can't consume messages as fast as the -# publisher can produce them). -# -# The limit can be set differently for the three different classes of clients: -# -# normal -> normal clients including MONITOR clients -# slave -> slave clients -# pubsub -> clients subscribed to at least one pubsub channel or pattern -# -# The syntax of every client-output-buffer-limit directive is the following: -# -# client-output-buffer-limit -# -# A client is immediately disconnected once the hard limit is reached, or if -# the soft limit is reached and remains reached for the specified number of -# seconds (continuously). 
-# So for instance if the hard limit is 32 megabytes and the soft limit is -# 16 megabytes / 10 seconds, the client will get disconnected immediately -# if the size of the output buffers reach 32 megabytes, but will also get -# disconnected if the client reaches 16 megabytes and continuously overcomes -# the limit for 10 seconds. -# -# By default normal clients are not limited because they don't receive data -# without asking (in a push way), but just after a request, so only -# asynchronous clients may create a scenario where data is requested faster -# than it can read. -# -# Instead there is a default limit for pubsub and slave clients, since -# subscribers and slaves receive data in a push fashion. -# -# Both the hard or the soft limit can be disabled by setting them to zero. -client-output-buffer-limit normal 0 0 0 -client-output-buffer-limit slave 256mb 64mb 60 -client-output-buffer-limit pubsub 32mb 8mb 60 - -# Client query buffers accumulate new commands. They are limited to a fixed -# amount by default in order to avoid that a protocol desynchronization (for -# instance due to a bug in the client) will lead to unbound memory usage in -# the query buffer. However you can configure it here if you have very special -# needs, such us huge multi/exec requests or alike. -# -# client-query-buffer-limit 1gb - -# In the Redis protocol, bulk requests, that are, elements representing single -# strings, are normally limited ot 512 mb. However you can change this limit -# here. -# -# proto-max-bulk-len 512mb - -# Redis calls an internal function to perform many background tasks, like -# closing connections of clients in timeout, purging expired keys that are -# never requested, and so forth. -# -# Not all tasks are performed with the same frequency, but Redis checks for -# tasks to perform according to the specified "hz" value. -# -# By default "hz" is set to 10. 
Raising the value will use more CPU when -# Redis is idle, but at the same time will make Redis more responsive when -# there are many keys expiring at the same time, and timeouts may be -# handled with more precision. -# -# The range is between 1 and 500, however a value over 100 is usually not -# a good idea. Most users should use the default of 10 and raise this up to -# 100 only in environments where very low latency is required. -hz 10 - -# When a child rewrites the AOF file, if the following option is enabled -# the file will be fsync-ed every 32 MB of data generated. This is useful -# in order to commit the file to the disk more incrementally and avoid -# big latency spikes. -aof-rewrite-incremental-fsync yes - -# Redis LFU eviction (see maxmemory setting) can be tuned. However it is a good -# idea to start with the default settings and only change them after investigating -# how to improve the performances and how the keys LFU change over time, which -# is possible to inspect via the OBJECT FREQ command. -# -# There are two tunable parameters in the Redis LFU implementation: the -# counter logarithm factor and the counter decay time. It is important to -# understand what the two parameters mean before changing them. -# -# The LFU counter is just 8 bits per key, it's maximum value is 255, so Redis -# uses a probabilistic increment with logarithmic behavior. Given the value -# of the old counter, when a key is accessed, the counter is incremented in -# this way: -# -# 1. A random number R between 0 and 1 is extracted. -# 2. A probability P is calculated as 1/(old_value*lfu_log_factor+1). -# 3. The counter is incremented only if R < P. -# -# The default lfu-log-factor is 10. 
This is a table of how the frequency -# counter changes with a different number of accesses with different -# logarithmic factors: -# -# +--------+------------+------------+------------+------------+------------+ -# | factor | 100 hits | 1000 hits | 100K hits | 1M hits | 10M hits | -# +--------+------------+------------+------------+------------+------------+ -# | 0 | 104 | 255 | 255 | 255 | 255 | -# +--------+------------+------------+------------+------------+------------+ -# | 1 | 18 | 49 | 255 | 255 | 255 | -# +--------+------------+------------+------------+------------+------------+ -# | 10 | 10 | 18 | 142 | 255 | 255 | -# +--------+------------+------------+------------+------------+------------+ -# | 100 | 8 | 11 | 49 | 143 | 255 | -# +--------+------------+------------+------------+------------+------------+ -# -# NOTE: The above table was obtained by running the following commands: -# -# redis-benchmark -n 1000000 incr foo -# redis-cli object freq foo -# -# NOTE 2: The counter initial value is 5 in order to give new objects a chance -# to accumulate hits. -# -# The counter decay time is the time, in minutes, that must elapse in order -# for the key counter to be divided by two (or decremented if it has a value -# less <= 10). -# -# The default value for the lfu-decay-time is 1. A Special value of 0 means to -# decay the counter every time it happens to be scanned. -# -# lfu-log-factor 10 -# lfu-decay-time 1 - -########################### ACTIVE DEFRAGMENTATION ####################### -# -# WARNING THIS FEATURE IS EXPERIMENTAL. However it was stress tested -# even in production and manually tested by multiple engineers for some -# time. -# -# What is active defragmentation? -# ------------------------------- -# -# Active (online) defragmentation allows a Redis server to compact the -# spaces left between small allocations and deallocations of data in memory, -# thus allowing to reclaim back memory. 
-# -# Fragmentation is a natural process that happens with every allocator (but -# less so with Jemalloc, fortunately) and certain workloads. Normally a server -# restart is needed in order to lower the fragmentation, or at least to flush -# away all the data and create it again. However thanks to this feature -# implemented by Oran Agra for Redis 4.0 this process can happen at runtime -# in an "hot" way, while the server is running. -# -# Basically when the fragmentation is over a certain level (see the -# configuration options below) Redis will start to create new copies of the -# values in contiguous memory regions by exploiting certain specific Jemalloc -# features (in order to understand if an allocation is causing fragmentation -# and to allocate it in a better place), and at the same time, will release the -# old copies of the data. This process, repeated incrementally for all the keys -# will cause the fragmentation to drop back to normal values. -# -# Important things to understand: -# -# 1. This feature is disabled by default, and only works if you compiled Redis -# to use the copy of Jemalloc we ship with the source code of Redis. -# This is the default with Linux builds. -# -# 2. You never need to enable this feature if you don't have fragmentation -# issues. -# -# 3. Once you experience fragmentation, you can enable this feature when -# needed with the command "CONFIG SET activedefrag yes". -# -# The configuration parameters are able to fine tune the behavior of the -# defragmentation process. If you are not sure about what they mean it is -# a good idea to leave the defaults untouched. 
- -# Enabled active defragmentation -# activedefrag yes - -# Minimum amount of fragmentation waste to start active defrag -# active-defrag-ignore-bytes 100mb - -# Minimum percentage of fragmentation to start active defrag -# active-defrag-threshold-lower 10 - -# Maximum percentage of fragmentation at which we use maximum effort -# active-defrag-threshold-upper 100 - -# Minimal effort for defrag in CPU percentage -# active-defrag-cycle-min 25 - -# Maximal effort for defrag in CPU percentage -# active-defrag-cycle-max 75 diff --git a/aoe-semantic-apis/src/app.ts b/aoe-semantic-apis/src/app.ts index 3417e9513..3127f42f3 100644 --- a/aoe-semantic-apis/src/app.ts +++ b/aoe-semantic-apis/src/app.ts @@ -5,6 +5,7 @@ import cron from 'node-cron'; import cors from 'cors'; import router from './routes'; +import healthRouter from './healthRoute'; import { client, updateRedis } from './util/redis.utils'; import { winstonLogger } from './util'; @@ -25,7 +26,8 @@ client.on('error', (error: any) => { winstonLogger.error(error); }); -client.on('connect', async () => { +client.on('ready', async () => { + winstonLogger.info('Pushing data to REDIS'); await updateRedis(); }); @@ -36,7 +38,8 @@ cron.schedule('0 0 3 * * 0', async () => { }); // Prefixed routes -app.use('/api/v1', router); +app.use('/ref/api/v1', router); +app.use('/', healthRouter); // Default error handler. 
app.use(((err, req: Request, res: Response, next: NextFunction) => { diff --git a/aoe-semantic-apis/src/config/index.ts b/aoe-semantic-apis/src/config/index.ts index 017a498a2..7691953e3 100644 --- a/aoe-semantic-apis/src/config/index.ts +++ b/aoe-semantic-apis/src/config/index.ts @@ -13,6 +13,7 @@ process.env.EXTERNAL_API_SUOMI_KOODISTOT || missingEnvs.push('EXTERNAL_API_SUOMI process.env.REDIS_HOST || missingEnvs.push('REDIS_HOST'); process.env.REDIS_PASS || missingEnvs.push('REDIS_PASS'); process.env.REDIS_PORT || missingEnvs.push('REDIS_PORT'); +process.env.REDIS_USE_TLS || missingEnvs.push('REDIS_USE_TLS'); if (missingEnvs.length > 0) { winstonLogger.error('All required environment variables are not available: %s', missingEnvs); @@ -41,6 +42,8 @@ export default { REDIS_OPTIONS: { host: process.env.REDIS_HOST as string, pass: process.env.REDIS_PASS as string, + username: process.env.REDIS_USERNAME as string, port: (parseInt(process.env.REDIS_PORT as string, 10) as number) || 6379, + protocol: process.env.REDIS_USE_TLS != 'true' ? 'redis' : 'rediss', }, } as any; diff --git a/aoe-semantic-apis/src/controllers/asiasanat.ts b/aoe-semantic-apis/src/controllers/asiasanat.ts index 7b7e53c12..d8c7e696c 100644 --- a/aoe-semantic-apis/src/controllers/asiasanat.ts +++ b/aoe-semantic-apis/src/controllers/asiasanat.ts @@ -18,12 +18,14 @@ const params = 'data'; * @returns {Promise} */ export async function setAsiasanat(): Promise { + winstonLogger.info('Getting asiasanat from API in setAsiasanat()'); const results: Record[] = await getDataFromApi( config.EXTERNAL_API.asiasanat || 'not-defined', `/${endpoint}/`, { Accept: 'application/rdf+xml' }, params, ); + winstonLogger.info('... 
API fetch done!'); if (!results || results?.length < 500) { winstonLogger.error('No data from api.finto.fi'); @@ -87,9 +89,13 @@ export async function setAsiasanat(): Promise { winstonLogger.error('Creating new sets of YSO asiasanat failed in setAsiasanat()'); return; } else { + winstonLogger.info('Pushing asiasanat to Redis...'); await setAsync(`${rediskey}.fi`, JSON.stringify(finnish)); + winstonLogger.info('... finnish done!'); await setAsync(`${rediskey}.en`, JSON.stringify(english)); + winstonLogger.info('... english done!'); await setAsync(`${rediskey}.sv`, JSON.stringify(swedish)); + winstonLogger.info('... swedish done! Finished!'); } } catch (err) { throw Error(err); diff --git a/aoe-semantic-apis/src/controllers/koulutusasteet.ts b/aoe-semantic-apis/src/controllers/koulutusasteet.ts index 0e6214218..a74aa50da 100644 --- a/aoe-semantic-apis/src/controllers/koulutusasteet.ts +++ b/aoe-semantic-apis/src/controllers/koulutusasteet.ts @@ -17,12 +17,14 @@ const params = 'codes/?format=json'; * @returns {Promise} */ export async function setKoulutusasteet(): Promise { + winstonLogger.info('Getting educational levels from API in setKoulutus()'); const results: Record[] = await getDataFromApi( config.EXTERNAL_API.suomiKoodistot, `/${endpoint}/`, { Accept: 'application/json' }, params, ); + winstonLogger.info('... 
API fetch done!'); if (!results || !(results as any).results || (results as any).results.length < 1) { winstonLogger.error('No data from koodistot.suomi.fi in setKoulutusasteet()'); diff --git a/aoe-semantic-apis/src/healthRoute.ts b/aoe-semantic-apis/src/healthRoute.ts new file mode 100644 index 000000000..d48131e90 --- /dev/null +++ b/aoe-semantic-apis/src/healthRoute.ts @@ -0,0 +1,9 @@ +import express, { Request, Response } from 'express'; + +const router = express.Router(); + +router.get('/health', (req: Request, res: Response) => { + res.status(200).send('OK'); +}); + +export default router; diff --git a/aoe-semantic-apis/src/util/redis.utils.ts b/aoe-semantic-apis/src/util/redis.utils.ts index 5f0bf5aad..7abb9a879 100644 --- a/aoe-semantic-apis/src/util/redis.utils.ts +++ b/aoe-semantic-apis/src/util/redis.utils.ts @@ -27,15 +27,35 @@ import { import { setLukionVanhatOppiaineetKurssit } from '../controllers/vanha-lukio'; import { setTuvaOppiaineetTavoitteet } from '../controllers/tuva'; -export const client = redis.createClient({ - host: config.REDIS_OPTIONS.host, - port: config.REDIS_OPTIONS.port, - password: config.REDIS_OPTIONS.pass, -}); +export const client = redis + .createClient({ + url: `${config.REDIS_OPTIONS.protocol}://${config.REDIS_OPTIONS.username}:${encodeURIComponent( + config.REDIS_OPTIONS.pass, + )}@${config.REDIS_OPTIONS.host}:${config.REDIS_OPTIONS.port}`, + }) + .on('ready', () => { + winstonLogger.info( + 'REDIS [%s://%s:%d] Connection is operable', + config.REDIS_OPTIONS.protocol, + config.REDIS_OPTIONS.host, + config.REDIS_OPTIONS.port, + ); + }) + .on('error', (err: Error): void => { + winstonLogger.error( + 'REDIS [%s://%s:%d] Error: %o', + config.REDIS_OPTIONS.protocol, + config.REDIS_OPTIONS.host, + config.REDIS_OPTIONS.port, + err, + ); + }); + export const getAsync = promisify(client.get).bind(client); export const setAsync = promisify(client.set).bind(client); export async function updateRedis(): Promise { + 
winstonLogger.info('Starting Redis update ...'); await setAsiasanat().catch((err) => winstonLogger.error('Setting YSO asiasanat failed in setAsiasanat(): %o', err)); await setKoulutusasteet().catch((err) => winstonLogger.error('Setting educational levels failed in setKoulutusasteet(): %o', err), @@ -111,4 +131,5 @@ export async function updateRedis(): Promise { err, ), ); + winstonLogger.info('... Redis update done!'); } diff --git a/aoe-streaming-app/.env.template b/aoe-streaming-app/.env.template index f43857ac4..1a7aa16f5 100644 --- a/aoe-streaming-app/.env.template +++ b/aoe-streaming-app/.env.template @@ -1,11 +1,12 @@ # Application LOG_LEVEL=debug +PORT=3001 NODE_ENV=development -PORT=3000 # Object Storage -STORAGE_BUCKET= +STORAGE_BUCKET=aoe STORAGE_KEY= STORAGE_REGION= STORAGE_SECRET= STORAGE_URL= + diff --git a/aoe-streaming-app/.gitignore b/aoe-streaming-app/.gitignore index beb25e4b5..c6c5ea78f 100644 --- a/aoe-streaming-app/.gitignore +++ b/aoe-streaming-app/.gitignore @@ -3,6 +3,8 @@ logs node_modules output -.env .idea .vscode + +# env file +.env diff --git a/aoe-streaming-app/.gitlab-ci.yml b/aoe-streaming-app/.gitlab-ci.yml deleted file mode 100644 index 4322102ef..000000000 --- a/aoe-streaming-app/.gitlab-ci.yml +++ /dev/null @@ -1,83 +0,0 @@ -variables: - DOCKER_HOST: unix:///var/run/docker.sock - DOCKER_DRIVER: overlay2 - -image: - name: docker/compose:1.29.2 - -services: - - name: docker:dind - alias: docker - -stages: - - build - - deploy - - cleanup - -before_script: - - docker version - - docker-compose version - -build_demo: - stage: build - script: - - echo "DEMO build" - - docker-compose -f docker-compose.prod.yml down - - docker-compose -f docker-compose.prod.yml build - only: - - dev - tags: - - demo-stream - -deploy_demo: - stage: deploy - script: - - echo "DEMO deploy" - - docker-compose -f docker-compose.prod.yml up -d - only: - - dev - tags: - - demo-stream - -cleanup_demo: - stage: cleanup - script: - - echo "DEMO system clean up" - 
- docker system prune -a --volumes -f - only: - - dev - tags: - - demo-stream - when: manual - -build_prod: - stage: build - script: - - echo "PROD build" - - docker-compose -f docker-compose.prod.yml down - - docker-compose -f docker-compose.prod.yml build - only: - - main - tags: - - prod-stream - -deploy_prod: - stage: deploy - script: - - echo "PROD deploy" - - docker-compose -f docker-compose.prod.yml up -d - only: - - main - tags: - - prod-stream - -cleanup_prod: - stage: cleanup - script: - - echo "PROD system clean up" - - docker system prune -a --volumes -f - only: - - main - tags: - - prod-stream - when: manual diff --git a/aoe-streaming-app/README.md b/aoe-streaming-app/README.md index 9ca2923ea..4c37869c8 100644 --- a/aoe-streaming-app/README.md +++ b/aoe-streaming-app/README.md @@ -1,13 +1,3 @@ -# [AOE - Library of Open Educational Resources](https://github.com/CSCfi/aoe) - -## Service Component Links in GitHub (mirrored) -- [aoe-data-analytics](https://github.com/CSCfi/aoe-data-analytics) -- [aoe-data-services](https://github.com/CSCfi/aoe-data-services) -- [aoe-semantic-apis](https://github.com/CSCfi/aoe-semantic-apis) -- aoe-streaming-app -- [aoe-web-backend](https://github.com/CSCfi/aoe-web-backend) -- [aoe-web-frontend](https://github.com/CSCfi/aoe-web-frontend) - # AOE Streaming Service Media Streaming Service for the Library of Open Educational Resources diff --git a/aoe-streaming-app/deploy-scripts/01-build.sh b/aoe-streaming-app/deploy-scripts/01-build.sh new file mode 100755 index 000000000..ae73dd30e --- /dev/null +++ b/aoe-streaming-app/deploy-scripts/01-build.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +# shellcheck source=../scripts/common-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../scripts/common-functions.sh" + +# shellcheck source=./deploy-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../deploy-scripts/deploy-functions.sh" +source "$( 
cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../scripts/build-functions.sh" + + +function main { + local aoe_service_name="aoe-streaming-app" + local service_image_tag="AOE_STREAMING_APP_TAG" + + cd "$repo" + + buildService "$aoe_service_name" "$service_image_tag" +} + +main + + diff --git a/aoe-streaming-app/deploy-scripts/02-push-image.sh b/aoe-streaming-app/deploy-scripts/02-push-image.sh new file mode 100755 index 000000000..0113cf2b4 --- /dev/null +++ b/aoe-streaming-app/deploy-scripts/02-push-image.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +# shellcheck source=../scripts/common-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../scripts/common-functions.sh" + +# shellcheck source=./deploy-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../deploy-scripts/deploy-functions.sh" + +function main() { + setup + + local aoe_service_name="aoe-streaming-app" + local github_image_tag="$github_registry${aoe_service_name}:${IMAGE_TAG}" + + local ecr_registry="${REGISTRY}/$aoe_service_name" + local ecr_image_tag="${ecr_registry}:${IMAGE_TAG}" + upload_image_to_ecr "$github_image_tag" "$ecr_image_tag" +} + +function setup() { + cd "${repo}" + require_command docker + require_docker_compose + configure_aws_credentials + get_ecr_login_credentials +} + + +main "$@" diff --git a/aoe-streaming-app/docker-compose.prod.yml b/aoe-streaming-app/docker-compose.prod.yml deleted file mode 100644 index 4568312f4..000000000 --- a/aoe-streaming-app/docker-compose.prod.yml +++ /dev/null @@ -1,19 +0,0 @@ -version: "3.8" -services: - aoe-streaming-app: - build: - context: . 
- dockerfile: ./docker/Dockerfile - image: aoe-streaming-app:latest - container_name: aoe-streaming-app - restart: unless-stopped - ports: - - "3000:3000" - env_file: - - /environment/aoe-streaming-app/.env - networks: - - aoe-streaming-network - -networks: - aoe-streaming-network: - driver: bridge diff --git a/aoe-streaming-app/docker-compose.yml b/aoe-streaming-app/docker-compose.yml deleted file mode 100644 index 463e5cd5c..000000000 --- a/aoe-streaming-app/docker-compose.yml +++ /dev/null @@ -1,21 +0,0 @@ -version: "3.8" -services: - aoe-streaming-app: - build: - context: . - dockerfile: ./docker/Dockerfile - image: aoe-streaming-app:latest - container_name: aoe-streaming-app - restart: unless-stopped - ports: - - "3000:3000" - environment: - - NODE_ENV=development - env_file: - - ./.env - networks: - - aoe-streaming-network - -networks: - aoe-streaming-network: - driver: bridge diff --git a/aoe-streaming-app/src/app.ts b/aoe-streaming-app/src/app.ts index c7776307e..a42d6197b 100644 --- a/aoe-streaming-app/src/app.ts +++ b/aoe-streaming-app/src/app.ts @@ -37,7 +37,7 @@ app.use(morganHttpLogger); // Connected API versions and custom middlewares app.use('/', apiRouterRoot); -app.use('/api/v1', postHttpProcessor, apiRouterV1); +app.use('/stream/api/v1', postHttpProcessor, apiRouterV1); app.use('/favicon.ico', express.static('./views/favicon.ico')); // Default error handler diff --git a/aoe-streaming-app/src/server.ts b/aoe-streaming-app/src/server.ts index 2016fa980..76cb8406b 100644 --- a/aoe-streaming-app/src/server.ts +++ b/aoe-streaming-app/src/server.ts @@ -16,21 +16,12 @@ const server: Server = app.listen(port, () => { // Socket event handlers for the debugging purposes server.on('connection', (socket: Socket) => { - // winstonLogger.debug('SOCKET OPENED: ' + JSON.stringify(socket.address())); - // socket.on('end', () => console.log('SOCKET END: other end of the socket sends a FIN packet')); socket.on('timeout', () => { - // winstonLogger.debug('SOCKET 
TIMEOUT'); - // socket.destroy(); socket.end(); }); socket.on('error', () => { - // winstonLogger.error('SOCKET ERROR: %s', JSON.stringify(error)); - // socket.destroy(); socket.end(); }); - // socket.on('close', (isError: boolean) => { - // winstonLogger.debug('SOCKET CLOSED: ' + JSON.stringify({ isError: isError })); - // }); }); export default server; diff --git a/aoe-streaming-app/src/service/storage-service.ts b/aoe-streaming-app/src/service/storage-service.ts index 4f99db9e4..080bf32e8 100644 --- a/aoe-streaming-app/src/service/storage-service.ts +++ b/aoe-streaming-app/src/service/storage-service.ts @@ -4,14 +4,20 @@ import { ServiceConfigurationOptions } from 'aws-sdk/lib/service'; import { Request, Response } from 'express'; import { winstonLogger } from '../util'; +const isProd = process.env.NODE_ENV === 'production'; + // Cloud object storage configuration const configAWS: ServiceConfigurationOptions = { - credentials: { - accessKeyId: process.env.STORAGE_KEY as string, - secretAccessKey: process.env.STORAGE_SECRET as string, - }, - endpoint: process.env.STORAGE_URL as string, region: process.env.STORAGE_REGION as string, + ...(!isProd + ? 
{ + endpoint: process.env.STORAGE_URL as string, + credentials: { + accessKeyId: process.env.STORAGE_KEY as string, + secretAccessKey: process.env.STORAGE_SECRET as string, + }, + } + : {}), }; const configS3: ClientConfiguration = { httpOptions: { @@ -76,7 +82,6 @@ export const getObjectAsStream = async (req: Request, res: Response): Promise { // Forward headers to the response - // res.set(headers); res.set({ 'Content-Length': headers['content-length'], 'Content-Range': headers['content-range'], @@ -112,8 +117,6 @@ export const getObjectAsStream = async (req: Request, res: Response): Promise { winstonLogger.debug( @@ -122,8 +125,6 @@ export const getObjectAsStream = async (req: Request, res: Response): Promise [express], Session & Partial [express-session], Passport [passport] and - * User [express], each from a different 3rd party JS source library. - * - * The directory of custom TypeScript declaration files must be added into "typeRoots" of tsconfig.json file and naming - * conventions must follow [@]types//index.d.ts. It is also possible to place a single index.d.ts file in - * the root of project's source tree like src/index.d.ts to declare custom types. 
- * - * For more information see https://www.typescriptlang.org/docs/handbook/declaration-files/introduction.html - */ -// declare module 'express-session' { -// interface SessionData { -// passport: Passport; -// } -// -// interface Passport { -// user: User; -// } -// -// interface User { -// uid: string; -// } -// } diff --git a/aoe-web-backend/test.env b/aoe-web-backend/.env.template similarity index 51% rename from aoe-web-backend/test.env rename to aoe-web-backend/.env.template index c33f62bd7..9954bff05 100644 --- a/aoe-web-backend/test.env +++ b/aoe-web-backend/.env.template @@ -1,138 +1,138 @@ -## Application Preferences -## production | development | localhost | test -NODE_ENV=test -LOG_LEVEL=debug -PORT_LISTEN=3000 -TEST_RUN=true - -## AOE server and service component general purpose configurations -SERVER_CONFIG_OAIPMH_ANALYTICS_URL=https://demo.aoe.fi/stat - -## Kafka Message Queue System -## Comma separated list of User-Agent identifiers (case-insensitive) -KAFKA_EXCLUDED_AGENT_IDENTIFIERS=oersi -KAFKA_BROKER_SERVERS=127.0.0.1:9092,127.0.0.1:9094,1270.0.0.1:9096 -KAFKA_BROKER_TOPIC_MATERIAL_ACTIVITY=material_activity -KAFKA_BROKER_TOPIC_SEARCH_REQUESTS=search_requests -KAFKA_CLIENT_ID=aoe-web-backend - -## Session and Cookie Management -SESSION_COOKIE_DOMAIN=localhost -SESSION_COOKIE_HTTP_ONLY=true -SESSION_COOKIE_MAX_AGE=86400000 -SESSION_COOKIE_PATH=/api -SESSION_COOKIE_SAME_SITE=lax -SESSION_COOKIE_SECURE=false -SESSION_OPTION_PROXY=false -SESSION_OPTION_RESAVE=true -SESSION_OPTION_ROLLING=false -SESSION_OPTION_SAVE_UNINITIALIZED=true -SESSION_SECRET=SessionPass4CITestRun - -HTTP_OPTIONS_TIMEOUT=5000 -HTTP_OPTIONS_RETRY=2 -HTTP_OPTIONS_CLOCK_TOLERANCE=5 -FILE_DOWNLOAD_URL=https://demo.aoe.fi/api/v1/download/file/ -THUMBNAIL_DOWNLOAD_URL=https://demo.aoe.fi/api/v1/thumbnail/ -COLLECTION_THUMBNAIL_DOWNLOAD_URL=https://demo.aoe.fi/api/v1/collection/thumbnail/ -MATERIAL_VERSION_URL=https://demo.aoe.fi/#/material/ - -# ALLAS_ENABLED relaced with 
CLOUD_STORAGE_ENABLED -CLOUD_STORAGE_ENABLED=0 -KAFKA_ENABLED=1 -LOGIN_ENABLED=0 - -THUMBNAIL_END_POINT=/app/thumbnail/ -FILE_SIZE_LIMIT=10737418240 -THUMBNAIL_FILE_SIZE_LIMIT=10485760 - -## Cloud Storage -CLOUD_STORAGE_ACCESS_KEY= -CLOUD_STORAGE_ACCESS_SECRET= -CLOUD_STORAGE_API= -CLOUD_STORAGE_REGION= -CLOUD_STORAGE_BUCKET= -CLOUD_STORAGE_BUCKET_PDF= -CLOUD_STORAGE_BUCKET_THUMBNAIL= - -MATERIAL_FILE_UPLOAD_FOLDER=uploads - -## AAI Connection -CLIENT_ID= -CLIENT_SECRET= -PROXY_URI= -REDIRECT_URI=https://demo.aoe.fi/api/secure/redirect -SUCCESS_REDIRECT_URI=/ -FAILURE_REDIRECT_URI=/api/login - -## Backup Preferences -BACK_UP_PATH=/databackup/testing/ -THUMBNAIL_BACK_UP_PATH=/databackup/aoethumbnailtest/ - -## Elasticsearch -# CREATE_ES_INDEX=1 -ES_NODE=http://demo.opmat-es.csc.fi:9200 -ES_INDEX=aoetest -ES_COLLECTION_INDEX=aoecollectiontest -ES_MAPPING_FILE=/app/aoemapping.json -ES_COLLECTION_MAPPING_FILE=/app/aoecollectionmapping.json -ES_SIZE_DEFAULT=1000 -ES_FROM_DEFAULT=0 - -## Email Preferences -BASE_URL=http://localhost:3000/api/v1/ -JWT_SECRET= -EMAIL_FROM= -TRANSPORT_AUTH_USER= -TRANSPORT_AUTH_HOST= -TRANSPORT_PORT=25 -SEND_EMAIL=0 -VERIFY_EMAIL_REDIRECT_URL=/ - -## H5P and HTML Content -HTML_BASE_URL=http://localhost:4200 -HTML_FOLDER=/webdata/htmlfolder - -H5P_JSON_CONFIGURATION=dist/services/config/h5p.json -H5P_PATH_LIBRARIES=/webdata/h5p/libraries -H5P_PATH_TEMPORARY_STORAGE=/webdata/h5p/temporary-storage -H5P_PATH_CONTENT=/webdata/h5p/content -H5P_PATH_CORE=/webdata/h5p/core -H5P_PATH_EDITOR=/webdata/h5p/editor -H5P_PLAY_API=http://localhost:3000/h5p/play/ -H5P_USER_EMAIL= - -## PDF Converter -CONVERSION_TO_PDF_API=https://localhost:3000/api/v1/pdf/content/ -CONVERSION_TO_PDF_ENABLED=0 - -## PID Service -PID_SERVICE_RUN_SCHEDULED=0 -PID_SERVICE_ENABLED=0 -PID_API_KEY= -PID_SERVICE_URL= - -## PosgreSQL Database -POSTGRESQL_HOST=localhost -POSTGRESQL_PORT=5432 -POSTGRESQL_DATA=aoe - -POSTGRES_USER= -POSTGRES_PASSWORD= - -PG_USER= -PG_PASS= - -## Redis 
Session Storage -REDIS_HOST=localhost -REDIS_PORT=6380 -REDIS_PASS= - -## Stream Redirect Criteria -STREAM_ENABLED=0 -STREAM_FILESIZE_MIN=100000 -STREAM_REDIRECT_URI=https://stream.demo.aoe.fi/api/v1/material/ - -### Provide a host name without schema (http/https) -STREAM_STATUS_HOST=stream.demo.aoe.fi -STREAM_STATUS_PATH=/api/v1/material/ +## Application Preferences +## production | development | localhost | test +NODE_ENV=development +LOG_LEVEL=debug +PORT_LISTEN=3000 +PORT=3000 + +NODE_TLS_REJECT_UNAUTHORIZED=0 + +## AOE server and service component general purpose configurations +SERVER_CONFIG_OAIPMH_ANALYTICS_URL=http://aoe-data-analytics:8080 + +## Kafka Message Queue System +## Comma separated list of User-Agent identifiers (case-insensitive) +KAFKA_EXCLUDED_AGENT_IDENTIFIERS=oersi +KAFKA_BROKER_SERVERS= +KAFKA_BROKER_TOPIC_MATERIAL_ACTIVITY=material_activity +KAFKA_BROKER_TOPIC_SEARCH_REQUESTS=search_requests +KAFKA_CLIENT_ID=aoe-web-backend +KAFKA_CLIENT_REGION=eu-west-1 + +## Session and Cookie Management +SESSION_COOKIE_DOMAIN=localhost:4200 +SESSION_COOKIE_HTTP_ONLY=true +SESSION_COOKIE_MAX_AGE=86400000 +SESSION_COOKIE_PATH=/api +SESSION_COOKIE_SAME_SITE=none +SESSION_COOKIE_SECURE=false +SESSION_OPTION_PROXY=false +SESSION_OPTION_RESAVE=false +SESSION_OPTION_ROLLING=false +SESSION_OPTION_SAVE_UNINITIALIZED=false +SESSION_SECRET= + +HTTP_OPTIONS_TIMEOUT=5000 +HTTP_OPTIONS_RETRY=2 +HTTP_OPTIONS_CLOCK_TOLERANCE=5 + +FILE_DOWNLOAD_URL=https://demo.aoe.fi/api/v1/download/file/ +THUMBNAIL_DOWNLOAD_URL=https://demo.aoe.fi/api/v1/thumbnail/ +COLLECTION_THUMBNAIL_DOWNLOAD_URL=https://demo.aoe.fi/api/v1/collection/thumbnail/ +MATERIAL_VERSION_URL=https://demo.aoe.fi/#/material/ + +CLOUD_STORAGE_ACCESS_KEY= +CLOUD_STORAGE_ACCESS_SECRET= +CLOUD_STORAGE_API=http://s3.localhost.localstack.cloud:4566 +CLOUD_STORAGE_ENABLED=1 +KAFKA_ENABLED=1 +LOGIN_ENABLED=1 + +FILE_SIZE_LIMIT=10737418240 + +## Cloud Storage +CLOUD_STORAGE_ACCESS_KEY= +CLOUD_STORAGE_ACCESS_SECRET= 
+CLOUD_STORAGE_API=http://s3.localhost.localstack.cloud:4566 +CLOUD_STORAGE_REGION=eu-west-1 +CLOUD_STORAGE_BUCKET=aoe +CLOUD_STORAGE_BUCKET_PDF=aoepdf +CLOUD_STORAGE_BUCKET_THUMBNAIL=aoethumbnail + +MATERIAL_FILE_UPLOAD_FOLDER=/app/uploads + +## AAI Connection +CLIENT_ID=aoe-client +CLIENT_SECRET= +PROXY_URI=http://aoe-oidc-server +REDIRECT_URI=https://demo.aoe.fi/api/secure/redirect +SUCCESS_REDIRECT_URI=/ +FAILURE_REDIRECT_URI=/api/login + +## Elasticsearch +CREATE_ES_INDEX=1 +ES_NODE=http://opensearch:9200 +ES_INDEX=aoe +ES_COLLECTION_INDEX=aoecollection +ES_MAPPING_FILE=/app/aoemapping.json +ES_COLLECTION_MAPPING_FILE=/app/aoecollectionmapping.json +ES_SIZE_DEFAULT=1000 +ES_FROM_DEFAULT=0 + +## Email Preferences +BASE_URL=http://localhost:3000/api/v1/ +JWT_SECRET= +EMAIL_FROM= +TRANSPORT_AUTH_USER= +TRANSPORT_AUTH_HOST= +TRANSPORT_PORT=25 +SEND_EMAIL=0 +VERIFY_EMAIL_REDIRECT_URL=/ + +## H5P and HTML Content +HTML_BASE_URL=https://demo.aoe.fi +HTML_FOLDER=/webdata/htmlfolder +H5P_JSON_CONFIGURATION=dist/services/config/h5p.json +H5P_PATH_LIBRARIES=/webdata/h5p/libraries +H5P_PATH_TEMPORARY_STORAGE=/webdata/h5p/temporary-storage +H5P_PATH_CONTENT=/webdata/h5p/content +H5P_PATH_CORE=/app/h5p +H5P_PATH_EDITOR=/webdata/h5p/editor +H5P_PLAY_API=https://demo.aoe.fi/h5p/play/ +H5P_USER_EMAIL= + +## PDF Converter +CONVERSION_TO_PDF_API=https://demo.aoe.fi/api/v1/pdf/content/ +CONVERSION_TO_PDF_ENABLED=1 + +## PID Service +PID_SERVICE_RUN_SCHEDULED=0 +PID_SERVICE_ENABLED=0 + +## PosgreSQL Database +POSTGRESQL_HOST= +POSTGRESQL_PORT= +POSTGRESQL_DATA= + +PG_USER= +PG_PASS= + +## Redis Session Storage +REDIS_HOST= +REDIS_PORT= +REDIS_PASS= +REDIS_USERNAME= +REDIS_USE_TLS=false + + +## Stream Redirect Criteria +STREAM_ENABLED=1 +STREAM_FILESIZE_MIN=100000 +STREAM_REDIRECT_URI=https://demo.aoe.fi/stream/api/v1/material/ + +### Provide a host name without schema (http/https) +STREAM_STATUS_HOST=nginx +STREAM_STATUS_PATH=/stream/api/v1/material/ 
+STREAM_STATUS_HOST_HTTPS_ENABLED=1 + + diff --git a/aoe-web-backend/.gitignore b/aoe-web-backend/.gitignore index fab211652..f828c78e1 100644 --- a/aoe-web-backend/.gitignore +++ b/aoe-web-backend/.gitignore @@ -1,12 +1,12 @@ # Build dist/ +# env file +.env + # Dependency Libraries node_modules/ -# Environment Variables -.env - # IDEs .idea/ .vscode/ diff --git a/aoe-web-backend/.gitlab-ci.yml b/aoe-web-backend/.gitlab-ci.yml deleted file mode 100644 index 19df9266d..000000000 --- a/aoe-web-backend/.gitlab-ci.yml +++ /dev/null @@ -1,104 +0,0 @@ -variables: - DOCKER_HOST: unix:///var/run/docker.sock # tcp://docker:2375 - DOCKER_DRIVER: overlay2 - # DOCKER_TLS_CERTDIR: '/certs' - # DOCKER_CERT_PATH: '$DOCKER_TLS_CERTDIR/client' - # DOCKER_TLS_VERIFY: 1 - -image: docker:24.0.6 - -stages: - - test - - build - - deploy - - cleanup - -test_test: - stage: test - image: node:18.18.2 - script: - - npm ci - - npm run test - cache: - paths: - - node_modules/ - only: - - test - tags: - - test-apps - -build_test: - stage: build - script: - - echo "TEST build" - - docker login -u $AOE_DOCKER_USER -p $AOE_DOCKER_PASS $AOE_DOCKER_REGISTRY - - docker compose -f docker-compose.prod.yml build - only: - - test - tags: - - test-apps - -deploy_test: - stage: deploy - script: - - echo "TEST deploy" - - docker compose -f docker-compose.prod.yml up -d - only: - - test - tags: - - test-apps - -cleanup_test: - stage: cleanup - script: - - echo "TEST system clean up" - - docker system prune -a -f --volumes - only: - - test - tags: - - test-apps - -test_prod: - stage: test - image: node:18.18.2 - script: - - npm ci - - npm run test - cache: - paths: - - node_modules/ - only: - - main - tags: - - prod - -build_prod: - stage: build - script: - - echo "PROD build" - - docker login -u $AOE_DOCKER_USER -p $AOE_DOCKER_PASS $AOE_DOCKER_REGISTRY - - docker compose -f docker-compose.prod2.yml build - only: - - main - tags: - - prod - -deploy_prod: - stage: deploy - script: - - echo "PROD deploy" - - 
docker compose -f docker-compose.prod2.yml up -d - only: - - main - tags: - - prod - -cleanup_prod: - stage: cleanup - script: - - echo "PROD system clean up" - - docker system prune -a -f --volumes - only: - - main - tags: - - prod diff --git a/aoe-web-backend/README.md b/aoe-web-backend/README.md index 320e7e24d..cbe335ef0 100644 --- a/aoe-web-backend/README.md +++ b/aoe-web-backend/README.md @@ -1,9 +1,3 @@ -# [AOE - Library of Open Educational Resources](https://github.com/CSCfi/aoe) +# AOE Web Backend service -## Service Component Links in GitHub (mirror) - -- [aoe-data-analytics](https://github.com/CSCfi/aoe-data-analytics) -- [aoe-data-services](https://github.com/CSCfi/aoe-data-services) -- [aoe-semantic-apis](https://github.com/CSCfi/aoe-semantic-apis) -- [aoe-streaming-app](https://github.com/CSCfi/aoe-streaming-app) -- aoe-web-backend +Web-backend Service for the Library of Open Educational Resources diff --git a/aoe-web-backend/cert/README.md b/aoe-web-backend/cert/README.md deleted file mode 100644 index a1583e7d2..000000000 --- a/aoe-web-backend/cert/README.md +++ /dev/null @@ -1 +0,0 @@ -Self-signed certificates for development purposes diff --git a/aoe-web-backend/cert/cert.crt b/aoe-web-backend/cert/cert.crt deleted file mode 100644 index 53f1484ab..000000000 --- a/aoe-web-backend/cert/cert.crt +++ /dev/null @@ -1,42 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDTTCCAjWgAwIBAgIGMTQ0Mzc1MA0GCSqGSIb3DQEBCwUAMF4xEDAOBgNVBAMT -B1Rlc3QgQ0ExCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpDYWxpZm9ybmlhMRYwFAYD -VQQHEw1TYW4gRnJhbmNpc2NvMRAwDgYDVQQKEwdUZXN0IENBMB4XDTIyMDQyNzA4 -NTIwNFoXDTIzMDQyNzA4NTIwNFowFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA8HzWkbFihggQLEeTohTvgJTRAwBF -rDsBxbtX/tY9bMLrI1tdpDfYh/B5eGGy3GWDQhV8P2NY1lgCI++SFBVpHfRJtVjB -XXFdlxF7GSo1rCcbCEihmgbQgQ9w51M1fXfUybLCt3lQWXaAGfxDwjT8JND/ULq+ -4Q3Pn8kaSQdDy5Ig6vaMma/29c/jo+rFfLF+Ke5NrDa8POiOwRCtXlfQ9im8nfGa -Q2KCnt9VNCPsecltNB+BFtQ+YQCsMFJk+6v/oQ/X0ShwjJ5TCOsrQKMCXjIyhuiT 
-LGBM+qLgwXPdL2nV+5xkl1WTBomB1v6LHDJK+9OGVbALlOXzy20m330+PwIDAQAB -o1swWTAMBgNVHRMBAf8EAjAAMA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggr -BgEFBQcDAQYIKwYBBQUHAwIwGgYDVR0RBBMwEYIJbG9jYWxob3N0hwR/AAABMA0G -CSqGSIb3DQEBCwUAA4IBAQAmC7Nt3ms6VIhqT22PmStxtPBQFZtGyxWjajnuZIY8 -cNVufbBCpTA3YydTUT0TQBPWE94UFJAcGP+Oy4EWusvHc+6agy46WZCnH9Sb2rdA -e1IXzdcQ5l9SKvi9w87wQgpGKeKjkcyTuMyD6UmQsI2ymbdabnLN4Xi0zyDTdpWQ -Yc7NMf/QNA/GnRR1tTCrW5A0F63i4ssoVu1jTEFI0HVnGlvTz3o/NqFXz6ehlUzy -Ig8oSjV1HkjLJ1lL6RKnx2u8shthPjV6Gd5n+bbK2OZwQqqY3KyGo9Ct1C9jWnZL -Bj23Vcr2qtEZ4Xmca8hXeIdSyzbS8DPA5OAqrgbcjPj3 ------END CERTIFICATE----- - ------BEGIN CERTIFICATE----- -MIIDXjCCAkagAwIBAgIFNjk3ODYwDQYJKoZIhvcNAQELBQAwXjEQMA4GA1UEAxMH -VGVzdCBDQTELMAkGA1UEBhMCVVMxEzARBgNVBAgTCkNhbGlmb3JuaWExFjAUBgNV -BAcTDVNhbiBGcmFuY2lzY28xEDAOBgNVBAoTB1Rlc3QgQ0EwHhcNMjIwNDI3MDg1 -MDUxWhcNMjMwNDI3MDg1MDUxWjBeMRAwDgYDVQQDEwdUZXN0IENBMQswCQYDVQQG -EwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuIEZyYW5jaXNj -bzEQMA4GA1UEChMHVGVzdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAK4r9m/lQUaPaHJ63OFi6LFZexB42WoyGlBjU+Mig8PqG2d8BnviC7h4j+/z -j8D966gZKDiJvnMN9ZFXMfsgERNZoPgiBnN1I0Ew9jFLOPN59skszshSz4zs4nLw -LfCNOYKdbR37LtqCYPfKqr9PVG9eoP9VO3kGChpMJlv4DBw0IehRIn3DH7qUoxYR -uwQU2A+jColUgd9XPs+R9ZBcan0vcw8+ktPDuX3rX62vxj/jU6n4JADSgJfEdaYK -USff9YgYSDfeFMcdVGNHWqlnZA8qY4dk3plbuM4q4WEgCptQzohYZDOiKfTZr0K9 -zTeO0bnuADJZYEmStBy2ueCL86UCAwEAAaMjMCEwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAgQwDQYJKoZIhvcNAQELBQADggEBAHDMumddxPRE7B3IUCrz -bfwOhSmIYofT1rHXtpbrJGC0TdrgHLRhBBrApgoRjTiBDkrN18QNLU6tPHUCwkV/ -hzMCYQLKyH4UKdO39Lcb0+Q6kV1deOmkbaPhGKE4+tOqVOuueSl9hEfs9soSH55/ -We1WTBW7XvmuPh8Jz4sMzz78A9dYCZcpWWb6Cu2U6hrC7lBxWgqMhk/u0OuIc4EU -6GM7M3EOFbc6EK+PT8+gr0YMzM9wA/JzhTrqxpyAcwep/9IVW8zofQkUn3FkPW4x -K4EJ+aNrmUvSisyvMzeMNv8IgknU9v6sXsUvRSfNUyPC925tedlXfY7RYLf0AZED -hNw= ------END CERTIFICATE----- diff --git a/aoe-web-backend/cert/cert.key b/aoe-web-backend/cert/cert.key deleted file mode 100644 index 5dcecae93..000000000 --- 
a/aoe-web-backend/cert/cert.key +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEpQIBAAKCAQEA8HzWkbFihggQLEeTohTvgJTRAwBFrDsBxbtX/tY9bMLrI1td -pDfYh/B5eGGy3GWDQhV8P2NY1lgCI++SFBVpHfRJtVjBXXFdlxF7GSo1rCcbCEih -mgbQgQ9w51M1fXfUybLCt3lQWXaAGfxDwjT8JND/ULq+4Q3Pn8kaSQdDy5Ig6vaM -ma/29c/jo+rFfLF+Ke5NrDa8POiOwRCtXlfQ9im8nfGaQ2KCnt9VNCPsecltNB+B -FtQ+YQCsMFJk+6v/oQ/X0ShwjJ5TCOsrQKMCXjIyhuiTLGBM+qLgwXPdL2nV+5xk -l1WTBomB1v6LHDJK+9OGVbALlOXzy20m330+PwIDAQABAoIBAQDMl7hH7JxAIN35 -fc9opdz4DCMhJXsgwnaIjRCYuhU50T4QL2ukCOrLA9ejhaKQBEC92N+CYfezKt5v -SiKBuZFbLXgDswWWhJq1qeaLCzDsS7yguIHbMw7xrXxFILIyEt9R1SX4x1mHMtxa -vxW6RC969t8vBCDeAEzxyDn3hRm1Ne8564BQofv4N0cZPOjoxgtEvpq/F98qAN75 -0cyzEWUG0kLbcudIqQPiaNIIlMEziCmgZ/dseB/Dn27Evc9B42Nj5RV27J+JD908 -1u3uFBU00iuR8LeskkIcwWY9lYeuiaUx70vCfXoefvCHSNRXpFfH+aVHu6uEeocp -/jwz/w+pAoGBAP8GcSMUSktXYYfMljADWgIznmHMBcW/UQ7HzLkn+osYrXtSQbry -GMXiKhorINlOOe6xvv97Wyo85IQWLCYfGZ56XclHtDDdKPfvA991NgAPURwswHu4 -OpjMn8z0RldrgphaLNYzfB+p/OFb8biKbG0EvKq7TG/1G4yJ9k765wttAoGBAPFo -K5w4UWe/BBUCkZTdYcTfL3BRxfq19QPxyXIAjSTEvrWlYPfl3j4lpyotiwfa9/NQ -mirVt+rG+YoWwhmZJs7kRcFKCk59aLf4dnS94qf4cTVon4clRkcKETlM62FoyHvA -TDUgmdnKCKTEov1jQyYzujyQow6SdhTvxlSs7VjbAoGBAKIQxfU0/8HBVgmmygrZ -Fmovw4cC0OhKHIj4D/Hapi2fESA9tefH0lZu07KtlcKDI+28p1RUYCsHZ3FJZeUT -/oPn35H6L1ieM1rcWem34wlGspTlG62D9jkEUMk+szT67ZNEOxokvcyhikLs20Ab -N6xQRlV5kanUah11z0GB0TJBAoGAPbtsw4MnVgdjzNeeUNoX4VdNc0SGAym6oSqY -X/3CGi/xsNO8q0fuAChyyfdhgT2+gEsRUXjbkj8Vmvlc41zlrqxzJg1/+zhYqgrw -h8PzDQ8cLBM4EEA463uokeWYn41DD1pKYp+WmDnw4emi4Hccuc0qh8hY2iYb6hiy -t/6Mp1cCgYEA84Rmq51vn/jBvkqPuQ4JGegZwDjQ2YWaYY17eaqBfrhF7Npyp4qI -ppeg04n+qLm6Ou2ykAza64aOD5uj0T1vYDanUahEK3Slfbi5EhIGzDX8U08t+mqM -ygGd3Toe4bXwU8nJVtx55kqZaj1E4NECUstUaT2MWeYWiNrpVdkTYRk= ------END RSA PRIVATE KEY----- diff --git a/aoe-web-backend/deploy-scripts/01-build.sh b/aoe-web-backend/deploy-scripts/01-build.sh new file mode 100755 index 000000000..8de17547a --- /dev/null +++ b/aoe-web-backend/deploy-scripts/01-build.sh @@ -0,0 +1,23 @@ 
+#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +# shellcheck source=../scripts/common-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../scripts/common-functions.sh" + +# shellcheck source=./deploy-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../deploy-scripts/deploy-functions.sh" +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../scripts/build-functions.sh" + + +function main { + local aoe_service_name="aoe-web-backend" + local service_image_tag="AOE_WEB_BACKEND_TAG" + + cd "$repo" + + buildService "$aoe_service_name" "$service_image_tag" +} + +main + + diff --git a/aoe-web-backend/deploy-scripts/02-push-image.sh b/aoe-web-backend/deploy-scripts/02-push-image.sh new file mode 100755 index 000000000..f80a88f7f --- /dev/null +++ b/aoe-web-backend/deploy-scripts/02-push-image.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +# shellcheck source=../scripts/common-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../scripts/common-functions.sh" + +# shellcheck source=./deploy-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../deploy-scripts/deploy-functions.sh" + +function main() { + setup + + local aoe_service_name="aoe-web-backend" + local github_image_tag="$github_registry${aoe_service_name}:${IMAGE_TAG}" + + local ecr_registry="${REGISTRY}/$aoe_service_name" + local ecr_image_tag="${ecr_registry}:${IMAGE_TAG}" + upload_image_to_ecr "$github_image_tag" "$ecr_image_tag" +} + +function setup() { + cd "${repo}" + require_command docker + require_docker_compose + configure_aws_credentials + get_ecr_login_credentials +} + + +main "$@" diff --git a/aoe-web-backend/docker-compose.prod.yml b/aoe-web-backend/docker-compose.prod.yml deleted file mode 100644 index 88d0f82ea..000000000 --- a/aoe-web-backend/docker-compose.prod.yml +++ /dev/null @@ -1,31 +0,0 @@ -services: - aoe-web-backend: - build: - context: 
. - dockerfile: ./docker/Dockerfile - args: - AOE_DOCKER_REGISTRY: "${AOE_DOCKER_REGISTRY}" - image: test-aoe-web-backend:latest - container_name: aoe-web-backend - restart: unless-stopped - ports: - - "3002:3000" - volumes: - - /data/thumbnail/:/app/thumbnail:rw - - /data/uploads/:/app/uploads:rw - - /webdata/:/webdata:rw - - /databackup/aoe/:/databackup/aoe/ - - /databackup/aoethumbnail/:/databackup/aoethumbnail - env_file: - - "${ENV_FILE:-/environment/aoe-web-backend/.env}" - environment: - PG_USER: "${POSTGRES_USER}" - PG_PASS: "${POSTGRES_PASSWORD}" - REDIS_PASS: "${REDIS_SESSION_PASSWORD}" - user: "root:5606" - networks: - - network-web-backend - -networks: - network-web-backend: - driver: bridge diff --git a/aoe-web-backend/docker-compose.prod2.yml b/aoe-web-backend/docker-compose.prod2.yml deleted file mode 100644 index d9592774c..000000000 --- a/aoe-web-backend/docker-compose.prod2.yml +++ /dev/null @@ -1,31 +0,0 @@ -version: "3.9" -services: - aoe-web-backend: - build: - context: . - dockerfile: ./docker/Dockerfile - args: - AOE_DOCKER_REGISTRY: "${AOE_DOCKER_REGISTRY}" - image: aoe-web-backend:latest - container_name: aoe-web-backend - restart: unless-stopped - ports: - - "3002:3000" - volumes: - - /data/thumbnail/:/app/thumbnail:rw - - /data/uploads/:/app/uploads:rw - - /webdata/:/webdata:rw - - /databackup/aoe/:/databackup/aoe/ - - /databackup/aoethumbnail/:/databackup/aoethumbnail - env_file: - - "${ENV_FILE:-/environment/aoe-web-backend/.env}" - environment: - PG_USER: ${POSTGRES_USER_SECONDARY} - PG_PASS: ${POSTGRES_PASSWORD_SECONDARY} - user: root:5606 - networks: - - network-web-backend - -networks: - network-web-backend: - driver: bridge diff --git a/aoe-web-backend/docker-compose.yml b/aoe-web-backend/docker-compose.yml deleted file mode 100644 index 2697b8ab6..000000000 --- a/aoe-web-backend/docker-compose.yml +++ /dev/null @@ -1,73 +0,0 @@ -version: "3.9" -services: - aoe-web-backend: - build: - context: . 
- dockerfile: ./docker/Dockerfile - args: - AOE_DOCKER_REGISTRY: aoe-docker-local.artifactory.ci.csc.fi - image: aoe-web-backend:latest - container_name: aoe-web-backend - restart: unless-stopped - ports: - - "3000:3000" - depends_on: - - aoe-postgres - - aoe-redis-session - volumes: - - uploads:/app/uploads - - webdata:/webdata - # - data/thumbnail:/app/thumbnail - # - data/uploads:/app/uploads - # - data/webdata:/webdata - # - data/databackup/aoe/:/databackup/aoe/ - # - data/databackup/aoethumbnail:/databackup/aoethumbnail - environment: - - NODE_ENV=localhost - env_file: - - "${ENV_FILE:-.env}" - networks: - - network-development - - aoe-postgres: - build: - context: . - dockerfile: ./docker/aoe-postgres/Dockerfile - image: postgres:12-alpine - container_name: aoe-postgres - restart: unless-stopped - ports: - - "5432:5432" - environment: - - POSTGRES_DB=aoe - - POSTGRES_PASSWORD=aoe_secret - - POSTGRES_USER=aoe_admin - volumes: - - ./docker/aoe-postgres/init-user-db.sh:/docker-entrypoint-initdb.d/init-user-db.sh - - ./docker/aoe-postgres/aoe-db-dump.sql:/docker-entrypoint-initdb.d/aoe-db-dump.sql - networks: - - network-development - - aoe-redis-session: - build: - context: . 
- dockerfile: ./docker/aoe-redis-session/Dockerfile - image: redis-session:6.2.4-alpine - privileged: true - command: sh -c "./init.sh" - container_name: aoe-redis-session - restart: unless-stopped - ports: - - "6379:6379" - environment: - - REDIS_REPLICATION_MODE=master - networks: - - network-development - -volumes: - uploads: - webdata: - -networks: - network-development: - name: network-development diff --git a/aoe-web-backend/docker/Dockerfile b/aoe-web-backend/docker/Dockerfile index df31e64e6..65fc90eac 100644 --- a/aoe-web-backend/docker/Dockerfile +++ b/aoe-web-backend/docker/Dockerfile @@ -1,33 +1,29 @@ -ARG AOE_DOCKER_REGISTRY -FROM node:18.18.2-alpine3.18 as node_builder +FROM node:18.18.2-alpine3.18 AS node_builder + WORKDIR /app COPY ./ /app -# Install only locked dependencies from package-lock.json +# Install only locked dependencies from package-lock.json (faster) RUN npm ci && \ - npm run build -- --omit=dev - -FROM $AOE_DOCKER_REGISTRY/aoe-oracle-base:latest -# Preinstalled contents of aoe-oracle-base image: -# FROM oraclelinux:8.8 -# RUN groupadd --gid 5606 webapp && \ -# useradd -ms /bin/false -d /app node && \ -# mkdir -p /app/data && \ -# mkdir -p /app/uploads && \ -# mkdir -p /app/thumbnail && \ -# mkdir -p /webdata/ && \ -# chown -R node:node /app && \ -# yum update -y && \ -# yum install -y epel-release && \ -# yum install -y curl && \ -# yum install -y https://rpm.nodesource.com/pub_18.x/nodistro/repo/nodesource-release-nodistro-1.noarch.rpm && \ -# yum install -y nodejs --setopt=nodesource-nodejs.module_hotfixes=1 && \ -# npm install -g pm2@2.5.0 && \ -# yum install -y libreoffice && \ -# usermod -aG 5606 node && \ -# ln -s /webdata /app/webdata + npm run build -- --omit=dev && \ +mkdir -p /app/h5p && \ + unzip /app/docker/h5p.zip -d /app/h5p + +FROM node:18.18.2-alpine3.18 +# Copy source code and configuration files to the app directory COPY --from=node_builder /app/dist/ /app/dist/ +COPY --from=node_builder /app/h5p/ /app/h5p/ + 
+WORKDIR /app + +RUN apk add --no-cache \ + fontconfig \ + libreoffice \ + ttf-dejavu \ + ttf-freefont \ + ttf-liberation + COPY ./views /app/views COPY ./package.json /app COPY ./package-lock.json /app diff --git a/aoe-web-backend/docker/aoe-postgres/Dockerfile b/aoe-web-backend/docker/aoe-postgres/Dockerfile deleted file mode 100644 index 4cbde0aef..000000000 --- a/aoe-web-backend/docker/aoe-postgres/Dockerfile +++ /dev/null @@ -1,3 +0,0 @@ -FROM postgres:12 -RUN localedef -i fi_FI -c -f UTF-8 -A /usr/share/locale/locale.alias fi_FI.UTF-8 -ENV LANG fi_FI.utf8 diff --git a/aoe-web-backend/docker/aoe-postgres/init-user-db.sh b/aoe-web-backend/docker/aoe-postgres/init-user-db.sh deleted file mode 100644 index eb3a92eaa..000000000 --- a/aoe-web-backend/docker/aoe-postgres/init-user-db.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash -set -e - -psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL - GRANT ALL PRIVILEGES ON DATABASE aoe TO aoe_admin; -EOSQL diff --git a/aoe-web-backend/docker/aoe-redis-session/Dockerfile b/aoe-web-backend/docker/aoe-redis-session/Dockerfile deleted file mode 100644 index 5cb53514a..000000000 --- a/aoe-web-backend/docker/aoe-redis-session/Dockerfile +++ /dev/null @@ -1,9 +0,0 @@ -FROM redis:6.2.4-alpine - -WORKDIR /redis -COPY ./docker/aoe-redis-session/redis.conf /usr/local/etc/redis/redis.conf -COPY ./docker/aoe-redis-session/init.sh ./ - -RUN chmod +x init.sh - -EXPOSE 6379 diff --git a/aoe-web-backend/docker/aoe-redis-session/init.sh b/aoe-web-backend/docker/aoe-redis-session/init.sh deleted file mode 100644 index bfea59091..000000000 --- a/aoe-web-backend/docker/aoe-redis-session/init.sh +++ /dev/null @@ -1,31 +0,0 @@ -# WARNING you have Transparent Huge Pages (THP) support enabled in your kernel. -# This will create latency and memory usage issues with Redis. 
-# To fix this issue run the command 'echo never > /sys/kernel/mm/transparent_hugepage/enabled' as root, -# and add it to your /etc/rc.local in order to retain the setting after a reboot. -# Redis must be restarted after THP is disabled. - -echo never > /sys/kernel/mm/transparent_hugepage/enabled -echo never > /sys/kernel/mm/transparent_hugepage/defrag - -# WARNING: The TCP backlog setting of 511 cannot be enforced -# because /proc/sys/net/core/somaxconn is set to the lower value of 128. - -sysctl -w net.core.somaxconn=512 - -# WARNING overcommit_memory is set to 0! Background save may fail under low memory condition. -# To fix this issue add 'vm.overcommit_memory = 1' to /etc/sysctl.conf and then reboot -# or run the command 'sysctl vm.overcommit_memory=1' for this to take effect. -# The overcommit_memory has 3 options. -# 0, the system kernel check if there is enough memory to be allocated to the process or not, -# if not enough, it will return errors to the process. -# 1, the system kernel is allowed to allocate the whole memory to the process -# no matter what the status of memory is. -# 2, the system kernel is allowed to allocate a memory whose size could be bigger than -# the sum of the size of physical memory and the size of exchange workspace to the process. - -sysctl vm.overcommit_memory=1 - -# start redis server - -# redis-server /usr/local/etc/redis/redis.conf --bind 0.0.0.0 --appendonly yes --requirepass redis_session_secret -redis-server /usr/local/etc/redis/redis.conf --appendonly yes --requirepass redis_session_secret diff --git a/aoe-web-backend/docker/aoe-redis-session/redis.conf b/aoe-web-backend/docker/aoe-redis-session/redis.conf deleted file mode 100644 index 465728068..000000000 --- a/aoe-web-backend/docker/aoe-redis-session/redis.conf +++ /dev/null @@ -1,1317 +0,0 @@ -# Redis configuration file example. 
-# -# Note that in order to read the configuration file, Redis must be -# started with the file path as first argument: -# -# ./redis-server /path/to/redis.conf - -# Note on units: when memory size is needed, it is possible to specify -# it in the usual form of 1k 5GB 4M and so forth: -# -# 1k => 1000 bytes -# 1kb => 1024 bytes -# 1m => 1000000 bytes -# 1mb => 1024*1024 bytes -# 1g => 1000000000 bytes -# 1gb => 1024*1024*1024 bytes -# -# units are case insensitive so 1GB 1Gb 1gB are all the same. - -################################## INCLUDES ################################### - -# Include one or more other config files here. This is useful if you -# have a standard template that goes to all Redis servers but also need -# to customize a few per-server settings. Include files can include -# other files, so use this wisely. -# -# Notice option "include" won't be rewritten by command "CONFIG REWRITE" -# from admin or Redis Sentinel. Since Redis always uses the last processed -# line as value of a configuration directive, you'd better put includes -# at the beginning of this file to avoid overwriting config change at runtime. -# -# If instead you are interested in using includes to override configuration -# options, it is better to use include as the last line. -# -# include /path/to/local.conf -# include /path/to/other.conf - -################################## MODULES ##################################### - -# Load modules at startup. If the server is not able to load modules -# it will abort. It is possible to use multiple loadmodule directives. -# -# loadmodule /path/to/my_module.so -# loadmodule /path/to/other_module.so - -################################## NETWORK ##################################### - -# By default, if no "bind" configuration directive is specified, Redis listens -# for connections from all the network interfaces available on the server. 
-# It is possible to listen to just one or multiple selected interfaces using -# the "bind" configuration directive, followed by one or more IP addresses. -# -# Examples: -# -# bind 192.168.1.100 10.0.0.1 -# bind 127.0.0.1 ::1 -# -# ~~~ WARNING ~~~ If the computer running Redis is directly exposed to the -# internet, binding to all the interfaces is dangerous and will expose the -# instance to everybody on the internet. So by default we uncomment the -# following bind directive, that will force Redis to listen only into -# the IPv4 lookback interface address (this means Redis will be able to -# accept connections only from clients running into the same computer it -# is running). -# -# IF YOU ARE SURE YOU WANT YOUR INSTANCE TO LISTEN TO ALL THE INTERFACES -# JUST COMMENT THE FOLLOWING LINE. -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# bind 127.0.0.1 -bind 0.0.0.0 - -# Protected mode is a layer of security protection, in order to avoid that -# Redis instances left open on the internet are accessed and exploited. -# -# When protected mode is on and if: -# -# 1) The server is not binding explicitly to a set of addresses using the -# "bind" directive. -# 2) No password is configured. -# -# The server only accepts connections from clients connecting from the -# IPv4 and IPv6 loopback addresses 127.0.0.1 and ::1, and from Unix domain -# sockets. -# -# By default protected mode is enabled. You should disable it only if -# you are sure you want clients from other hosts to connect to Redis -# even if no authentication is configured, nor a specific set of interfaces -# are explicitly listed using the "bind" directive. -protected-mode no - -# Accept connections on the specified port, default is 6379 (IANA #815344). -# If port 0 is specified Redis will not listen on a TCP socket. -port 6379 - -# TCP listen() backlog. -# -# In high requests-per-second environments you need an high backlog in order -# to avoid slow clients connections issues. 
Note that the Linux kernel -# will silently truncate it to the value of /proc/sys/net/core/somaxconn so -# make sure to raise both the value of somaxconn and tcp_max_syn_backlog -# in order to get the desired effect. -tcp-backlog 511 - -# Unix socket. -# -# Specify the path for the Unix socket that will be used to listen for -# incoming connections. There is no default, so Redis will not listen -# on a unix socket when not specified. -# -# unixsocket /tmp/redis.sock -# unixsocketperm 700 - -# Close the connection after a client is idle for N seconds (0 to disable) -timeout 0 - -# TCP keepalive. -# -# If non-zero, use SO_KEEPALIVE to send TCP ACKs to clients in absence -# of communication. This is useful for two reasons: -# -# 1) Detect dead peers. -# 2) Take the connection alive from the point of view of network -# equipment in the middle. -# -# On Linux, the specified value (in seconds) is the period used to send ACKs. -# Note that to close the connection the double of the time is needed. -# On other kernels the period depends on the kernel configuration. -# -# A reasonable value for this option is 300 seconds, which is the new -# Redis default starting with Redis 3.2.1. -tcp-keepalive 300 - -################################# GENERAL ##################################### - -# By default Redis does not run as a daemon. Use 'yes' if you need it. -# Note that Redis will write a pid file in /var/run/redis.pid when daemonized. -daemonize no - -# If you run Redis from upstart or systemd, Redis can interact with your -# supervision tree. Options: -# supervised no - no supervision interaction -# supervised upstart - signal upstart by putting Redis into SIGSTOP mode -# supervised systemd - signal systemd by writing READY=1 to $NOTIFY_SOCKET -# supervised auto - detect upstart or systemd method based on -# UPSTART_JOB or NOTIFY_SOCKET environment variables -# Note: these supervision methods only signal "process is ready." 
-# They do not enable continuous liveness pings back to your supervisor. -supervised no - -# If a pid file is specified, Redis writes it where specified at startup -# and removes it at exit. -# -# When the server runs non daemonized, no pid file is created if none is -# specified in the configuration. When the server is daemonized, the pid file -# is used even if not specified, defaulting to "/var/run/redis.pid". -# -# Creating a pid file is best effort: if Redis is not able to create it -# nothing bad happens, the server will start and run normally. -pidfile /var/run/redis_6379.pid - -# Specify the server verbosity level. -# This can be one of: -# debug (a lot of information, useful for development/testing) -# verbose (many rarely useful info, but not a mess like the debug level) -# notice (moderately verbose, what you want in production probably) -# warning (only very important / critical messages are logged) -loglevel notice - -# Specify the log file name. Also the empty string can be used to force -# Redis to log on the standard output. Note that if you use standard -# output for logging but daemonize, logs will be sent to /dev/null -logfile "" - -# To enable logging to the system logger, just set 'syslog-enabled' to yes, -# and optionally update the other syslog parameters to suit your needs. -# syslog-enabled no - -# Specify the syslog identity. -# syslog-ident redis - -# Specify the syslog facility. Must be USER or between LOCAL0-LOCAL7. -# syslog-facility local0 - -# Set the number of databases. The default database is DB 0, you can select -# a different one on a per-connection basis using SELECT where -# dbid is a number between 0 and 'databases'-1 -databases 16 - -# By default Redis shows an ASCII art logo only when started to log to the -# standard output and if the standard output is a TTY. Basically this means -# that normally a logo is displayed only in interactive sessions. 
-# -# However it is possible to force the pre-4.0 behavior and always show a -# ASCII art logo in startup logs by setting the following option to yes. -always-show-logo yes - -################################ SNAPSHOTTING ################################ -# -# Save the DB on disk: -# -# save -# -# Will save the DB if both the given number of seconds and the given -# number of write operations against the DB occurred. -# -# In the example below the behaviour will be to save: -# after 900 sec (15 min) if at least 1 key changed -# after 300 sec (5 min) if at least 10 keys changed -# after 60 sec if at least 10000 keys changed -# -# Note: you can disable saving completely by commenting out all "save" lines. -# -# It is also possible to remove all the previously configured save -# points by adding a save directive with a single empty string argument -# like in the following example: -# -# save "" - -save 900 1 -save 300 10 -save 60 10000 - -# By default Redis will stop accepting writes if RDB snapshots are enabled -# (at least one save point) and the latest background save failed. -# This will make the user aware (in a hard way) that data is not persisting -# on disk properly, otherwise chances are that no one will notice and some -# disaster will happen. -# -# If the background saving process will start working again Redis will -# automatically allow writes again. -# -# However if you have setup your proper monitoring of the Redis server -# and persistence, you may want to disable this feature so that Redis will -# continue to work as usual even if there are problems with disk, -# permissions, and so forth. -stop-writes-on-bgsave-error yes - -# Compress string objects using LZF when dump .rdb databases? -# For default that's set to 'yes' as it's almost always a win. -# If you want to save some CPU in the saving child set it to 'no' but -# the dataset will likely be bigger if you have compressible values or keys. 
-rdbcompression yes - -# Since version 5 of RDB a CRC64 checksum is placed at the end of the file. -# This makes the format more resistant to corruption but there is a performance -# hit to pay (around 10%) when saving and loading RDB files, so you can disable it -# for maximum performances. -# -# RDB files created with checksum disabled have a checksum of zero that will -# tell the loading code to skip the check. -rdbchecksum yes - -# The filename where to dump the DB -dbfilename dump.rdb - -# The working directory. -# -# The DB will be written inside this directory, with the filename specified -# above using the 'dbfilename' configuration directive. -# -# The Append Only File will also be created inside this directory. -# -# Note that you must specify a directory here, not a file name. -dir ./ - -################################# REPLICATION ################################# - -# Master-Slave replication. Use slaveof to make a Redis instance a copy of -# another Redis server. A few things to understand ASAP about Redis replication. -# -# 1) Redis replication is asynchronous, but you can configure a master to -# stop accepting writes if it appears to be not connected with at least -# a given number of slaves. -# 2) Redis slaves are able to perform a partial resynchronization with the -# master if the replication link is lost for a relatively small amount of -# time. You may want to configure the replication backlog size (see the next -# sections of this file) with a sensible value depending on your needs. -# 3) Replication is automatic and does not need user intervention. After a -# network partition slaves automatically try to reconnect to masters -# and resynchronize with them. -# -# slaveof - -# If the master is password protected (using the "requirepass" configuration -# directive below) it is possible to tell the slave to authenticate before -# starting the replication synchronization process, otherwise the master will -# refuse the slave request. 
-# -# masterauth - -# When a slave loses its connection with the master, or when the replication -# is still in progress, the slave can act in two different ways: -# -# 1) if slave-serve-stale-data is set to 'yes' (the default) the slave will -# still reply to client requests, possibly with out of date data, or the -# data set may just be empty if this is the first synchronization. -# -# 2) if slave-serve-stale-data is set to 'no' the slave will reply with -# an error "SYNC with master in progress" to all the kind of commands -# but to INFO and SLAVEOF. -# -slave-serve-stale-data yes - -# You can configure a slave instance to accept writes or not. Writing against -# a slave instance may be useful to store some ephemeral data (because data -# written on a slave will be easily deleted after resync with the master) but -# may also cause problems if clients are writing to it because of a -# misconfiguration. -# -# Since Redis 2.6 by default slaves are read-only. -# -# Note: read only slaves are not designed to be exposed to untrusted clients -# on the internet. It's just a protection layer against misuse of the instance. -# Still a read only slave exports by default all the administrative commands -# such as CONFIG, DEBUG, and so forth. To a limited extent you can improve -# security of read only slaves using 'rename-command' to shadow all the -# administrative / dangerous commands. -slave-read-only yes - -# Replication SYNC strategy: disk or socket. -# -# ------------------------------------------------------- -# WARNING: DISKLESS REPLICATION IS EXPERIMENTAL CURRENTLY -# ------------------------------------------------------- -# -# New slaves and reconnecting slaves that are not able to continue the replication -# process just receiving differences, need to do what is called a "full -# synchronization". An RDB file is transmitted from the master to the slaves. 
-# The transmission can happen in two different ways: -# -# 1) Disk-backed: The Redis master creates a new process that writes the RDB -# file on disk. Later the file is transferred by the parent -# process to the slaves incrementally. -# 2) Diskless: The Redis master creates a new process that directly writes the -# RDB file to slave sockets, without touching the disk at all. -# -# With disk-backed replication, while the RDB file is generated, more slaves -# can be queued and served with the RDB file as soon as the current child producing -# the RDB file finishes its work. With diskless replication instead once -# the transfer starts, new slaves arriving will be queued and a new transfer -# will start when the current one terminates. -# -# When diskless replication is used, the master waits a configurable amount of -# time (in seconds) before starting the transfer in the hope that multiple slaves -# will arrive and the transfer can be parallelized. -# -# With slow disks and fast (large bandwidth) networks, diskless replication -# works better. -repl-diskless-sync no - -# When diskless replication is enabled, it is possible to configure the delay -# the server waits in order to spawn the child that transfers the RDB via socket -# to the slaves. -# -# This is important since once the transfer starts, it is not possible to serve -# new slaves arriving, that will be queued for the next RDB transfer, so the server -# waits a delay in order to let more slaves arrive. -# -# The delay is specified in seconds, and by default is 5 seconds. To disable -# it entirely just set it to 0 seconds and the transfer will start ASAP. -repl-diskless-sync-delay 5 - -# Slaves send PINGs to server in a predefined interval. It's possible to change -# this interval with the repl_ping_slave_period option. The default value is 10 -# seconds. 
-# -# repl-ping-slave-period 10 - -# The following option sets the replication timeout for: -# -# 1) Bulk transfer I/O during SYNC, from the point of view of slave. -# 2) Master timeout from the point of view of slaves (data, pings). -# 3) Slave timeout from the point of view of masters (REPLCONF ACK pings). -# -# It is important to make sure that this value is greater than the value -# specified for repl-ping-slave-period otherwise a timeout will be detected -# every time there is low traffic between the master and the slave. -# -# repl-timeout 60 - -# Disable TCP_NODELAY on the slave socket after SYNC? -# -# If you select "yes" Redis will use a smaller number of TCP packets and -# less bandwidth to send data to slaves. But this can add a delay for -# the data to appear on the slave side, up to 40 milliseconds with -# Linux kernels using a default configuration. -# -# If you select "no" the delay for data to appear on the slave side will -# be reduced but more bandwidth will be used for replication. -# -# By default we optimize for low latency, but in very high traffic conditions -# or when the master and slaves are many hops away, turning this to "yes" may -# be a good idea. -repl-disable-tcp-nodelay no - -# Set the replication backlog size. The backlog is a buffer that accumulates -# slave data when slaves are disconnected for some time, so that when a slave -# wants to reconnect again, often a full resync is not needed, but a partial -# resync is enough, just passing the portion of data the slave missed while -# disconnected. -# -# The bigger the replication backlog, the longer the time the slave can be -# disconnected and later be able to perform a partial resynchronization. -# -# The backlog is only allocated once there is at least a slave connected. -# -# repl-backlog-size 1mb - -# After a master has no longer connected slaves for some time, the backlog -# will be freed. 
The following option configures the amount of seconds that -# need to elapse, starting from the time the last slave disconnected, for -# the backlog buffer to be freed. -# -# Note that slaves never free the backlog for timeout, since they may be -# promoted to masters later, and should be able to correctly "partially -# resynchronize" with the slaves: hence they should always accumulate backlog. -# -# A value of 0 means to never release the backlog. -# -# repl-backlog-ttl 3600 - -# The slave priority is an integer number published by Redis in the INFO output. -# It is used by Redis Sentinel in order to select a slave to promote into a -# master if the master is no longer working correctly. -# -# A slave with a low priority number is considered better for promotion, so -# for instance if there are three slaves with priority 10, 100, 25 Sentinel will -# pick the one with priority 10, that is the lowest. -# -# However a special priority of 0 marks the slave as not able to perform the -# role of master, so a slave with priority of 0 will never be selected by -# Redis Sentinel for promotion. -# -# By default the priority is 100. -slave-priority 100 - -# It is possible for a master to stop accepting writes if there are less than -# N slaves connected, having a lag less or equal than M seconds. -# -# The N slaves need to be in "online" state. -# -# The lag in seconds, that must be <= the specified value, is calculated from -# the last ping received from the slave, that is usually sent every second. -# -# This option does not GUARANTEE that N replicas will accept the write, but -# will limit the window of exposure for lost writes in case not enough slaves -# are available, to the specified number of seconds. -# -# For example to require at least 3 slaves with a lag <= 10 seconds use: -# -# min-slaves-to-write 3 -# min-slaves-max-lag 10 -# -# Setting one or the other to 0 disables the feature. 
-# -# By default min-slaves-to-write is set to 0 (feature disabled) and -# min-slaves-max-lag is set to 10. - -# A Redis master is able to list the address and port of the attached -# slaves in different ways. For example the "INFO replication" section -# offers this information, which is used, among other tools, by -# Redis Sentinel in order to discover slave instances. -# Another place where this info is available is in the output of the -# "ROLE" command of a master. -# -# The listed IP and address normally reported by a slave is obtained -# in the following way: -# -# IP: The address is auto detected by checking the peer address -# of the socket used by the slave to connect with the master. -# -# Port: The port is communicated by the slave during the replication -# handshake, and is normally the port that the slave is using to -# list for connections. -# -# However when port forwarding or Network Address Translation (NAT) is -# used, the slave may be actually reachable via different IP and port -# pairs. The following two options can be used by a slave in order to -# report to its master a specific set of IP and port, so that both INFO -# and ROLE will report those values. -# -# There is no need to use both the options if you need to override just -# the port or the IP address. -# -# slave-announce-ip 5.5.5.5 -# slave-announce-port 1234 - -################################## SECURITY ################################### - -# Require clients to issue AUTH before processing any other -# commands. This might be useful in environments in which you do not trust -# others with access to the host running redis-server. -# -# This should stay commented out for backward compatibility and because most -# people do not need auth (e.g. they run their own servers). -# -# Warning: since Redis is pretty fast an outside user can try up to -# 150k passwords per second against a good box. 
This means that you should -# use a very strong password otherwise it will be very easy to break. -# -# requirepass foobared - -# Command renaming. -# -# It is possible to change the name of dangerous commands in a shared -# environment. For instance the CONFIG command may be renamed into something -# hard to guess so that it will still be available for internal-use tools -# but not available for general clients. -# -# Example: -# -# rename-command CONFIG b840fc02d524045429941cc15f59e41cb7be6c52 -# -# It is also possible to completely kill a command by renaming it into -# an empty string: -# -# rename-command CONFIG "" -# -# Please note that changing the name of commands that are logged into the -# AOF file or transmitted to slaves may cause problems. - -################################### CLIENTS #################################### - -# Set the max number of connected clients at the same time. By default -# this limit is set to 10000 clients, however if the Redis server is not -# able to configure the process file limit to allow for the specified limit -# the max number of allowed clients is set to the current file limit -# minus 32 (as Redis reserves a few file descriptors for internal uses). -# -# Once the limit is reached Redis will close all the new connections sending -# an error 'max number of clients reached'. -# -# maxclients 10000 - -############################## MEMORY MANAGEMENT ################################ - -# Set a memory usage limit to the specified amount of bytes. -# When the memory limit is reached Redis will try to remove keys -# according to the eviction policy selected (see maxmemory-policy). -# -# If Redis can't remove keys according to the policy, or if the policy is -# set to 'noeviction', Redis will start to reply with errors to commands -# that would use more memory, like SET, LPUSH, and so on, and will continue -# to reply to read-only commands like GET. 
-# -# This option is usually useful when using Redis as an LRU or LFU cache, or to -# set a hard memory limit for an instance (using the 'noeviction' policy). -# -# WARNING: If you have slaves attached to an instance with maxmemory on, -# the size of the output buffers needed to feed the slaves are subtracted -# from the used memory count, so that network problems / resyncs will -# not trigger a loop where keys are evicted, and in turn the output -# buffer of slaves is full with DELs of keys evicted triggering the deletion -# of more keys, and so forth until the database is completely emptied. -# -# In short... if you have slaves attached it is suggested that you set a lower -# limit for maxmemory so that there is some free RAM on the system for slave -# output buffers (but this is not needed if the policy is 'noeviction'). -# -# maxmemory - -# MAXMEMORY POLICY: how Redis will select what to remove when maxmemory -# is reached. You can select among five behaviors: -# -# volatile-lru -> Evict using approximated LRU among the keys with an expire set. -# allkeys-lru -> Evict any key using approximated LRU. -# volatile-lfu -> Evict using approximated LFU among the keys with an expire set. -# allkeys-lfu -> Evict any key using approximated LFU. -# volatile-random -> Remove a random key among the ones with an expire set. -# allkeys-random -> Remove a random key, any key. -# volatile-ttl -> Remove the key with the nearest expire time (minor TTL) -# noeviction -> Don't evict anything, just return an error on write operations. -# -# LRU means Least Recently Used -# LFU means Least Frequently Used -# -# Both LRU, LFU and volatile-ttl are implemented using approximated -# randomized algorithms. -# -# Note: with any of the above policies, Redis will return an error on write -# operations, when there are no suitable keys for eviction. 
-# -# At the date of writing these commands are: set setnx setex append -# incr decr rpush lpush rpushx lpushx linsert lset rpoplpush sadd -# sinter sinterstore sunion sunionstore sdiff sdiffstore zadd zincrby -# zunionstore zinterstore hset hsetnx hmset hincrby incrby decrby -# getset mset msetnx exec sort -# -# The default is: -# -# maxmemory-policy noeviction - -# LRU, LFU and minimal TTL algorithms are not precise algorithms but approximated -# algorithms (in order to save memory), so you can tune it for speed or -# accuracy. For default Redis will check five keys and pick the one that was -# used less recently, you can change the sample size using the following -# configuration directive. -# -# The default of 5 produces good enough results. 10 Approximates very closely -# true LRU but costs more CPU. 3 is faster but not very accurate. -# -# maxmemory-samples 5 - -############################# LAZY FREEING #################################### - -# Redis has two primitives to delete keys. One is called DEL and is a blocking -# deletion of the object. It means that the server stops processing new commands -# in order to reclaim all the memory associated with an object in a synchronous -# way. If the key deleted is associated with a small object, the time needed -# in order to execute the DEL command is very small and comparable to most other -# O(1) or O(log_N) commands in Redis. However if the key is associated with an -# aggregated value containing millions of elements, the server can block for -# a long time (even seconds) in order to complete the operation. -# -# For the above reasons Redis also offers non blocking deletion primitives -# such as UNLINK (non blocking DEL) and the ASYNC option of FLUSHALL and -# FLUSHDB commands, in order to reclaim memory in background. Those commands -# are executed in constant time. Another thread will incrementally free the -# object in the background as fast as possible. 
-# -# DEL, UNLINK and ASYNC option of FLUSHALL and FLUSHDB are user-controlled. -# It's up to the design of the application to understand when it is a good -# idea to use one or the other. However the Redis server sometimes has to -# delete keys or flush the whole database as a side effect of other operations. -# Specifically Redis deletes objects independently of a user call in the -# following scenarios: -# -# 1) On eviction, because of the maxmemory and maxmemory policy configurations, -# in order to make room for new data, without going over the specified -# memory limit. -# 2) Because of expire: when a key with an associated time to live (see the -# EXPIRE command) must be deleted from memory. -# 3) Because of a side effect of a command that stores data on a key that may -# already exist. For example the RENAME command may delete the old key -# content when it is replaced with another one. Similarly SUNIONSTORE -# or SORT with STORE option may delete existing keys. The SET command -# itself removes any old content of the specified key in order to replace -# it with the specified string. -# 4) During replication, when a slave performs a full resynchronization with -# its master, the content of the whole database is removed in order to -# load the RDB file just transfered. -# -# In all the above cases the default is to delete objects in a blocking way, -# like if DEL was called. However you can configure each case specifically -# in order to instead release memory in a non-blocking way like if UNLINK -# was called, using the following configuration directives: - -lazyfree-lazy-eviction no -lazyfree-lazy-expire no -lazyfree-lazy-server-del no -slave-lazy-flush no - -############################## APPEND ONLY MODE ############################### - -# By default Redis asynchronously dumps the dataset on disk. 
This mode is -# good enough in many applications, but an issue with the Redis process or -# a power outage may result into a few minutes of writes lost (depending on -# the configured save points). -# -# The Append Only File is an alternative persistence mode that provides -# much better durability. For instance using the default data fsync policy -# (see later in the config file) Redis can lose just one second of writes in a -# dramatic event like a server power outage, or a single write if something -# wrong with the Redis process itself happens, but the operating system is -# still running correctly. -# -# AOF and RDB persistence can be enabled at the same time without problems. -# If the AOF is enabled on startup Redis will load the AOF, that is the file -# with the better durability guarantees. -# -# Please check http://redis.io/topics/persistence for more information. - -appendonly no - -# The name of the append only file (default: "appendonly.aof") - -appendfilename "appendonly.aof" - -# The fsync() call tells the Operating System to actually write data on disk -# instead of waiting for more data in the output buffer. Some OS will really flush -# data on disk, some other OS will just try to do it ASAP. -# -# Redis supports three different modes: -# -# no: don't fsync, just let the OS flush the data when it wants. Faster. -# always: fsync after every write to the append only log. Slow, Safest. -# everysec: fsync only one time every second. Compromise. -# -# The default is "everysec", as that's usually the right compromise between -# speed and data safety. It's up to you to understand if you can relax this to -# "no" that will let the operating system flush the output buffer when -# it wants, for better performances (but if you can live with the idea of -# some data loss consider the default persistence mode that's snapshotting), -# or on the contrary, use "always" that's very slow but a bit safer than -# everysec. 
-# -# More details please check the following article: -# http://antirez.com/post/redis-persistence-demystified.html -# -# If unsure, use "everysec". - -# appendfsync always -appendfsync everysec -# appendfsync no - -# When the AOF fsync policy is set to always or everysec, and a background -# saving process (a background save or AOF log background rewriting) is -# performing a lot of I/O against the disk, in some Linux configurations -# Redis may block too long on the fsync() call. Note that there is no fix for -# this currently, as even performing fsync in a different thread will block -# our synchronous write(2) call. -# -# In order to mitigate this problem it's possible to use the following option -# that will prevent fsync() from being called in the main process while a -# BGSAVE or BGREWRITEAOF is in progress. -# -# This means that while another child is saving, the durability of Redis is -# the same as "appendfsync none". In practical terms, this means that it is -# possible to lose up to 30 seconds of log in the worst scenario (with the -# default Linux settings). -# -# If you have latency problems turn this to "yes". Otherwise leave it as -# "no" that is the safest pick from the point of view of durability. - -no-appendfsync-on-rewrite no - -# Automatic rewrite of the append only file. -# Redis is able to automatically rewrite the log file implicitly calling -# BGREWRITEAOF when the AOF log size grows by the specified percentage. -# -# This is how it works: Redis remembers the size of the AOF file after the -# latest rewrite (if no rewrite has happened since the restart, the size of -# the AOF at startup is used). -# -# This base size is compared to the current size. If the current size is -# bigger than the specified percentage, the rewrite is triggered. Also -# you need to specify a minimal size for the AOF file to be rewritten, this -# is useful to avoid rewriting the AOF file even if the percentage increase -# is reached but it is still pretty small. 
-# -# Specify a percentage of zero in order to disable the automatic AOF -# rewrite feature. - -auto-aof-rewrite-percentage 100 -auto-aof-rewrite-min-size 64mb - -# An AOF file may be found to be truncated at the end during the Redis -# startup process, when the AOF data gets loaded back into memory. -# This may happen when the system where Redis is running -# crashes, especially when an ext4 filesystem is mounted without the -# data=ordered option (however this can't happen when Redis itself -# crashes or aborts but the operating system still works correctly). -# -# Redis can either exit with an error when this happens, or load as much -# data as possible (the default now) and start if the AOF file is found -# to be truncated at the end. The following option controls this behavior. -# -# If aof-load-truncated is set to yes, a truncated AOF file is loaded and -# the Redis server starts emitting a log to inform the user of the event. -# Otherwise if the option is set to no, the server aborts with an error -# and refuses to start. When the option is set to no, the user requires -# to fix the AOF file using the "redis-check-aof" utility before to restart -# the server. -# -# Note that if the AOF file will be found to be corrupted in the middle -# the server will still exit with an error. This option only applies when -# Redis will try to read more data from the AOF file but not enough bytes -# will be found. -aof-load-truncated yes - -# When rewriting the AOF file, Redis is able to use an RDB preamble in the -# AOF file for faster rewrites and recoveries. When this option is turned -# on the rewritten AOF file is composed of two different stanzas: -# -# [RDB file][AOF tail] -# -# When loading Redis recognizes that the AOF file starts with the "REDIS" -# string and loads the prefixed RDB file, and continues loading the AOF -# tail. 
-# -# This is currently turned off by default in order to avoid the surprise -# of a format change, but will at some point be used as the default. -aof-use-rdb-preamble no - -################################ LUA SCRIPTING ############################### - -# Max execution time of a Lua script in milliseconds. -# -# If the maximum execution time is reached Redis will log that a script is -# still in execution after the maximum allowed time and will start to -# reply to queries with an error. -# -# When a long running script exceeds the maximum execution time only the -# SCRIPT KILL and SHUTDOWN NOSAVE commands are available. The first can be -# used to stop a script that did not yet called write commands. The second -# is the only way to shut down the server in the case a write command was -# already issued by the script but the user doesn't want to wait for the natural -# termination of the script. -# -# Set it to 0 or a negative value for unlimited execution without warnings. -lua-time-limit 5000 - -################################ REDIS CLUSTER ############################### -# -# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -# WARNING EXPERIMENTAL: Redis Cluster is considered to be stable code, however -# in order to mark it as "mature" we need to wait for a non trivial percentage -# of users to deploy it in production. -# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -# -# Normal Redis instances can't be part of a Redis Cluster; only nodes that are -# started as cluster nodes can. In order to start a Redis instance as a -# cluster node enable the cluster support uncommenting the following: -# -# cluster-enabled yes - -# Every cluster node has a cluster configuration file. This file is not -# intended to be edited by hand. It is created and updated by Redis nodes. -# Every Redis Cluster node requires a different cluster configuration file. 
-# Make sure that instances running in the same system do not have -# overlapping cluster configuration file names. -# -# cluster-config-file nodes-6379.conf - -# Cluster node timeout is the amount of milliseconds a node must be unreachable -# for it to be considered in failure state. -# Most other internal time limits are multiple of the node timeout. -# -# cluster-node-timeout 15000 - -# A slave of a failing master will avoid to start a failover if its data -# looks too old. -# -# There is no simple way for a slave to actually have an exact measure of -# its "data age", so the following two checks are performed: -# -# 1) If there are multiple slaves able to failover, they exchange messages -# in order to try to give an advantage to the slave with the best -# replication offset (more data from the master processed). -# Slaves will try to get their rank by offset, and apply to the start -# of the failover a delay proportional to their rank. -# -# 2) Every single slave computes the time of the last interaction with -# its master. This can be the last ping or command received (if the master -# is still in the "connected" state), or the time that elapsed since the -# disconnection with the master (if the replication link is currently down). -# If the last interaction is too old, the slave will not try to failover -# at all. -# -# The point "2" can be tuned by user. Specifically a slave will not perform -# the failover if, since the last interaction with the master, the time -# elapsed is greater than: -# -# (node-timeout * slave-validity-factor) + repl-ping-slave-period -# -# So for example if node-timeout is 30 seconds, and the slave-validity-factor -# is 10, and assuming a default repl-ping-slave-period of 10 seconds, the -# slave will not try to failover if it was not able to talk with the master -# for longer than 310 seconds. 
-# -# A large slave-validity-factor may allow slaves with too old data to failover -# a master, while a too small value may prevent the cluster from being able to -# elect a slave at all. -# -# For maximum availability, it is possible to set the slave-validity-factor -# to a value of 0, which means, that slaves will always try to failover the -# master regardless of the last time they interacted with the master. -# (However they'll always try to apply a delay proportional to their -# offset rank). -# -# Zero is the only value able to guarantee that when all the partitions heal -# the cluster will always be able to continue. -# -# cluster-slave-validity-factor 10 - -# Cluster slaves are able to migrate to orphaned masters, that are masters -# that are left without working slaves. This improves the cluster ability -# to resist to failures as otherwise an orphaned master can't be failed over -# in case of failure if it has no working slaves. -# -# Slaves migrate to orphaned masters only if there are still at least a -# given number of other working slaves for their old master. This number -# is the "migration barrier". A migration barrier of 1 means that a slave -# will migrate only if there is at least 1 other working slave for its master -# and so forth. It usually reflects the number of slaves you want for every -# master in your cluster. -# -# Default is 1 (slaves migrate only if their masters remain with at least -# one slave). To disable migration just set it to a very large value. -# A value of 0 can be set but is useful only for debugging and dangerous -# in production. -# -# cluster-migration-barrier 1 - -# By default Redis Cluster nodes stop accepting queries if they detect there -# is at least an hash slot uncovered (no available node is serving it). -# This way if the cluster is partially down (for example a range of hash slots -# are no longer covered) all the cluster becomes, eventually, unavailable. 
-# It automatically returns available as soon as all the slots are covered again. -# -# However sometimes you want the subset of the cluster which is working, -# to continue to accept queries for the part of the key space that is still -# covered. In order to do so, just set the cluster-require-full-coverage -# option to no. -# -# cluster-require-full-coverage yes - -# This option, when set to yes, prevents slaves from trying to failover its -# master during master failures. However the master can still perform a -# manual failover, if forced to do so. -# -# This is useful in different scenarios, especially in the case of multiple -# data center operations, where we want one side to never be promoted if not -# in the case of a total DC failure. -# -# cluster-slave-no-failover no - -# In order to setup your cluster make sure to read the documentation -# available at http://redis.io web site. - -########################## CLUSTER DOCKER/NAT support ######################## - -# In certain deployments, Redis Cluster nodes address discovery fails, because -# addresses are NAT-ted or because ports are forwarded (the typical case is -# Docker and other containers). -# -# In order to make Redis Cluster working in such environments, a static -# configuration where each node knows its public address is needed. The -# following two options are used for this scope, and are: -# -# * cluster-announce-ip -# * cluster-announce-port -# * cluster-announce-bus-port -# -# Each instruct the node about its address, client port, and cluster message -# bus port. The information is then published in the header of the bus packets -# so that other nodes will be able to correctly map the address of the node -# publishing the information. -# -# If the above options are not used, the normal Redis Cluster auto-detection -# will be used instead. 
-# -# Note that when remapped, the bus port may not be at the fixed offset of -# clients port + 10000, so you can specify any port and bus-port depending -# on how they get remapped. If the bus-port is not set, a fixed offset of -# 10000 will be used as usually. -# -# Example: -# -# cluster-announce-ip 10.1.1.5 -# cluster-announce-port 6379 -# cluster-announce-bus-port 6380 - -################################## SLOW LOG ################################### - -# The Redis Slow Log is a system to log queries that exceeded a specified -# execution time. The execution time does not include the I/O operations -# like talking with the client, sending the reply and so forth, -# but just the time needed to actually execute the command (this is the only -# stage of command execution where the thread is blocked and can not serve -# other requests in the meantime). -# -# You can configure the slow log with two parameters: one tells Redis -# what is the execution time, in microseconds, to exceed in order for the -# command to get logged, and the other parameter is the length of the -# slow log. When a new command is logged the oldest one is removed from the -# queue of logged commands. - -# The following time is expressed in microseconds, so 1000000 is equivalent -# to one second. Note that a negative number disables the slow log, while -# a value of zero forces the logging of every command. -slowlog-log-slower-than 10000 - -# There is no limit to this length. Just be aware that it will consume memory. -# You can reclaim memory used by the slow log with SLOWLOG RESET. -slowlog-max-len 128 - -################################ LATENCY MONITOR ############################## - -# The Redis latency monitoring subsystem samples different operations -# at runtime in order to collect data related to possible sources of -# latency of a Redis instance. -# -# Via the LATENCY command this information is available to the user that can -# print graphs and obtain reports. 
-# -# The system only logs operations that were performed in a time equal or -# greater than the amount of milliseconds specified via the -# latency-monitor-threshold configuration directive. When its value is set -# to zero, the latency monitor is turned off. -# -# By default latency monitoring is disabled since it is mostly not needed -# if you don't have latency issues, and collecting data has a performance -# impact, that while very small, can be measured under big load. Latency -# monitoring can easily be enabled at runtime using the command -# "CONFIG SET latency-monitor-threshold " if needed. -latency-monitor-threshold 0 - -############################# EVENT NOTIFICATION ############################## - -# Redis can notify Pub/Sub clients about events happening in the key space. -# This feature is documented at http://redis.io/topics/notifications -# -# For instance if keyspace events notification is enabled, and a client -# performs a DEL operation on key "foo" stored in the Database 0, two -# messages will be published via Pub/Sub: -# -# PUBLISH __keyspace@0__:foo del -# PUBLISH __keyevent@0__:del foo -# -# It is possible to select the events that Redis will notify among a set -# of classes. Every class is identified by a single character: -# -# K Keyspace events, published with __keyspace@__ prefix. -# E Keyevent events, published with __keyevent@__ prefix. -# g Generic commands (non-type specific) like DEL, EXPIRE, RENAME, ... -# $ String commands -# l List commands -# s Set commands -# h Hash commands -# z Sorted set commands -# x Expired events (events generated every time a key expires) -# e Evicted events (events generated when a key is evicted for maxmemory) -# A Alias for g$lshzxe, so that the "AKE" string means all the events. -# -# The "notify-keyspace-events" takes as argument a string that is composed -# of zero or multiple characters. The empty string means that notifications -# are disabled. 
-# -# Example: to enable list and generic events, from the point of view of the -# event name, use: -# -# notify-keyspace-events Elg -# -# Example 2: to get the stream of the expired keys subscribing to channel -# name __keyevent@0__:expired use: -# -# notify-keyspace-events Ex -# -# By default all notifications are disabled because most users don't need -# this feature and the feature has some overhead. Note that if you don't -# specify at least one of K or E, no events will be delivered. -notify-keyspace-events "" - -############################### ADVANCED CONFIG ############################### - -# Hashes are encoded using a memory efficient data structure when they have a -# small number of entries, and the biggest entry does not exceed a given -# threshold. These thresholds can be configured using the following directives. -hash-max-ziplist-entries 512 -hash-max-ziplist-value 64 - -# Lists are also encoded in a special way to save a lot of space. -# The number of entries allowed per internal list node can be specified -# as a fixed maximum size or a maximum number of elements. -# For a fixed maximum size, use -5 through -1, meaning: -# -5: max size: 64 Kb <-- not recommended for normal workloads -# -4: max size: 32 Kb <-- not recommended -# -3: max size: 16 Kb <-- probably not recommended -# -2: max size: 8 Kb <-- good -# -1: max size: 4 Kb <-- good -# Positive numbers mean store up to _exactly_ that number of elements -# per list node. -# The highest performing option is usually -2 (8 Kb size) or -1 (4 Kb size), -# but if your use case is unique, adjust the settings as necessary. -list-max-ziplist-size -2 - -# Lists may also be compressed. -# Compress depth is the number of quicklist ziplist nodes from *each* side of -# the list to *exclude* from compression. The head and tail of the list -# are always uncompressed for fast push/pop operations. 
Settings are: -# 0: disable all list compression -# 1: depth 1 means "don't start compressing until after 1 node into the list, -# going from either the head or tail" -# So: [head]->node->node->...->node->[tail] -# [head], [tail] will always be uncompressed; inner nodes will compress. -# 2: [head]->[next]->node->node->...->node->[prev]->[tail] -# 2 here means: don't compress head or head->next or tail->prev or tail, -# but compress all nodes between them. -# 3: [head]->[next]->[next]->node->node->...->node->[prev]->[prev]->[tail] -# etc. -list-compress-depth 0 - -# Sets have a special encoding in just one case: when a set is composed -# of just strings that happen to be integers in radix 10 in the range -# of 64 bit signed integers. -# The following configuration setting sets the limit in the size of the -# set in order to use this special memory saving encoding. -set-max-intset-entries 512 - -# Similarly to hashes and lists, sorted sets are also specially encoded in -# order to save a lot of space. This encoding is only used when the length and -# elements of a sorted set are below the following limits: -zset-max-ziplist-entries 128 -zset-max-ziplist-value 64 - -# HyperLogLog sparse representation bytes limit. The limit includes the -# 16 bytes header. When an HyperLogLog using the sparse representation crosses -# this limit, it is converted into the dense representation. -# -# A value greater than 16000 is totally useless, since at that point the -# dense representation is more memory efficient. -# -# The suggested value is ~ 3000 in order to have the benefits of -# the space efficient encoding without slowing down too much PFADD, -# which is O(N) with the sparse encoding. The value can be raised to -# ~ 10000 when CPU is not a concern, but space is, and the data set is -# composed of many HyperLogLogs with cardinality in the 0 - 15000 range. 
-hll-sparse-max-bytes 3000 - -# Active rehashing uses 1 millisecond every 100 milliseconds of CPU time in -# order to help rehashing the main Redis hash table (the one mapping top-level -# keys to values). The hash table implementation Redis uses (see dict.c) -# performs a lazy rehashing: the more operation you run into a hash table -# that is rehashing, the more rehashing "steps" are performed, so if the -# server is idle the rehashing is never complete and some more memory is used -# by the hash table. -# -# The default is to use this millisecond 10 times every second in order to -# actively rehash the main dictionaries, freeing memory when possible. -# -# If unsure: -# use "activerehashing no" if you have hard latency requirements and it is -# not a good thing in your environment that Redis can reply from time to time -# to queries with 2 milliseconds delay. -# -# use "activerehashing yes" if you don't have such hard requirements but -# want to free memory asap when possible. -activerehashing yes - -# The client output buffer limits can be used to force disconnection of clients -# that are not reading data from the server fast enough for some reason (a -# common reason is that a Pub/Sub client can't consume messages as fast as the -# publisher can produce them). -# -# The limit can be set differently for the three different classes of clients: -# -# normal -> normal clients including MONITOR clients -# slave -> slave clients -# pubsub -> clients subscribed to at least one pubsub channel or pattern -# -# The syntax of every client-output-buffer-limit directive is the following: -# -# client-output-buffer-limit -# -# A client is immediately disconnected once the hard limit is reached, or if -# the soft limit is reached and remains reached for the specified number of -# seconds (continuously). 
-# So for instance if the hard limit is 32 megabytes and the soft limit is -# 16 megabytes / 10 seconds, the client will get disconnected immediately -# if the size of the output buffers reach 32 megabytes, but will also get -# disconnected if the client reaches 16 megabytes and continuously overcomes -# the limit for 10 seconds. -# -# By default normal clients are not limited because they don't receive data -# without asking (in a push way), but just after a request, so only -# asynchronous clients may create a scenario where data is requested faster -# than it can read. -# -# Instead there is a default limit for pubsub and slave clients, since -# subscribers and slaves receive data in a push fashion. -# -# Both the hard or the soft limit can be disabled by setting them to zero. -client-output-buffer-limit normal 0 0 0 -client-output-buffer-limit slave 256mb 64mb 60 -client-output-buffer-limit pubsub 32mb 8mb 60 - -# Client query buffers accumulate new commands. They are limited to a fixed -# amount by default in order to avoid that a protocol desynchronization (for -# instance due to a bug in the client) will lead to unbound memory usage in -# the query buffer. However you can configure it here if you have very special -# needs, such us huge multi/exec requests or alike. -# -# client-query-buffer-limit 1gb - -# In the Redis protocol, bulk requests, that are, elements representing single -# strings, are normally limited ot 512 mb. However you can change this limit -# here. -# -# proto-max-bulk-len 512mb - -# Redis calls an internal function to perform many background tasks, like -# closing connections of clients in timeout, purging expired keys that are -# never requested, and so forth. -# -# Not all tasks are performed with the same frequency, but Redis checks for -# tasks to perform according to the specified "hz" value. -# -# By default "hz" is set to 10. 
Raising the value will use more CPU when -# Redis is idle, but at the same time will make Redis more responsive when -# there are many keys expiring at the same time, and timeouts may be -# handled with more precision. -# -# The range is between 1 and 500, however a value over 100 is usually not -# a good idea. Most users should use the default of 10 and raise this up to -# 100 only in environments where very low latency is required. -hz 10 - -# When a child rewrites the AOF file, if the following option is enabled -# the file will be fsync-ed every 32 MB of data generated. This is useful -# in order to commit the file to the disk more incrementally and avoid -# big latency spikes. -aof-rewrite-incremental-fsync yes - -# Redis LFU eviction (see maxmemory setting) can be tuned. However it is a good -# idea to start with the default settings and only change them after investigating -# how to improve the performances and how the keys LFU change over time, which -# is possible to inspect via the OBJECT FREQ command. -# -# There are two tunable parameters in the Redis LFU implementation: the -# counter logarithm factor and the counter decay time. It is important to -# understand what the two parameters mean before changing them. -# -# The LFU counter is just 8 bits per key, it's maximum value is 255, so Redis -# uses a probabilistic increment with logarithmic behavior. Given the value -# of the old counter, when a key is accessed, the counter is incremented in -# this way: -# -# 1. A random number R between 0 and 1 is extracted. -# 2. A probability P is calculated as 1/(old_value*lfu_log_factor+1). -# 3. The counter is incremented only if R < P. -# -# The default lfu-log-factor is 10. 
This is a table of how the frequency -# counter changes with a different number of accesses with different -# logarithmic factors: -# -# +--------+------------+------------+------------+------------+------------+ -# | factor | 100 hits | 1000 hits | 100K hits | 1M hits | 10M hits | -# +--------+------------+------------+------------+------------+------------+ -# | 0 | 104 | 255 | 255 | 255 | 255 | -# +--------+------------+------------+------------+------------+------------+ -# | 1 | 18 | 49 | 255 | 255 | 255 | -# +--------+------------+------------+------------+------------+------------+ -# | 10 | 10 | 18 | 142 | 255 | 255 | -# +--------+------------+------------+------------+------------+------------+ -# | 100 | 8 | 11 | 49 | 143 | 255 | -# +--------+------------+------------+------------+------------+------------+ -# -# NOTE: The above table was obtained by running the following commands: -# -# redis-benchmark -n 1000000 incr foo -# redis-cli object freq foo -# -# NOTE 2: The counter initial value is 5 in order to give new objects a chance -# to accumulate hits. -# -# The counter decay time is the time, in minutes, that must elapse in order -# for the key counter to be divided by two (or decremented if it has a value -# less <= 10). -# -# The default value for the lfu-decay-time is 1. A Special value of 0 means to -# decay the counter every time it happens to be scanned. -# -# lfu-log-factor 10 -# lfu-decay-time 1 - -########################### ACTIVE DEFRAGMENTATION ####################### -# -# WARNING THIS FEATURE IS EXPERIMENTAL. However it was stress tested -# even in production and manually tested by multiple engineers for some -# time. -# -# What is active defragmentation? -# ------------------------------- -# -# Active (online) defragmentation allows a Redis server to compact the -# spaces left between small allocations and deallocations of data in memory, -# thus allowing to reclaim back memory. 
-# -# Fragmentation is a natural process that happens with every allocator (but -# less so with Jemalloc, fortunately) and certain workloads. Normally a server -# restart is needed in order to lower the fragmentation, or at least to flush -# away all the data and create it again. However thanks to this feature -# implemented by Oran Agra for Redis 4.0 this process can happen at runtime -# in an "hot" way, while the server is running. -# -# Basically when the fragmentation is over a certain level (see the -# configuration options below) Redis will start to create new copies of the -# values in contiguous memory regions by exploiting certain specific Jemalloc -# features (in order to understand if an allocation is causing fragmentation -# and to allocate it in a better place), and at the same time, will release the -# old copies of the data. This process, repeated incrementally for all the keys -# will cause the fragmentation to drop back to normal values. -# -# Important things to understand: -# -# 1. This feature is disabled by default, and only works if you compiled Redis -# to use the copy of Jemalloc we ship with the source code of Redis. -# This is the default with Linux builds. -# -# 2. You never need to enable this feature if you don't have fragmentation -# issues. -# -# 3. Once you experience fragmentation, you can enable this feature when -# needed with the command "CONFIG SET activedefrag yes". -# -# The configuration parameters are able to fine tune the behavior of the -# defragmentation process. If you are not sure about what they mean it is -# a good idea to leave the defaults untouched. 
- -# Enabled active defragmentation -# activedefrag yes - -# Minimum amount of fragmentation waste to start active defrag -# active-defrag-ignore-bytes 100mb - -# Minimum percentage of fragmentation to start active defrag -# active-defrag-threshold-lower 10 - -# Maximum percentage of fragmentation at which we use maximum effort -# active-defrag-threshold-upper 100 - -# Minimal effort for defrag in CPU percentage -# active-defrag-cycle-min 25 - -# Maximal effort for defrag in CPU percentage -# active-defrag-cycle-max 75 diff --git a/aoe-web-backend/docker/h5p.zip b/aoe-web-backend/docker/h5p.zip new file mode 100644 index 000000000..c01d2cc8a Binary files /dev/null and b/aoe-web-backend/docker/h5p.zip differ diff --git a/aoe-web-backend/package-lock.json b/aoe-web-backend/package-lock.json index cf154573f..e372c6ac8 100644 --- a/aoe-web-backend/package-lock.json +++ b/aoe-web-backend/package-lock.json @@ -9,10 +9,11 @@ "version": "11.2.0", "license": "MIT", "dependencies": { - "@elastic/elasticsearch": "^7.9.1", "@lumieducation/h5p-express": "^9.3.2", "@lumieducation/h5p-server": "^9.3.2", + "@opensearch-project/opensearch": "^2.13.0", "@types/cookie-session": "^2.0.49", + "aws-msk-iam-sasl-signer-js": "^1.0.0", "aws-sdk": "^2.789.0", "bluebird": "^3.7.2", "body-parser": "^1.19.0", @@ -316,6 +317,574 @@ "node": ">=14.0.0" } }, + "node_modules/@aws-sdk/client-cognito-identity": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-cognito-identity/-/client-cognito-identity-3.699.0.tgz", + "integrity": "sha512-9tFt+we6AIvj/f1+nrLHuCWcQmyfux5gcBSOy9d9+zIG56YxGEX7S9TaZnybogpVV8A0BYWml36WvIHS9QjIpA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/client-sso-oidc": "3.699.0", + "@aws-sdk/client-sts": "3.699.0", + "@aws-sdk/core": "3.696.0", + "@aws-sdk/credential-provider-node": "3.699.0", + "@aws-sdk/middleware-host-header": "3.696.0", + 
"@aws-sdk/middleware-logger": "3.696.0", + "@aws-sdk/middleware-recursion-detection": "3.696.0", + "@aws-sdk/middleware-user-agent": "3.696.0", + "@aws-sdk/region-config-resolver": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@aws-sdk/util-endpoints": "3.696.0", + "@aws-sdk/util-user-agent-browser": "3.696.0", + "@aws-sdk/util-user-agent-node": "3.696.0", + "@smithy/config-resolver": "^3.0.12", + "@smithy/core": "^2.5.3", + "@smithy/fetch-http-handler": "^4.1.1", + "@smithy/hash-node": "^3.0.10", + "@smithy/invalid-dependency": "^3.0.10", + "@smithy/middleware-content-length": "^3.0.12", + "@smithy/middleware-endpoint": "^3.2.3", + "@smithy/middleware-retry": "^3.0.27", + "@smithy/middleware-serde": "^3.0.10", + "@smithy/middleware-stack": "^3.0.10", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/node-http-handler": "^3.3.1", + "@smithy/protocol-http": "^4.1.7", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/url-parser": "^3.0.10", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.27", + "@smithy/util-defaults-mode-node": "^3.0.27", + "@smithy/util-endpoints": "^2.1.6", + "@smithy/util-middleware": "^3.0.10", + "@smithy/util-retry": "^3.0.10", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/client-sso": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.696.0.tgz", + "integrity": "sha512-q5TTkd08JS0DOkHfUL853tuArf7NrPeqoS5UOvqJho8ibV9Ak/a/HO4kNvy9Nj3cib/toHYHsQIEtecUPSUUrQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.696.0", + "@aws-sdk/middleware-host-header": "3.696.0", + "@aws-sdk/middleware-logger": "3.696.0", + 
"@aws-sdk/middleware-recursion-detection": "3.696.0", + "@aws-sdk/middleware-user-agent": "3.696.0", + "@aws-sdk/region-config-resolver": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@aws-sdk/util-endpoints": "3.696.0", + "@aws-sdk/util-user-agent-browser": "3.696.0", + "@aws-sdk/util-user-agent-node": "3.696.0", + "@smithy/config-resolver": "^3.0.12", + "@smithy/core": "^2.5.3", + "@smithy/fetch-http-handler": "^4.1.1", + "@smithy/hash-node": "^3.0.10", + "@smithy/invalid-dependency": "^3.0.10", + "@smithy/middleware-content-length": "^3.0.12", + "@smithy/middleware-endpoint": "^3.2.3", + "@smithy/middleware-retry": "^3.0.27", + "@smithy/middleware-serde": "^3.0.10", + "@smithy/middleware-stack": "^3.0.10", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/node-http-handler": "^3.3.1", + "@smithy/protocol-http": "^4.1.7", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/url-parser": "^3.0.10", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.27", + "@smithy/util-defaults-mode-node": "^3.0.27", + "@smithy/util-endpoints": "^2.1.6", + "@smithy/util-middleware": "^3.0.10", + "@smithy/util-retry": "^3.0.10", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/client-sso-oidc": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.699.0.tgz", + "integrity": "sha512-u8a1GorY5D1l+4FQAf4XBUC1T10/t7neuwT21r0ymrtMFSK2a9QqVHKMoLkvavAwyhJnARSBM9/UQC797PFOFw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.696.0", + "@aws-sdk/credential-provider-node": "3.699.0", + "@aws-sdk/middleware-host-header": "3.696.0", + "@aws-sdk/middleware-logger": 
"3.696.0", + "@aws-sdk/middleware-recursion-detection": "3.696.0", + "@aws-sdk/middleware-user-agent": "3.696.0", + "@aws-sdk/region-config-resolver": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@aws-sdk/util-endpoints": "3.696.0", + "@aws-sdk/util-user-agent-browser": "3.696.0", + "@aws-sdk/util-user-agent-node": "3.696.0", + "@smithy/config-resolver": "^3.0.12", + "@smithy/core": "^2.5.3", + "@smithy/fetch-http-handler": "^4.1.1", + "@smithy/hash-node": "^3.0.10", + "@smithy/invalid-dependency": "^3.0.10", + "@smithy/middleware-content-length": "^3.0.12", + "@smithy/middleware-endpoint": "^3.2.3", + "@smithy/middleware-retry": "^3.0.27", + "@smithy/middleware-serde": "^3.0.10", + "@smithy/middleware-stack": "^3.0.10", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/node-http-handler": "^3.3.1", + "@smithy/protocol-http": "^4.1.7", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/url-parser": "^3.0.10", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.27", + "@smithy/util-defaults-mode-node": "^3.0.27", + "@smithy/util-endpoints": "^2.1.6", + "@smithy/util-middleware": "^3.0.10", + "@smithy/util-retry": "^3.0.10", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.699.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/client-sts": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.699.0.tgz", + "integrity": "sha512-++lsn4x2YXsZPIzFVwv3fSUVM55ZT0WRFmPeNilYIhZClxHLmVAWKH4I55cY9ry60/aTKYjzOXkWwyBKGsGvQg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/client-sso-oidc": "3.699.0", + "@aws-sdk/core": "3.696.0", + 
"@aws-sdk/credential-provider-node": "3.699.0", + "@aws-sdk/middleware-host-header": "3.696.0", + "@aws-sdk/middleware-logger": "3.696.0", + "@aws-sdk/middleware-recursion-detection": "3.696.0", + "@aws-sdk/middleware-user-agent": "3.696.0", + "@aws-sdk/region-config-resolver": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@aws-sdk/util-endpoints": "3.696.0", + "@aws-sdk/util-user-agent-browser": "3.696.0", + "@aws-sdk/util-user-agent-node": "3.696.0", + "@smithy/config-resolver": "^3.0.12", + "@smithy/core": "^2.5.3", + "@smithy/fetch-http-handler": "^4.1.1", + "@smithy/hash-node": "^3.0.10", + "@smithy/invalid-dependency": "^3.0.10", + "@smithy/middleware-content-length": "^3.0.12", + "@smithy/middleware-endpoint": "^3.2.3", + "@smithy/middleware-retry": "^3.0.27", + "@smithy/middleware-serde": "^3.0.10", + "@smithy/middleware-stack": "^3.0.10", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/node-http-handler": "^3.3.1", + "@smithy/protocol-http": "^4.1.7", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/url-parser": "^3.0.10", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.27", + "@smithy/util-defaults-mode-node": "^3.0.27", + "@smithy/util-endpoints": "^2.1.6", + "@smithy/util-middleware": "^3.0.10", + "@smithy/util-retry": "^3.0.10", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/core": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.696.0.tgz", + "integrity": "sha512-3c9III1k03DgvRZWg8vhVmfIXPG6hAciN9MzQTzqGngzWAELZF/WONRTRQuDFixVtarQatmLHYVw/atGeA2Byw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/core": "^2.5.3", + "@smithy/node-config-provider": "^3.1.11", + 
"@smithy/property-provider": "^3.1.9", + "@smithy/protocol-http": "^4.1.7", + "@smithy/signature-v4": "^4.2.2", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/util-middleware": "^3.0.10", + "fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/credential-provider-env": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.696.0.tgz", + "integrity": "sha512-T9iMFnJL7YTlESLpVFT3fg1Lkb1lD+oiaIC8KMpepb01gDUBIpj9+Y+pA/cgRWW0yRxmkDXNazAE2qQTVFGJzA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/property-provider": "^3.1.9", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/credential-provider-http": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.696.0.tgz", + "integrity": "sha512-GV6EbvPi2eq1+WgY/o2RFA3P7HGmnkIzCNmhwtALFlqMroLYWKE7PSeHw66Uh1dFQeVESn0/+hiUNhu1mB0emA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/fetch-http-handler": "^4.1.1", + "@smithy/node-http-handler": "^3.3.1", + "@smithy/property-provider": "^3.1.9", + "@smithy/protocol-http": "^4.1.7", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/util-stream": "^3.3.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.699.0.tgz", + "integrity": 
"sha512-dXmCqjJnKmG37Q+nLjPVu22mNkrGHY8hYoOt3Jo9R2zr5MYV7s/NHsCHr+7E+BZ+tfZYLRPeB1wkpTeHiEcdRw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/credential-provider-env": "3.696.0", + "@aws-sdk/credential-provider-http": "3.696.0", + "@aws-sdk/credential-provider-process": "3.696.0", + "@aws-sdk/credential-provider-sso": "3.699.0", + "@aws-sdk/credential-provider-web-identity": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/credential-provider-imds": "^3.2.6", + "@smithy/property-provider": "^3.1.9", + "@smithy/shared-ini-file-loader": "^3.1.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.699.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/credential-provider-node": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.699.0.tgz", + "integrity": "sha512-MmEmNDo1bBtTgRmdNfdQksXu4uXe66s0p1hi1YPrn1h59Q605eq/xiWbGL6/3KdkViH6eGUuABeV2ODld86ylg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.696.0", + "@aws-sdk/credential-provider-http": "3.696.0", + "@aws-sdk/credential-provider-ini": "3.699.0", + "@aws-sdk/credential-provider-process": "3.696.0", + "@aws-sdk/credential-provider-sso": "3.699.0", + "@aws-sdk/credential-provider-web-identity": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/credential-provider-imds": "^3.2.6", + "@smithy/property-provider": "^3.1.9", + "@smithy/shared-ini-file-loader": "^3.1.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/credential-provider-process": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.696.0.tgz", + 
"integrity": "sha512-mL1RcFDe9sfmyU5K1nuFkO8UiJXXxLX4JO1gVaDIOvPqwStpUAwi3A1BoeZhWZZNQsiKI810RnYGo0E0WB/hUA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/property-provider": "^3.1.9", + "@smithy/shared-ini-file-loader": "^3.1.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.699.0.tgz", + "integrity": "sha512-Ekp2cZG4pl9D8+uKWm4qO1xcm8/MeiI8f+dnlZm8aQzizeC+aXYy9GyoclSf6daK8KfRPiRfM7ZHBBL5dAfdMA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-sso": "3.696.0", + "@aws-sdk/core": "3.696.0", + "@aws-sdk/token-providers": "3.699.0", + "@aws-sdk/types": "3.696.0", + "@smithy/property-provider": "^3.1.9", + "@smithy/shared-ini-file-loader": "^3.1.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.696.0.tgz", + "integrity": "sha512-XJ/CVlWChM0VCoc259vWguFUjJDn/QwDqHwbx+K9cg3v6yrqXfK5ai+p/6lx0nQpnk4JzPVeYYxWRpaTsGC9rg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/property-provider": "^3.1.9", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.696.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/middleware-host-header": { + "version": "3.696.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.696.0.tgz", + "integrity": "sha512-zELJp9Ta2zkX7ELggMN9qMCgekqZhFC5V2rOr4hJDEb/Tte7gpfKSObAnw/3AYiVqt36sjHKfdkoTsuwGdEoDg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/middleware-logger": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.696.0.tgz", + "integrity": "sha512-KhkHt+8AjCxcR/5Zp3++YPJPpFQzxpr+jmONiT/Jw2yqnSngZ0Yspm5wGoRx2hS1HJbyZNuaOWEGuJoxLeBKfA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.696.0.tgz", + "integrity": "sha512-si/maV3Z0hH7qa99f9ru2xpS5HlfSVcasRlNUXKSDm611i7jFMWwGNLUOXFAOLhXotPX5G3Z6BLwL34oDeBMug==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.696.0.tgz", + "integrity": "sha512-Lvyj8CTyxrHI6GHd2YVZKIRI5Fmnugt3cpJo0VrKKEgK5zMySwEZ1n4dqPK6czYRWKd5+WnYHYAuU+Wdk6Jsjw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/types": "3.696.0", + 
"@aws-sdk/util-endpoints": "3.696.0", + "@smithy/core": "^2.5.3", + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/region-config-resolver": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.696.0.tgz", + "integrity": "sha512-7EuH142lBXjI8yH6dVS/CZeiK/WZsmb/8zP6bQbVYpMrppSTgB3MzZZdxVZGzL5r8zPQOU10wLC4kIMy0qdBVQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/types": "^3.7.1", + "@smithy/util-config-provider": "^3.0.0", + "@smithy/util-middleware": "^3.0.10", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/token-providers": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.699.0.tgz", + "integrity": "sha512-kuiEW9DWs7fNos/SM+y58HCPhcIzm1nEZLhe2/7/6+TvAYLuEWURYsbK48gzsxXlaJ2k/jGY3nIsA7RptbMOwA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/property-provider": "^3.1.9", + "@smithy/shared-ini-file-loader": "^3.1.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sso-oidc": "^3.699.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/types": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.696.0.tgz", + "integrity": "sha512-9rTvUJIAj5d3//U5FDPWGJ1nFJLuWb30vugGOrWk7aNZ6y9tuA3PI7Cc9dP8WEXKVyK1vuuk8rSFP2iqXnlgrw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + 
"node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/util-endpoints": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.696.0.tgz", + "integrity": "sha512-T5s0IlBVX+gkb9g/I6CLt4yAZVzMSiGnbUqWihWsHvQR1WOoIcndQy/Oz/IJXT9T2ipoy7a80gzV6a5mglrioA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/types": "^3.7.1", + "@smithy/util-endpoints": "^2.1.6", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.696.0.tgz", + "integrity": "sha512-Z5rVNDdmPOe6ELoM5AhF/ja5tSjbe6ctSctDPb0JdDf4dT0v2MfwhJKzXju2RzX8Es/77Glh7MlaXLE0kCB9+Q==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/types": "^3.7.1", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.696.0.tgz", + "integrity": "sha512-KhKqcfyXIB0SCCt+qsu4eJjsfiOrNzK5dCV7RAW2YIpp+msxGUUX0NdRE9rkzjiv+3EMktgJm3eEIS+yxtlVdQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@smithy/fetch-http-handler": { + "version": "4.1.1", + "resolved": 
"https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-4.1.1.tgz", + "integrity": "sha512-bH7QW0+JdX0bPBadXt8GwMof/jz0H28I84hU1Uet9ISpzUqXqRQ3fEZJ+ANPOhzSEczYvANNl3uDQDYArSFDtA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^4.1.7", + "@smithy/querystring-builder": "^3.0.10", + "@smithy/types": "^3.7.1", + "@smithy/util-base64": "^3.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-cognito-identity/node_modules/@smithy/util-middleware": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-3.0.10.tgz", + "integrity": "sha512-eJO+/+RsrG2RpmY68jZdwQtnfsxjmPxzMlQpnHKjFPwrYqvlcT+fHdT+ZVwcjlWSrByOhGr9Ff2GG17efc192A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, "node_modules/@aws-sdk/client-s3": { "version": "3.678.0", "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.678.0.tgz", @@ -555,6 +1124,47 @@ "node": ">=16.0.0" } }, + "node_modules/@aws-sdk/credential-provider-cognito-identity": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-cognito-identity/-/credential-provider-cognito-identity-3.699.0.tgz", + "integrity": "sha512-iuaTnudaBfEET+o444sDwf71Awe6UiZfH+ipUPmswAi2jZDwdFF1nxMKDEKL8/LV5WpXsdKSfwgS0RQeupURew==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-cognito-identity": "3.699.0", + "@aws-sdk/types": "3.696.0", + "@smithy/property-provider": 
"^3.1.9", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-cognito-identity/node_modules/@aws-sdk/types": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.696.0.tgz", + "integrity": "sha512-9rTvUJIAj5d3//U5FDPWGJ1nFJLuWb30vugGOrWk7aNZ6y9tuA3PI7Cc9dP8WEXKVyK1vuuk8rSFP2iqXnlgrw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-cognito-identity/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, "node_modules/@aws-sdk/credential-provider-env": { "version": "3.678.0", "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.678.0.tgz", @@ -676,17 +1286,561 @@ "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.678.0.tgz", "integrity": "sha512-fcYZjTTFcef99l+BhcEAhHS4tEK1kE6Xj5Zz5lT4tFA07BkQt3d6kUKRVVfJnsbcHH4RDBUCnLhU8HPfc/kvjA==", "dependencies": { - "@aws-sdk/core": "3.678.0", - "@aws-sdk/types": "3.667.0", - "@smithy/property-provider": "^3.1.7", - "@smithy/types": "^3.5.0", + "@aws-sdk/core": "3.678.0", + "@aws-sdk/types": "3.667.0", + "@smithy/property-provider": "^3.1.7", + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.678.0" + } + }, + "node_modules/@aws-sdk/credential-providers": { + "version": "3.699.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-providers/-/credential-providers-3.699.0.tgz", + "integrity": "sha512-jBjOntl9zN9Nvb0jmbMGRbiTzemDz64ij7W6BDavxBJRZpRoNeN0QCz6RolkCyXnyUJjo5mF2unY2wnv00A+LQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-cognito-identity": "3.699.0", + "@aws-sdk/client-sso": "3.696.0", + "@aws-sdk/client-sts": "3.699.0", + "@aws-sdk/core": "3.696.0", + "@aws-sdk/credential-provider-cognito-identity": "3.699.0", + "@aws-sdk/credential-provider-env": "3.696.0", + "@aws-sdk/credential-provider-http": "3.696.0", + "@aws-sdk/credential-provider-ini": "3.699.0", + "@aws-sdk/credential-provider-node": "3.699.0", + "@aws-sdk/credential-provider-process": "3.696.0", + "@aws-sdk/credential-provider-sso": "3.699.0", + "@aws-sdk/credential-provider-web-identity": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/credential-provider-imds": "^3.2.6", + "@smithy/property-provider": "^3.1.9", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/client-sso": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.696.0.tgz", + "integrity": "sha512-q5TTkd08JS0DOkHfUL853tuArf7NrPeqoS5UOvqJho8ibV9Ak/a/HO4kNvy9Nj3cib/toHYHsQIEtecUPSUUrQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.696.0", + "@aws-sdk/middleware-host-header": "3.696.0", + "@aws-sdk/middleware-logger": "3.696.0", + "@aws-sdk/middleware-recursion-detection": "3.696.0", + "@aws-sdk/middleware-user-agent": "3.696.0", + "@aws-sdk/region-config-resolver": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@aws-sdk/util-endpoints": "3.696.0", + "@aws-sdk/util-user-agent-browser": "3.696.0", + "@aws-sdk/util-user-agent-node": "3.696.0", + "@smithy/config-resolver": "^3.0.12", + "@smithy/core": "^2.5.3", + 
"@smithy/fetch-http-handler": "^4.1.1", + "@smithy/hash-node": "^3.0.10", + "@smithy/invalid-dependency": "^3.0.10", + "@smithy/middleware-content-length": "^3.0.12", + "@smithy/middleware-endpoint": "^3.2.3", + "@smithy/middleware-retry": "^3.0.27", + "@smithy/middleware-serde": "^3.0.10", + "@smithy/middleware-stack": "^3.0.10", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/node-http-handler": "^3.3.1", + "@smithy/protocol-http": "^4.1.7", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/url-parser": "^3.0.10", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.27", + "@smithy/util-defaults-mode-node": "^3.0.27", + "@smithy/util-endpoints": "^2.1.6", + "@smithy/util-middleware": "^3.0.10", + "@smithy/util-retry": "^3.0.10", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/client-sso-oidc": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.699.0.tgz", + "integrity": "sha512-u8a1GorY5D1l+4FQAf4XBUC1T10/t7neuwT21r0ymrtMFSK2a9QqVHKMoLkvavAwyhJnARSBM9/UQC797PFOFw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.696.0", + "@aws-sdk/credential-provider-node": "3.699.0", + "@aws-sdk/middleware-host-header": "3.696.0", + "@aws-sdk/middleware-logger": "3.696.0", + "@aws-sdk/middleware-recursion-detection": "3.696.0", + "@aws-sdk/middleware-user-agent": "3.696.0", + "@aws-sdk/region-config-resolver": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@aws-sdk/util-endpoints": "3.696.0", + "@aws-sdk/util-user-agent-browser": "3.696.0", + "@aws-sdk/util-user-agent-node": "3.696.0", + "@smithy/config-resolver": "^3.0.12", + "@smithy/core": 
"^2.5.3", + "@smithy/fetch-http-handler": "^4.1.1", + "@smithy/hash-node": "^3.0.10", + "@smithy/invalid-dependency": "^3.0.10", + "@smithy/middleware-content-length": "^3.0.12", + "@smithy/middleware-endpoint": "^3.2.3", + "@smithy/middleware-retry": "^3.0.27", + "@smithy/middleware-serde": "^3.0.10", + "@smithy/middleware-stack": "^3.0.10", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/node-http-handler": "^3.3.1", + "@smithy/protocol-http": "^4.1.7", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/url-parser": "^3.0.10", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.27", + "@smithy/util-defaults-mode-node": "^3.0.27", + "@smithy/util-endpoints": "^2.1.6", + "@smithy/util-middleware": "^3.0.10", + "@smithy/util-retry": "^3.0.10", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.699.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/client-sts": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.699.0.tgz", + "integrity": "sha512-++lsn4x2YXsZPIzFVwv3fSUVM55ZT0WRFmPeNilYIhZClxHLmVAWKH4I55cY9ry60/aTKYjzOXkWwyBKGsGvQg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/client-sso-oidc": "3.699.0", + "@aws-sdk/core": "3.696.0", + "@aws-sdk/credential-provider-node": "3.699.0", + "@aws-sdk/middleware-host-header": "3.696.0", + "@aws-sdk/middleware-logger": "3.696.0", + "@aws-sdk/middleware-recursion-detection": "3.696.0", + "@aws-sdk/middleware-user-agent": "3.696.0", + "@aws-sdk/region-config-resolver": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@aws-sdk/util-endpoints": "3.696.0", + "@aws-sdk/util-user-agent-browser": "3.696.0", + 
"@aws-sdk/util-user-agent-node": "3.696.0", + "@smithy/config-resolver": "^3.0.12", + "@smithy/core": "^2.5.3", + "@smithy/fetch-http-handler": "^4.1.1", + "@smithy/hash-node": "^3.0.10", + "@smithy/invalid-dependency": "^3.0.10", + "@smithy/middleware-content-length": "^3.0.12", + "@smithy/middleware-endpoint": "^3.2.3", + "@smithy/middleware-retry": "^3.0.27", + "@smithy/middleware-serde": "^3.0.10", + "@smithy/middleware-stack": "^3.0.10", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/node-http-handler": "^3.3.1", + "@smithy/protocol-http": "^4.1.7", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/url-parser": "^3.0.10", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.27", + "@smithy/util-defaults-mode-node": "^3.0.27", + "@smithy/util-endpoints": "^2.1.6", + "@smithy/util-middleware": "^3.0.10", + "@smithy/util-retry": "^3.0.10", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/core": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.696.0.tgz", + "integrity": "sha512-3c9III1k03DgvRZWg8vhVmfIXPG6hAciN9MzQTzqGngzWAELZF/WONRTRQuDFixVtarQatmLHYVw/atGeA2Byw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/core": "^2.5.3", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/property-provider": "^3.1.9", + "@smithy/protocol-http": "^4.1.7", + "@smithy/signature-v4": "^4.2.2", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/util-middleware": "^3.0.10", + "fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/credential-provider-env": { + "version": "3.696.0", + 
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.696.0.tgz", + "integrity": "sha512-T9iMFnJL7YTlESLpVFT3fg1Lkb1lD+oiaIC8KMpepb01gDUBIpj9+Y+pA/cgRWW0yRxmkDXNazAE2qQTVFGJzA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/property-provider": "^3.1.9", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/credential-provider-http": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.696.0.tgz", + "integrity": "sha512-GV6EbvPi2eq1+WgY/o2RFA3P7HGmnkIzCNmhwtALFlqMroLYWKE7PSeHw66Uh1dFQeVESn0/+hiUNhu1mB0emA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/fetch-http-handler": "^4.1.1", + "@smithy/node-http-handler": "^3.3.1", + "@smithy/property-provider": "^3.1.9", + "@smithy/protocol-http": "^4.1.7", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/util-stream": "^3.3.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.699.0.tgz", + "integrity": "sha512-dXmCqjJnKmG37Q+nLjPVu22mNkrGHY8hYoOt3Jo9R2zr5MYV7s/NHsCHr+7E+BZ+tfZYLRPeB1wkpTeHiEcdRw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/credential-provider-env": "3.696.0", + "@aws-sdk/credential-provider-http": "3.696.0", + "@aws-sdk/credential-provider-process": "3.696.0", + "@aws-sdk/credential-provider-sso": "3.699.0", + "@aws-sdk/credential-provider-web-identity": "3.696.0", + "@aws-sdk/types": "3.696.0", + 
"@smithy/credential-provider-imds": "^3.2.6", + "@smithy/property-provider": "^3.1.9", + "@smithy/shared-ini-file-loader": "^3.1.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.699.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/credential-provider-node": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.699.0.tgz", + "integrity": "sha512-MmEmNDo1bBtTgRmdNfdQksXu4uXe66s0p1hi1YPrn1h59Q605eq/xiWbGL6/3KdkViH6eGUuABeV2ODld86ylg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.696.0", + "@aws-sdk/credential-provider-http": "3.696.0", + "@aws-sdk/credential-provider-ini": "3.699.0", + "@aws-sdk/credential-provider-process": "3.696.0", + "@aws-sdk/credential-provider-sso": "3.699.0", + "@aws-sdk/credential-provider-web-identity": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/credential-provider-imds": "^3.2.6", + "@smithy/property-provider": "^3.1.9", + "@smithy/shared-ini-file-loader": "^3.1.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/credential-provider-process": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.696.0.tgz", + "integrity": "sha512-mL1RcFDe9sfmyU5K1nuFkO8UiJXXxLX4JO1gVaDIOvPqwStpUAwi3A1BoeZhWZZNQsiKI810RnYGo0E0WB/hUA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/property-provider": "^3.1.9", + "@smithy/shared-ini-file-loader": "^3.1.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + 
"node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.699.0.tgz", + "integrity": "sha512-Ekp2cZG4pl9D8+uKWm4qO1xcm8/MeiI8f+dnlZm8aQzizeC+aXYy9GyoclSf6daK8KfRPiRfM7ZHBBL5dAfdMA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-sso": "3.696.0", + "@aws-sdk/core": "3.696.0", + "@aws-sdk/token-providers": "3.699.0", + "@aws-sdk/types": "3.696.0", + "@smithy/property-provider": "^3.1.9", + "@smithy/shared-ini-file-loader": "^3.1.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.696.0.tgz", + "integrity": "sha512-XJ/CVlWChM0VCoc259vWguFUjJDn/QwDqHwbx+K9cg3v6yrqXfK5ai+p/6lx0nQpnk4JzPVeYYxWRpaTsGC9rg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/property-provider": "^3.1.9", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.696.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/middleware-host-header": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.696.0.tgz", + "integrity": "sha512-zELJp9Ta2zkX7ELggMN9qMCgekqZhFC5V2rOr4hJDEb/Tte7gpfKSObAnw/3AYiVqt36sjHKfdkoTsuwGdEoDg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + 
"node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/middleware-logger": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.696.0.tgz", + "integrity": "sha512-KhkHt+8AjCxcR/5Zp3++YPJPpFQzxpr+jmONiT/Jw2yqnSngZ0Yspm5wGoRx2hS1HJbyZNuaOWEGuJoxLeBKfA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.696.0.tgz", + "integrity": "sha512-si/maV3Z0hH7qa99f9ru2xpS5HlfSVcasRlNUXKSDm611i7jFMWwGNLUOXFAOLhXotPX5G3Z6BLwL34oDeBMug==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.696.0.tgz", + "integrity": "sha512-Lvyj8CTyxrHI6GHd2YVZKIRI5Fmnugt3cpJo0VrKKEgK5zMySwEZ1n4dqPK6czYRWKd5+WnYHYAuU+Wdk6Jsjw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@aws-sdk/util-endpoints": "3.696.0", + "@smithy/core": "^2.5.3", + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/region-config-resolver": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.696.0.tgz", + "integrity": 
"sha512-7EuH142lBXjI8yH6dVS/CZeiK/WZsmb/8zP6bQbVYpMrppSTgB3MzZZdxVZGzL5r8zPQOU10wLC4kIMy0qdBVQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/types": "^3.7.1", + "@smithy/util-config-provider": "^3.0.0", + "@smithy/util-middleware": "^3.0.10", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/token-providers": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.699.0.tgz", + "integrity": "sha512-kuiEW9DWs7fNos/SM+y58HCPhcIzm1nEZLhe2/7/6+TvAYLuEWURYsbK48gzsxXlaJ2k/jGY3nIsA7RptbMOwA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/property-provider": "^3.1.9", + "@smithy/shared-ini-file-loader": "^3.1.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sso-oidc": "^3.699.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/types": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.696.0.tgz", + "integrity": "sha512-9rTvUJIAj5d3//U5FDPWGJ1nFJLuWb30vugGOrWk7aNZ6y9tuA3PI7Cc9dP8WEXKVyK1vuuk8rSFP2iqXnlgrw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/util-endpoints": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.696.0.tgz", + "integrity": "sha512-T5s0IlBVX+gkb9g/I6CLt4yAZVzMSiGnbUqWihWsHvQR1WOoIcndQy/Oz/IJXT9T2ipoy7a80gzV6a5mglrioA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/types": "^3.7.1", + "@smithy/util-endpoints": "^2.1.6", + "tslib": "^2.6.2" + }, + 
"engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.696.0.tgz", + "integrity": "sha512-Z5rVNDdmPOe6ELoM5AhF/ja5tSjbe6ctSctDPb0JdDf4dT0v2MfwhJKzXju2RzX8Es/77Glh7MlaXLE0kCB9+Q==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/types": "^3.7.1", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.696.0.tgz", + "integrity": "sha512-KhKqcfyXIB0SCCt+qsu4eJjsfiOrNzK5dCV7RAW2YIpp+msxGUUX0NdRE9rkzjiv+3EMktgJm3eEIS+yxtlVdQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@smithy/fetch-http-handler": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-4.1.1.tgz", + "integrity": "sha512-bH7QW0+JdX0bPBadXt8GwMof/jz0H28I84hU1Uet9ISpzUqXqRQ3fEZJ+ANPOhzSEczYvANNl3uDQDYArSFDtA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^4.1.7", + "@smithy/querystring-builder": "^3.0.10", + "@smithy/types": "^3.7.1", + "@smithy/util-base64": "^3.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": 
"https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@smithy/util-middleware": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-3.0.10.tgz", + "integrity": "sha512-eJO+/+RsrG2RpmY68jZdwQtnfsxjmPxzMlQpnHKjFPwrYqvlcT+fHdT+ZVwcjlWSrByOhGr9Ff2GG17efc192A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.7.1", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" - }, - "peerDependencies": { - "@aws-sdk/client-sts": "^3.678.0" } }, "node_modules/@aws-sdk/middleware-bucket-endpoint": { @@ -936,6 +2090,46 @@ "node": ">=16.0.0" } }, + "node_modules/@aws-sdk/util-format-url": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-format-url/-/util-format-url-3.696.0.tgz", + "integrity": "sha512-R6yK1LozUD1GdAZRPhNsIow6VNFJUTyyoIar1OCWaknlucBMcq7musF3DN3TlORBwfFMj5buHc2ET9OtMtzvuA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/querystring-builder": "^3.0.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/util-format-url/node_modules/@aws-sdk/types": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.696.0.tgz", + "integrity": "sha512-9rTvUJIAj5d3//U5FDPWGJ1nFJLuWb30vugGOrWk7aNZ6y9tuA3PI7Cc9dP8WEXKVyK1vuuk8rSFP2iqXnlgrw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/util-format-url/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": 
"https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, "node_modules/@aws-sdk/util-locate-window": { "version": "3.568.0", "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.568.0.tgz", @@ -981,6 +2175,15 @@ } } }, + "node_modules/@aws-sdk/util-utf8-browser": { + "version": "3.259.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.259.0.tgz", + "integrity": "sha512-UvFa/vR+e19XookZF8RzFZBrw2EUkQWxiBW0yYQAhvk3C+QVGl0H3ouca8LDBlBfQKXwmW3huo/59H8rwb1wJw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.3.1" + } + }, "node_modules/@aws-sdk/xml-builder": { "version": "3.662.0", "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.662.0.tgz", @@ -1737,20 +2940,6 @@ "kuler": "^2.0.0" } }, - "node_modules/@elastic/elasticsearch": { - "version": "7.17.14", - "resolved": "https://registry.npmjs.org/@elastic/elasticsearch/-/elasticsearch-7.17.14.tgz", - "integrity": "sha512-6uQ1pVXutwz1Krwooo67W+3K8BwH1ASMh1WoHTpomUzw8EXecXN5lHIJ9EPqTHuv1WqR2LKkSJyagcq0HYUJpg==", - "dependencies": { - "debug": "^4.3.1", - "hpagent": "^0.1.1", - "ms": "^2.1.3", - "secure-json-parse": "^2.4.0" - }, - "engines": { - "node": ">=12" - } - }, "node_modules/@eslint-community/eslint-utils": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", @@ -2786,6 +3975,33 @@ "node": ">= 8" } }, + "node_modules/@opensearch-project/opensearch": { + "version": "2.13.0", + "resolved": "https://registry.npmjs.org/@opensearch-project/opensearch/-/opensearch-2.13.0.tgz", + "integrity": "sha512-Bu3jJ7pKzumbMMeefu7/npAWAvFu5W9SlbBow1ulhluqUpqc7QoXe0KidDrMy7Dy3BQrkI6llR3cWL4lQTZOFw==", + "license": 
"Apache-2.0", + "dependencies": { + "aws4": "^1.11.0", + "debug": "^4.3.1", + "hpagent": "^1.2.0", + "json11": "^2.0.0", + "ms": "^2.1.3", + "secure-json-parse": "^2.4.0" + }, + "engines": { + "node": ">=10", + "yarn": "^1.22.10" + } + }, + "node_modules/@opensearch-project/opensearch/node_modules/hpagent": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/hpagent/-/hpagent-1.2.0.tgz", + "integrity": "sha512-A91dYTeIB6NoXG+PxTQpCCDDnfHsW9kc06Lvpu1TEe9gnd6ZFeiBoRO9JvzEv6xK7EX97/dUE8g/vBMTqTS3CA==", + "license": "MIT", + "engines": { + "node": ">=14" + } + }, "node_modules/@panva/asn1.js": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/@panva/asn1.js/-/asn1.js-1.0.0.tgz", @@ -2883,11 +4099,24 @@ } }, "node_modules/@smithy/abort-controller": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-3.1.6.tgz", - "integrity": "sha512-0XuhuHQlEqbNQZp7QxxrFTdVWdwxch4vjxYgfInF91hZFkPxf9QDrdQka0KfxFMPqLNzSw0b95uGTrLliQUavQ==", + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-3.1.8.tgz", + "integrity": "sha512-+3DOBcUn5/rVjlxGvUPKc416SExarAQ+Qe0bqk30YSUjbepwpS7QN0cyKUSifvLJhdMZ0WPzPP5ymut0oonrpQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/abort-controller/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^3.6.0", "tslib": "^2.6.2" }, "engines": { @@ -2912,31 +4141,58 @@ } }, "node_modules/@smithy/config-resolver": { - "version": "3.0.10", - "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-3.0.10.tgz", - "integrity": 
"sha512-Uh0Sz9gdUuz538nvkPiyv1DZRX9+D15EKDtnQP5rYVAzM/dnYk3P8cg73jcxyOitPgT3mE3OVj7ky7sibzHWkw==", + "version": "3.0.12", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-3.0.12.tgz", + "integrity": "sha512-YAJP9UJFZRZ8N+UruTeq78zkdjUHmzsY62J4qKWZ4SXB4QXJ/+680EfXXgkYA2xj77ooMqtUY9m406zGNqwivQ==", + "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^3.1.9", - "@smithy/types": "^3.6.0", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/types": "^3.7.1", "@smithy/util-config-provider": "^3.0.0", - "@smithy/util-middleware": "^3.0.8", + "@smithy/util-middleware": "^3.0.10", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@smithy/core": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@smithy/core/-/core-2.5.1.tgz", - "integrity": "sha512-DujtuDA7BGEKExJ05W5OdxCoyekcKT3Rhg1ZGeiUWaz2BJIWXjZmsG/DIP4W48GHno7AQwRsaCb8NcBgH3QZpg==", + "node_modules/@smithy/config-resolver/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", "dependencies": { - "@smithy/middleware-serde": "^3.0.8", - "@smithy/protocol-http": "^4.1.5", - "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/config-resolver/node_modules/@smithy/util-middleware": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-3.0.10.tgz", + "integrity": "sha512-eJO+/+RsrG2RpmY68jZdwQtnfsxjmPxzMlQpnHKjFPwrYqvlcT+fHdT+ZVwcjlWSrByOhGr9Ff2GG17efc192A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/core": { + "version": "2.5.4", + "resolved": 
"https://registry.npmjs.org/@smithy/core/-/core-2.5.4.tgz", + "integrity": "sha512-iFh2Ymn2sCziBRLPuOOxRPkuCx/2gBdXtBGuCUFLUe6bWYjKnhHyIPqGeNkLZ5Aco/5GjebRTBFiWID3sDbrKw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/middleware-serde": "^3.0.10", + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", "@smithy/util-body-length-browser": "^3.0.0", - "@smithy/util-middleware": "^3.0.8", - "@smithy/util-stream": "^3.2.1", + "@smithy/util-middleware": "^3.0.10", + "@smithy/util-stream": "^3.3.1", "@smithy/util-utf8": "^3.0.0", "tslib": "^2.6.2" }, @@ -2944,15 +4200,53 @@ "node": ">=16.0.0" } }, + "node_modules/@smithy/core/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/core/node_modules/@smithy/util-middleware": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-3.0.10.tgz", + "integrity": "sha512-eJO+/+RsrG2RpmY68jZdwQtnfsxjmPxzMlQpnHKjFPwrYqvlcT+fHdT+ZVwcjlWSrByOhGr9Ff2GG17efc192A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, "node_modules/@smithy/credential-provider-imds": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-3.2.5.tgz", - "integrity": "sha512-4FTQGAsuwqTzVMmiRVTn0RR9GrbRfkP0wfu/tXWVHd2LgNpTY0uglQpIScXK4NaEyXbB3JmZt8gfVqO50lP8wg==", + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-3.2.7.tgz", + "integrity": 
"sha512-cEfbau+rrWF8ylkmmVAObOmjbTIzKyUC5TkBL58SbLywD0RCBC4JAUKbmtSm2w5KUJNRPGgpGFMvE2FKnuNlWQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^3.1.11", + "@smithy/property-provider": "^3.1.10", + "@smithy/types": "^3.7.1", + "@smithy/url-parser": "^3.0.10", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/credential-provider-imds/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^3.1.9", - "@smithy/property-provider": "^3.1.8", - "@smithy/types": "^3.6.0", - "@smithy/url-parser": "^3.0.8", "tslib": "^2.6.2" }, "engines": { @@ -3045,11 +4339,12 @@ } }, "node_modules/@smithy/hash-node": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-3.0.8.tgz", - "integrity": "sha512-tlNQYbfpWXHimHqrvgo14DrMAgUBua/cNoz9fMYcDmYej7MAmUcjav/QKQbFc3NrcPxeJ7QClER4tWZmfwoPng==", + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-3.0.10.tgz", + "integrity": "sha512-3zWGWCHI+FlJ5WJwx73Mw2llYR8aflVyZN5JhoqLxbdPZi6UyKSdCeXAWJw9ja22m6S6Tzz1KZ+kAaSwvydi0g==", + "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^3.6.0", + "@smithy/types": "^3.7.1", "@smithy/util-buffer-from": "^3.0.0", "@smithy/util-utf8": "^3.0.0", "tslib": "^2.6.2" @@ -3058,6 +4353,18 @@ "node": ">=16.0.0" } }, + "node_modules/@smithy/hash-node/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + 
"node": ">=16.0.0" + } + }, "node_modules/@smithy/hash-stream-node": { "version": "3.1.7", "resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-3.1.7.tgz", @@ -3072,12 +4379,25 @@ } }, "node_modules/@smithy/invalid-dependency": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-3.0.8.tgz", - "integrity": "sha512-7Qynk6NWtTQhnGTTZwks++nJhQ1O54Mzi7fz4PqZOiYXb4Z1Flpb2yRvdALoggTS8xjtohWUM+RygOtB30YL3Q==", + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-3.0.10.tgz", + "integrity": "sha512-Lp2L65vFi+cj0vFMu2obpPW69DU+6O5g3086lmI4XcnRCG8PxvpWC7XyaVwJCxsZFzueHjXnrOH/E0pl0zikfA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + } + }, + "node_modules/@smithy/invalid-dependency/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^3.6.0", "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" } }, "node_modules/@smithy/is-array-buffer": { @@ -3091,203 +4411,424 @@ "node": ">=16.0.0" } }, - "node_modules/@smithy/md5-js": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-3.0.8.tgz", - "integrity": "sha512-LwApfTK0OJ/tCyNUXqnWCKoE2b4rDSr4BJlDAVCkiWYeHESr+y+d5zlAanuLW6fnitVJRD/7d9/kN/ZM9Su4mA==", + "node_modules/@smithy/md5-js": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-3.0.8.tgz", + "integrity": "sha512-LwApfTK0OJ/tCyNUXqnWCKoE2b4rDSr4BJlDAVCkiWYeHESr+y+d5zlAanuLW6fnitVJRD/7d9/kN/ZM9Su4mA==", + "dependencies": { + "@smithy/types": "^3.6.0", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + } + }, + 
"node_modules/@smithy/middleware-content-length": { + "version": "3.0.12", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-3.0.12.tgz", + "integrity": "sha512-1mDEXqzM20yywaMDuf5o9ue8OkJ373lSPbaSjyEvkWdqELhFMyNNgKGWL/rCSf4KME8B+HlHKuR8u9kRj8HzEQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/middleware-content-length/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/middleware-endpoint": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-3.2.4.tgz", + "integrity": "sha512-TybiW2LA3kYVd3e+lWhINVu1o26KJbBwOpADnf0L4x/35vLVica77XVR5hvV9+kWeTGeSJ3IHTcYxbRxlbwhsg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^2.5.4", + "@smithy/middleware-serde": "^3.0.10", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/shared-ini-file-loader": "^3.1.11", + "@smithy/types": "^3.7.1", + "@smithy/url-parser": "^3.0.10", + "@smithy/util-middleware": "^3.0.10", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/middleware-endpoint/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + 
"node_modules/@smithy/middleware-endpoint/node_modules/@smithy/util-middleware": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-3.0.10.tgz", + "integrity": "sha512-eJO+/+RsrG2RpmY68jZdwQtnfsxjmPxzMlQpnHKjFPwrYqvlcT+fHdT+ZVwcjlWSrByOhGr9Ff2GG17efc192A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/middleware-retry": { + "version": "3.0.28", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-3.0.28.tgz", + "integrity": "sha512-vK2eDfvIXG1U64FEUhYxoZ1JSj4XFbYWkK36iz02i3pFwWiDz1Q7jKhGTBCwx/7KqJNk4VS7d7cDLXFOvP7M+g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^3.1.11", + "@smithy/protocol-http": "^4.1.7", + "@smithy/service-error-classification": "^3.0.10", + "@smithy/smithy-client": "^3.4.5", + "@smithy/types": "^3.7.1", + "@smithy/util-middleware": "^3.0.10", + "@smithy/util-retry": "^3.0.10", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/middleware-retry/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/middleware-retry/node_modules/@smithy/util-middleware": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-3.0.10.tgz", + "integrity": "sha512-eJO+/+RsrG2RpmY68jZdwQtnfsxjmPxzMlQpnHKjFPwrYqvlcT+fHdT+ZVwcjlWSrByOhGr9Ff2GG17efc192A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": 
">=16.0.0" + } + }, + "node_modules/@smithy/middleware-serde": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-3.0.10.tgz", + "integrity": "sha512-MnAuhh+dD14F428ubSJuRnmRsfOpxSzvRhaGVTvd/lrUDE3kxzCCmH8lnVTvoNQnV2BbJ4c15QwZ3UdQBtFNZA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/middleware-serde/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/middleware-stack": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-3.0.10.tgz", + "integrity": "sha512-grCHyoiARDBBGPyw2BeicpjgpsDFWZZxptbVKb3CRd/ZA15F/T6rZjCCuBUjJwdck1nwUuIxYtsS4H9DDpbP5w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/middleware-stack/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/node-config-provider": { + "version": "3.1.11", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.11.tgz", + "integrity": "sha512-URq3gT3RpDikh/8MBJUB+QGZzfS7Bm6TQTqoh4CqE8NBuyPkWa5eUXj0XFcFfeZVgg3WMh1u19iaXn8FvvXxZw==", + "license": "Apache-2.0", + 
"dependencies": { + "@smithy/property-provider": "^3.1.10", + "@smithy/shared-ini-file-loader": "^3.1.11", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/node-config-provider/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/node-http-handler": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-3.3.1.tgz", + "integrity": "sha512-fr+UAOMGWh6bn4YSEezBCpJn9Ukp9oR4D32sCjCo7U81evE11YePOQ58ogzyfgmjIO79YeOdfXXqr0jyhPQeMg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^3.1.8", + "@smithy/protocol-http": "^4.1.7", + "@smithy/querystring-builder": "^3.0.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/node-http-handler/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/property-provider": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-3.1.10.tgz", + "integrity": "sha512-n1MJZGTorTH2DvyTVj+3wXnd4CzjJxyXeOgnTlgNVFxaaMeT4OteEp4QrzF8p9ee2yg42nvyVK6R/awLCakjeQ==", + "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^3.6.0", - "@smithy/util-utf8": "^3.0.0", + "@smithy/types": "^3.7.1", "tslib": "^2.6.2" 
+ }, + "engines": { + "node": ">=16.0.0" } }, - "node_modules/@smithy/middleware-content-length": { - "version": "3.0.10", - "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-3.0.10.tgz", - "integrity": "sha512-T4dIdCs1d/+/qMpwhJ1DzOhxCZjZHbHazEPJWdB4GDi2HjIZllVzeBEcdJUN0fomV8DURsgOyrbEUzg3vzTaOg==", + "node_modules/@smithy/property-provider/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^4.1.5", - "@smithy/types": "^3.6.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@smithy/middleware-endpoint": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-3.2.1.tgz", - "integrity": "sha512-wWO3xYmFm6WRW8VsEJ5oU6h7aosFXfszlz3Dj176pTij6o21oZnzkCLzShfmRaaCHDkBXWBdO0c4sQAvLFP6zA==", + "node_modules/@smithy/protocol-http": { + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.7.tgz", + "integrity": "sha512-FP2LepWD0eJeOTm0SjssPcgqAlDFzOmRXqXmGhfIM52G7Lrox/pcpQf6RP4F21k0+O12zaqQt5fCDOeBtqY6Cg==", + "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^2.5.1", - "@smithy/middleware-serde": "^3.0.8", - "@smithy/node-config-provider": "^3.1.9", - "@smithy/shared-ini-file-loader": "^3.1.9", - "@smithy/types": "^3.6.0", - "@smithy/url-parser": "^3.0.8", - "@smithy/util-middleware": "^3.0.8", + "@smithy/types": "^3.7.1", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@smithy/middleware-retry": { - "version": "3.0.25", - "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-3.0.25.tgz", - "integrity": 
"sha512-m1F70cPaMBML4HiTgCw5I+jFNtjgz5z5UdGnUbG37vw6kh4UvizFYjqJGHvicfgKMkDL6mXwyPp5mhZg02g5sg==", - "dependencies": { - "@smithy/node-config-provider": "^3.1.9", - "@smithy/protocol-http": "^4.1.5", - "@smithy/service-error-classification": "^3.0.8", - "@smithy/smithy-client": "^3.4.2", - "@smithy/types": "^3.6.0", - "@smithy/util-middleware": "^3.0.8", - "@smithy/util-retry": "^3.0.8", - "tslib": "^2.6.2", - "uuid": "^9.0.1" + "node_modules/@smithy/protocol-http/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@smithy/middleware-serde": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-3.0.8.tgz", - "integrity": "sha512-Xg2jK9Wc/1g/MBMP/EUn2DLspN8LNt+GMe7cgF+Ty3vl+Zvu+VeZU5nmhveU+H8pxyTsjrAkci8NqY6OuvZnjA==", + "node_modules/@smithy/querystring-builder": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-3.0.10.tgz", + "integrity": "sha512-nT9CQF3EIJtIUepXQuBFb8dxJi3WVZS3XfuDksxSCSn+/CzZowRLdhDn+2acbBv8R6eaJqPupoI/aRFIImNVPQ==", + "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^3.6.0", + "@smithy/types": "^3.7.1", + "@smithy/util-uri-escape": "^3.0.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@smithy/middleware-stack": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-3.0.8.tgz", - "integrity": "sha512-d7ZuwvYgp1+3682Nx0MD3D/HtkmZd49N3JUndYWQXfRZrYEnCWYc8BHcNmVsPAp9gKvlurdg/mubE6b/rPS9MA==", + "node_modules/@smithy/querystring-builder/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": 
"https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^3.6.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@smithy/node-config-provider": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", - "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", + "node_modules/@smithy/querystring-parser": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-3.0.10.tgz", + "integrity": "sha512-Oa0XDcpo9SmjhiDD9ua2UyM3uU01ZTuIrNdZvzwUTykW1PM8o2yJvMh1Do1rY5sUQg4NDV70dMi0JhDx4GyxuQ==", + "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^3.1.8", - "@smithy/shared-ini-file-loader": "^3.1.9", - "@smithy/types": "^3.6.0", + "@smithy/types": "^3.7.1", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@smithy/node-http-handler": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-3.2.5.tgz", - "integrity": "sha512-PkOwPNeKdvX/jCpn0A8n9/TyoxjGZB8WVoJmm9YzsnAgggTj4CrjpRHlTQw7dlLZ320n1mY1y+nTRUDViKi/3w==", + "node_modules/@smithy/querystring-parser/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", "dependencies": { - "@smithy/abort-controller": "^3.1.6", - "@smithy/protocol-http": "^4.1.5", - "@smithy/querystring-builder": "^3.0.8", - "@smithy/types": "^3.6.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@smithy/property-provider": { 
- "version": "3.1.8", - "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-3.1.8.tgz", - "integrity": "sha512-ukNUyo6rHmusG64lmkjFeXemwYuKge1BJ8CtpVKmrxQxc6rhUX0vebcptFA9MmrGsnLhwnnqeH83VTU9hwOpjA==", + "node_modules/@smithy/service-error-classification": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-3.0.10.tgz", + "integrity": "sha512-zHe642KCqDxXLuhs6xmHVgRwy078RfqxP2wRDpIyiF8EmsWXptMwnMwbVa50lw+WOGNrYm9zbaEg0oDe3PTtvQ==", + "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^3.6.0", - "tslib": "^2.6.2" + "@smithy/types": "^3.7.1" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@smithy/protocol-http": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.5.tgz", - "integrity": "sha512-hsjtwpIemmCkm3ZV5fd/T0bPIugW1gJXwZ/hpuVubt2hEUApIoUTrf6qIdh9MAWlw0vjMrA1ztJLAwtNaZogvg==", + "node_modules/@smithy/service-error-classification/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^3.6.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@smithy/querystring-builder": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-3.0.8.tgz", - "integrity": "sha512-btYxGVqFUARbUrN6VhL9c3dnSviIwBYD9Rz1jHuN1hgh28Fpv2xjU1HeCeDJX68xctz7r4l1PBnFhGg1WBBPuA==", + "node_modules/@smithy/shared-ini-file-loader": { + "version": "3.1.11", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.11.tgz", + "integrity": "sha512-AUdrIZHFtUgmfSN4Gq9nHu3IkHMa1YDcN+s061Nfm+6pQ0mJy85YQDB0tZBCmls0Vuj22pLwDPmL92+Hvfwwlg==", 
+ "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^3.6.0", - "@smithy/util-uri-escape": "^3.0.0", + "@smithy/types": "^3.7.1", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@smithy/querystring-parser": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-3.0.8.tgz", - "integrity": "sha512-BtEk3FG7Ks64GAbt+JnKqwuobJNX8VmFLBsKIwWr1D60T426fGrV2L3YS5siOcUhhp6/Y6yhBw1PSPxA5p7qGg==", + "node_modules/@smithy/shared-ini-file-loader/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^3.6.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@smithy/service-error-classification": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-3.0.8.tgz", - "integrity": "sha512-uEC/kCCFto83bz5ZzapcrgGqHOh/0r69sZ2ZuHlgoD5kYgXJEThCoTuw/y1Ub3cE7aaKdznb+jD9xRPIfIwD7g==", + "node_modules/@smithy/signature-v4": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-4.2.3.tgz", + "integrity": "sha512-pPSQQ2v2vu9vc8iew7sszLd0O09I5TRc5zhY71KA+Ao0xYazIG+uLeHbTJfIWGO3BGVLiXjUr3EEeCcEQLjpWQ==", + "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^3.6.0" + "@smithy/is-array-buffer": "^3.0.0", + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", + "@smithy/util-hex-encoding": "^3.0.0", + "@smithy/util-middleware": "^3.0.10", + "@smithy/util-uri-escape": "^3.0.0", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@smithy/shared-ini-file-loader": { - "version": "3.1.9", - "resolved": 
"https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", - "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + "node_modules/@smithy/signature-v4/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^3.6.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@smithy/signature-v4": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-4.2.1.tgz", - "integrity": "sha512-NsV1jF4EvmO5wqmaSzlnTVetemBS3FZHdyc5CExbDljcyJCEEkJr8ANu2JvtNbVg/9MvKAWV44kTrGS+Pi4INg==", + "node_modules/@smithy/signature-v4/node_modules/@smithy/util-middleware": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-3.0.10.tgz", + "integrity": "sha512-eJO+/+RsrG2RpmY68jZdwQtnfsxjmPxzMlQpnHKjFPwrYqvlcT+fHdT+ZVwcjlWSrByOhGr9Ff2GG17efc192A==", + "license": "Apache-2.0", "dependencies": { - "@smithy/is-array-buffer": "^3.0.0", - "@smithy/protocol-http": "^4.1.5", - "@smithy/types": "^3.6.0", - "@smithy/util-hex-encoding": "^3.0.0", - "@smithy/util-middleware": "^3.0.8", - "@smithy/util-uri-escape": "^3.0.0", - "@smithy/util-utf8": "^3.0.0", + "@smithy/types": "^3.7.1", "tslib": "^2.6.2" }, "engines": { @@ -3295,16 +4836,29 @@ } }, "node_modules/@smithy/smithy-client": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-3.4.2.tgz", - "integrity": "sha512-dxw1BDxJiY9/zI3cBqfVrInij6ShjpV4fmGHesGZZUiP9OSE/EVfdwdRz0PgvkEvrZHpsj2htRaHJfftE8giBA==", - "dependencies": { - "@smithy/core": "^2.5.1", - "@smithy/middleware-endpoint": "^3.2.1", - "@smithy/middleware-stack": "^3.0.8", - 
"@smithy/protocol-http": "^4.1.5", - "@smithy/types": "^3.6.0", - "@smithy/util-stream": "^3.2.1", + "version": "3.4.5", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-3.4.5.tgz", + "integrity": "sha512-k0sybYT9zlP79sIKd1XGm4TmK0AS1nA2bzDHXx7m0nGi3RQ8dxxQUs4CPkSmQTKAo+KF9aINU3KzpGIpV7UoMw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^2.5.4", + "@smithy/middleware-endpoint": "^3.2.4", + "@smithy/middleware-stack": "^3.0.10", + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", + "@smithy/util-stream": "^3.3.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/smithy-client/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { "tslib": "^2.6.2" }, "engines": { @@ -3323,13 +4877,26 @@ } }, "node_modules/@smithy/url-parser": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-3.0.8.tgz", - "integrity": "sha512-4FdOhwpTW7jtSFWm7SpfLGKIBC9ZaTKG5nBF0wK24aoQKQyDIKUw3+KFWCQ9maMzrgTJIuOvOnsV2lLGW5XjTg==", + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-3.0.10.tgz", + "integrity": "sha512-j90NUalTSBR2NaZTuruEgavSdh8MLirf58LoGSk4AtQfyIymogIhgnGUU2Mga2bkMkpSoC9gxb74xBXL5afKAQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/querystring-parser": "^3.0.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + } + }, + "node_modules/@smithy/url-parser/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", "dependencies": 
{ - "@smithy/querystring-parser": "^3.0.8", - "@smithy/types": "^3.6.0", "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" } }, "node_modules/@smithy/util-base64": { @@ -3388,13 +4955,14 @@ } }, "node_modules/@smithy/util-defaults-mode-browser": { - "version": "3.0.25", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-3.0.25.tgz", - "integrity": "sha512-fRw7zymjIDt6XxIsLwfJfYUfbGoO9CmCJk6rjJ/X5cd20+d2Is7xjU5Kt/AiDt6hX8DAf5dztmfP5O82gR9emA==", - "dependencies": { - "@smithy/property-provider": "^3.1.8", - "@smithy/smithy-client": "^3.4.2", - "@smithy/types": "^3.6.0", + "version": "3.0.28", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-3.0.28.tgz", + "integrity": "sha512-6bzwAbZpHRFVJsOztmov5PGDmJYsbNSoIEfHSJJyFLzfBGCCChiO3od9k7E/TLgrCsIifdAbB9nqbVbyE7wRUw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^3.1.10", + "@smithy/smithy-client": "^3.4.5", + "@smithy/types": "^3.7.1", "bowser": "^2.11.0", "tslib": "^2.6.2" }, @@ -3402,30 +4970,68 @@ "node": ">= 10.0.0" } }, + "node_modules/@smithy/util-defaults-mode-browser/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, "node_modules/@smithy/util-defaults-mode-node": { - "version": "3.0.25", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-3.0.25.tgz", - "integrity": "sha512-H3BSZdBDiVZGzt8TG51Pd2FvFO0PAx/A0mJ0EH8a13KJ6iUCdYnw/Dk/MdC1kTd0eUuUGisDFaxXVXo4HHFL1g==", - "dependencies": { - "@smithy/config-resolver": "^3.0.10", - "@smithy/credential-provider-imds": "^3.2.5", - "@smithy/node-config-provider": 
"^3.1.9", - "@smithy/property-provider": "^3.1.8", - "@smithy/smithy-client": "^3.4.2", - "@smithy/types": "^3.6.0", + "version": "3.0.28", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-3.0.28.tgz", + "integrity": "sha512-78ENJDorV1CjOQselGmm3+z7Yqjj5HWCbjzh0Ixuq736dh1oEnD9sAttSBNSLlpZsX8VQnmERqA2fEFlmqWn8w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/config-resolver": "^3.0.12", + "@smithy/credential-provider-imds": "^3.2.7", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/property-provider": "^3.1.10", + "@smithy/smithy-client": "^3.4.5", + "@smithy/types": "^3.7.1", "tslib": "^2.6.2" }, "engines": { "node": ">= 10.0.0" } }, + "node_modules/@smithy/util-defaults-mode-node/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, "node_modules/@smithy/util-endpoints": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-2.1.4.tgz", - "integrity": "sha512-kPt8j4emm7rdMWQyL0F89o92q10gvCUa6sBkBtDJ7nV2+P7wpXczzOfoDJ49CKXe5CCqb8dc1W+ZdLlrKzSAnQ==", + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-2.1.6.tgz", + "integrity": "sha512-mFV1t3ndBh0yZOJgWxO9J/4cHZVn5UG1D8DeCc6/echfNkeEJWu9LD7mgGH5fHrEdR7LDoWw7PQO6QiGpHXhgA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^3.1.11", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/util-endpoints/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": 
"sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^3.1.9", - "@smithy/types": "^3.6.0", "tslib": "^2.6.2" }, "engines": { @@ -3456,12 +5062,25 @@ } }, "node_modules/@smithy/util-retry": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-3.0.8.tgz", - "integrity": "sha512-TCEhLnY581YJ+g1x0hapPz13JFqzmh/pMWL2KEFASC51qCfw3+Y47MrTmea4bUE5vsdxQ4F6/KFbUeSz22Q1ow==", + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-3.0.10.tgz", + "integrity": "sha512-1l4qatFp4PiU6j7UsbasUHL2VU023NRB/gfaa1M0rDqVrRN4g3mCArLRyH3OuktApA4ye+yjWQHjdziunw2eWA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/service-error-classification": "^3.0.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/util-retry/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", "dependencies": { - "@smithy/service-error-classification": "^3.0.8", - "@smithy/types": "^3.6.0", "tslib": "^2.6.2" }, "engines": { @@ -3469,13 +5088,14 @@ } }, "node_modules/@smithy/util-stream": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-3.2.1.tgz", - "integrity": "sha512-R3ufuzJRxSJbE58K9AEnL/uSZyVdHzud9wLS8tIbXclxKzoe09CRohj2xV8wpx5tj7ZbiJaKYcutMm1eYgz/0A==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-3.3.1.tgz", + "integrity": "sha512-Ff68R5lJh2zj+AUTvbAU/4yx+6QPRzg7+pI7M1FbtQHcRIp7xvguxVsQBKyB3fwiOwhAKu0lnNyYBaQfSW6TNw==", + "license": "Apache-2.0", "dependencies": { - "@smithy/fetch-http-handler": 
"^4.0.0", - "@smithy/node-http-handler": "^3.2.5", - "@smithy/types": "^3.6.0", + "@smithy/fetch-http-handler": "^4.1.1", + "@smithy/node-http-handler": "^3.3.1", + "@smithy/types": "^3.7.1", "@smithy/util-base64": "^3.0.0", "@smithy/util-buffer-from": "^3.0.0", "@smithy/util-hex-encoding": "^3.0.0", @@ -3487,17 +5107,30 @@ } }, "node_modules/@smithy/util-stream/node_modules/@smithy/fetch-http-handler": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-4.0.0.tgz", - "integrity": "sha512-MLb1f5tbBO2X6K4lMEKJvxeLooyg7guq48C2zKr4qM7F2Gpkz4dc+hdSgu77pCJ76jVqFBjZczHYAs6dp15N+g==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-4.1.1.tgz", + "integrity": "sha512-bH7QW0+JdX0bPBadXt8GwMof/jz0H28I84hU1Uet9ISpzUqXqRQ3fEZJ+ANPOhzSEczYvANNl3uDQDYArSFDtA==", + "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^4.1.5", - "@smithy/querystring-builder": "^3.0.8", - "@smithy/types": "^3.6.0", + "@smithy/protocol-http": "^4.1.7", + "@smithy/querystring-builder": "^3.0.10", + "@smithy/types": "^3.7.1", "@smithy/util-base64": "^3.0.0", "tslib": "^2.6.2" } }, + "node_modules/@smithy/util-stream/node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, "node_modules/@smithy/util-uri-escape": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-3.0.0.tgz", @@ -3649,6 +5282,15 @@ "@types/node": "*" } }, + "node_modules/@types/buffers": { + "version": "0.1.31", + "resolved": "https://registry.npmjs.org/@types/buffers/-/buffers-0.1.31.tgz", + "integrity": 
"sha512-wEZBb3o0Kh5RAj3V172vJCcxaCV8C2HJ7YLBBlG5Mwue0g4uRg5LWv8C6ap8MyFbXE6UbYEuvtHY7oTWAPeXEw==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/cacheable-request": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz", @@ -4833,6 +6475,162 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/aws-msk-iam-sasl-signer-js": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/aws-msk-iam-sasl-signer-js/-/aws-msk-iam-sasl-signer-js-1.0.0.tgz", + "integrity": "sha512-L0Jk0k2XNHMSGipJ8rRdTq51KrH/gwrfZ39iKY9BWHGOAv7EygsG4qJC7lIRsbu5/ZHB886Z3WsOsFxqR2R4XQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-js": "^4.0.0", + "@aws-sdk/client-sts": "^3.378.0", + "@aws-sdk/credential-providers": "^3.370.0", + "@aws-sdk/util-format-url": "^3.347.0", + "@smithy/signature-v4": "^2.0.1", + "@types/buffers": "0.1.31" + }, + "engines": { + "node": ">=14.x" + } + }, + "node_modules/aws-msk-iam-sasl-signer-js/node_modules/@aws-crypto/sha256-js": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-4.0.0.tgz", + "integrity": "sha512-MHGJyjE7TX9aaqXj7zk2ppnFUOhaDs5sP+HtNS0evOxn72c+5njUmyJmpGd7TfyoDznZlHMmdo/xGUdu2NIjNQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^4.0.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^1.11.1" + } + }, + "node_modules/aws-msk-iam-sasl-signer-js/node_modules/@aws-crypto/sha256-js/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "license": "0BSD" + }, + "node_modules/aws-msk-iam-sasl-signer-js/node_modules/@aws-crypto/util": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-4.0.0.tgz", + "integrity": 
"sha512-2EnmPy2gsFZ6m8bwUQN4jq+IyXV3quHAcwPOS6ZA3k+geujiqI8aRokO2kFJe+idJ/P3v4qWI186rVMo0+zLDQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-utf8-browser": "^3.0.0", + "tslib": "^1.11.1" + } + }, + "node_modules/aws-msk-iam-sasl-signer-js/node_modules/@aws-crypto/util/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "license": "0BSD" + }, + "node_modules/aws-msk-iam-sasl-signer-js/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/aws-msk-iam-sasl-signer-js/node_modules/@smithy/signature-v4": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-2.3.0.tgz", + "integrity": "sha512-ui/NlpILU+6HAQBfJX8BBsDXuKSNrjTSuOYArRblcrErwKFutjrCNb/OExfVRyj9+26F9J+ZmfWT+fKWuDrH3Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "@smithy/types": "^2.12.0", + "@smithy/util-hex-encoding": "^2.2.0", + "@smithy/util-middleware": "^2.2.0", + "@smithy/util-uri-escape": "^2.2.0", + "@smithy/util-utf8": "^2.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/aws-msk-iam-sasl-signer-js/node_modules/@smithy/types": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-2.12.0.tgz", + "integrity": "sha512-QwYgloJ0sVNBeBuBs65cIkTbfzV/Q6ZNPCJ99EICFEdJYG50nGIY/uYXp+TbsdJReIuPr0a0kXmCvren3MbRRw==", + "license": "Apache-2.0", + "dependencies": { + 
"tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/aws-msk-iam-sasl-signer-js/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/aws-msk-iam-sasl-signer-js/node_modules/@smithy/util-hex-encoding": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-2.2.0.tgz", + "integrity": "sha512-7iKXR+/4TpLK194pVjKiasIyqMtTYJsgKgM242Y9uzt5dhHnUDvMNb+3xIhRJ9QhvqGii/5cRUt4fJn3dtXNHQ==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/aws-msk-iam-sasl-signer-js/node_modules/@smithy/util-middleware": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-2.2.0.tgz", + "integrity": "sha512-L1qpleXf9QD6LwLCJ5jddGkgWyuSvWBkJwWAZ6kFkdifdso+sk3L3O1HdmPvCdnCK3IS4qWyPxev01QMnfHSBw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^2.12.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/aws-msk-iam-sasl-signer-js/node_modules/@smithy/util-uri-escape": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-2.2.0.tgz", + "integrity": "sha512-jtmJMyt1xMD/d8OtbVJ2gFZOSKc+ueYJZPW20ULW1GOp/q/YIM0wNh+u8ZFao9UaIGz4WoPW8hC64qlWLIfoDA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/aws-msk-iam-sasl-signer-js/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": 
"https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, "node_modules/aws-sdk": { "version": "2.1691.0", "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1691.0.tgz", @@ -4862,6 +6660,12 @@ "uuid": "dist/bin/uuid" } }, + "node_modules/aws4": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.13.2.tgz", + "integrity": "sha512-lHe62zvbTB5eEABUVi/AwVh0ZKY9rMMDhmm+eeyuuUQbQ3+J+fONVQOZyj+DdrvD4BY33uYniyRJ4UJIaSKAfw==", + "license": "MIT" + }, "node_modules/axios": { "version": "1.7.7", "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.7.tgz", @@ -9323,11 +11127,6 @@ "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", "dev": true }, - "node_modules/hpagent": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/hpagent/-/hpagent-0.1.2.tgz", - "integrity": "sha512-ePqFXHtSQWAFXYmj+JtOTHr84iNrII4/QRlAAPPE+zqnKy4xJo7Ie1Y4kC7AdB+LxLxSTTzBMASsEcy0q8YyvQ==" - }, "node_modules/html-encoding-sniffer": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz", @@ -11368,6 +13167,15 @@ "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", "dev": true }, + "node_modules/json11": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/json11/-/json11-2.0.0.tgz", + "integrity": "sha512-VuKJKUSPEJape+daTm70Nx7vdcdorf4S6LCyN2z0jUVH4UrQ4ftXo2kC0bnHpCREmxHuHqCNVPA75BjI3CB6Ag==", + "license": "MIT", + "bin": { + "json11": "dist/cli.mjs" + } + }, "node_modules/json5": { "version": "0.5.1", "resolved": "https://registry.npmjs.org/json5/-/json5-0.5.1.tgz", diff --git 
a/aoe-web-backend/package.json b/aoe-web-backend/package.json index a7d057d01..3d691fb7a 100644 --- a/aoe-web-backend/package.json +++ b/aoe-web-backend/package.json @@ -32,7 +32,7 @@ "build": "npm run build-ts && npm run lint", "build-ts": "npx tsc && tsc-alias", "watch-ts": "npx tsc -w", - "lint": "npx eslint src/**/*.{json,ts} test/**/*.{json,ts}", + "lint": "npx eslint src/**/*.ts test/**/*.ts", "test": "cross-env DOTENV_CONFIG_PATH=test.env jest --detectOpenHandles", "test:ci": "npm run test", "test:compose": "docker compose -f docker/docker-compose.ci.yml up --abort-on-container-exit --build --force-recreate --remove-orphans", @@ -40,10 +40,11 @@ "debug:sequelize": "cross-env DEBUG=sequelize:* node dist/server.js" }, "dependencies": { - "@elastic/elasticsearch": "^7.9.1", "@lumieducation/h5p-express": "^9.3.2", "@lumieducation/h5p-server": "^9.3.2", + "@opensearch-project/opensearch": "^2.13.0", "@types/cookie-session": "^2.0.49", + "aws-msk-iam-sasl-signer-js": "^1.0.0", "aws-sdk": "^2.789.0", "bluebird": "^3.7.2", "body-parser": "^1.19.0", diff --git a/aoe-web-backend/sql/001_tables.sql b/aoe-web-backend/sql/001_tables.sql deleted file mode 100644 index 417468749..000000000 --- a/aoe-web-backend/sql/001_tables.sql +++ /dev/null @@ -1,383 +0,0 @@ --- DROP TABLE IF EXISTS EducationalMaterialCollectionEducationalMaterial CASCADE; --- DROP TABLE IF EXISTS UsersEducationalMaterialCollection CASCADE; --- DROP TABLE IF EXISTS IsBasedOn CASCADE; --- DROP TABLE IF EXISTS EducationalUse CASCADE; --- DROP TABLE IF EXISTS Record CASCADE; --- DROP TABLE IF EXISTS CollectionEducationalUse CASCADE; --- DROP TABLE IF EXISTS CollectionAlignmentObject CASCADE; --- DROP TABLE IF EXISTS CollectionEducationalFramework CASCADE; --- DROP TABLE IF EXISTS CollectionTopic CASCADE; --- DROP TABLE IF EXISTS CollectionEducationalLevel CASCADE; --- DROP TABLE IF EXISTS CollectionEducationalAudience CASCADE; --- DROP TABLE IF EXISTS CollectionLanguage CASCADE; --- DROP TABLE IF EXISTS 
CollectionKeyWord CASCADE; --- DROP TABLE IF EXISTS EducationalMaterialCollection CASCADE; --- DROP TABLE IF EXISTS KeyWord CASCADE; --- DROP TABLE IF EXISTS EducationalLevel CASCADE; --- DROP TABLE IF EXISTS AccessibilityControl CASCADE; --- DROP TABLE IF EXISTS AccessibilityAPI CASCADE; --- DROP TABLE IF EXISTS AccessibilityHazard CASCADE; --- DROP TABLE IF EXISTS AccessibilityFeature CASCADE; --- DROP TABLE IF EXISTS EducationalRole CASCADE; --- DROP TABLE IF EXISTS LearningResourceType CASCADE; --- DROP TABLE IF EXISTS AlignmentObject CASCADE; --- DROP TABLE IF EXISTS InLanguage CASCADE; --- DROP TABLE IF EXISTS License CASCADE; --- DROP TABLE IF EXISTS EducationalAudience CASCADE; --- DROP TABLE IF EXISTS Material CASCADE; --- DROP TABLE IF EXISTS EducationalMaterial CASCADE; --- DROP TABLE IF EXISTS Users CASCADE; --- DROP TABLE IF EXISTS Publisher CASCADE; --- DROP TABLE IF EXISTS MaterialName CASCADE; --- DROP TABLE IF EXISTS MaterialDescription CASCADE; --- DROP TABLE IF EXISTS Author CASCADE; --- DROP TABLE IF EXISTS MaterialDisplayName CASCADE; --- DROP TABLE IF EXISTS TemporaryRecord CASCADE; --- DROP TABLE IF EXISTS Thumbnail CASCADE; --- DROP TABLE IF EXISTS Prerequisites CASCADE; - -CREATE TYPE lang AS ENUM ('fi', 'en', 'sv'); - -CREATE TABLE Users ( - Id BIGSERIAL NOT NULL, - FirstName text NOT NULL, - LastName text NOT NULL, - UserName text NOT NULL, - PreferredLanguage lang DEFAULT 'fi' NOT NULL, - PreferredTargetName text NOT NULL, - PreferredAlignmentType text NOT NULL, - TermsOfUsage bool DEFAULT '0' NOT NULL, - PRIMARY KEY (UserName)); - -CREATE TABLE EducationalMaterial ( - Id BIGSERIAL NOT NULL, - CreatedAt timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, - PublishedAt timestamp with time zone, - UpdatedAt timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, - ArchivedAt timestamp with time zone, - --TechnicalName text DEFAULT '' NOT NULL, - TimeRequired text DEFAULT '' NOT NULL, - AgeRangeMin int4 DEFAULT -1 NOT NULL, - 
AgeRangeMax int4 DEFAULT -1 NOT NULL, - LicenseCode text DEFAULT '' NOT NULL, - Obsoleted int4 DEFAULT 0 NOT NULL, - OriginalPublishedAt timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, - UsersUserName text NOT NULL, - Expires timestamp with time zone, - SuitsAllEarlyChildhoodSubjects bool DEFAULT 'false' NOT NULL, - SuitsAllPrePrimarySubjects bool DEFAULT 'false' NOT NULL, - SuitsAllBasicStudySubjects bool DEFAULT 'false' NOT NULL, - SuitsAllUpperSecondarySubjects bool DEFAULT 'false' NOT NULL, - SuitsAllVocationalDegrees bool DEFAULT 'false' NOT NULL, - SuitsAllSelfMotivatedSubjects bool DEFAULT 'false' NOT NULL, - SuitsAllBranches bool DEFAULT 'false' NOT NULL, - PRIMARY KEY (Id)); - - -CREATE TABLE Material ( - Id BIGSERIAL NOT NULL, - Link text NOT NULL, - EducationalMaterialId int8 NOT NULL, - Obsoleted int4 DEFAULT 0 NOT NULL, - Priority int4 DEFAULT 0 NOT NULL, - MaterialLanguageKey lang DEFAULT 'fi' NOT NULL, - PRIMARY KEY (Id)); - - - CREATE TABLE EducationalAudience ( - Id BIGSERIAL NOT NULL, - EducationalRole text NOT NULL, - EducationalMaterialId int8 NOT NULL, - EducationalRoleKey text NOT NULL, - PRIMARY KEY (Id)); - -CREATE TABLE InLanguage ( - Id BIGSERIAL NOT NULL, - InLanguage text NOT NULL, - Url text NOT NULL, - EducationalMaterialId int8 NOT NULL, - PRIMARY KEY (Id)); -CREATE TABLE AlignmentObject ( - Id BIGSERIAL NOT NULL, - EducationalMaterialId int8 NOT NULL, - AlignmentType text NOT NULL, - TargetName text NOT NULL, - Source text NOT NULL, - EducationalFramework text DEFAULT '' NOT NULL, - ObjectKey text NOT NULL, - TargetUrl text, - PRIMARY KEY (Id)); - -CREATE TABLE LearningResourceType ( - Id BIGSERIAL NOT NULL, - Value text NOT NULL, - EducationalMaterialId int8 NOT NULL, - LearningResourceTypeKey text, - PRIMARY KEY (Id)); --- CREATE TABLE AccessibilityControl ( --- Id BIGSERIAL NOT NULL, --- Value text NOT NULL, --- EducationalMaterialId int8 NOT NULL, --- PRIMARY KEY (Id)); --- CREATE TABLE AccessibilityAPI ( --- Id 
BIGSERIAL NOT NULL, --- Value text NOT NULL, --- EducationalMaterialId int8 NOT NULL, --- PRIMARY KEY (Id)); -CREATE TABLE AccessibilityHazard ( - Id BIGSERIAL NOT NULL, - Value text NOT NULL, - EducationalMaterialId int8 NOT NULL, - AccessibilityHazardKey text NOT NULL, - PRIMARY KEY (Id)); - -CREATE TABLE AccessibilityFeature ( - Id BIGSERIAL NOT NULL, - Value text NOT NULL, - EducationalMaterialId int8 NOT NULL, - AccessibilityFeatureKey text NOT NULL, - PRIMARY KEY (Id)); - - CREATE TABLE EducationalLevel ( - Id BIGSERIAL NOT NULL, - Value text NOT NULL, - EducationalMaterialId int8 NOT NULL, - EducationalLevelKey text NOT NULL, - PRIMARY KEY (Id)); - -CREATE TABLE KeyWord ( - Id BIGSERIAL NOT NULL, - Value text NOT NULL, - EducationalMaterialId int8 NOT NULL, - KeywordKey text NOT NULL, - PRIMARY KEY (Id)); --- CREATE TABLE EducationalMaterialCollection ( --- Id BIGSERIAL NOT NULL, --- Name text NOT NULL, --- Type text NOT NULL, --- CreationDate date NOT NULL, --- Description text NOT NULL, --- CreatedBy text NOT NULL, --- TechnicalName text NOT NULL, --- AgeRangeMin int4 DEFAULT 0 NOT NULL, --- AgeRangeMax int4 DEFAULT 99 NOT NULL, --- PRIMARY KEY (Id)); --- CREATE TABLE CollectionKeyWord ( --- Id BIGSERIAL NOT NULL, --- Value text NOT NULL, --- EducationalMaterialCollectionId int8 NOT NULL, --- PRIMARY KEY (Id)); --- CREATE TABLE CollectionLanguage ( --- Id BIGSERIAL NOT NULL, --- Language text NOT NULL, --- EducationalMaterialCollectionId int8 NOT NULL, --- PRIMARY KEY (Id)); --- CREATE TABLE CollectionEducationalAudience ( --- Id BIGSERIAL NOT NULL, --- Audience text NOT NULL, --- EducationalMaterialCollectionId int8 NOT NULL, --- PRIMARY KEY (Id)); --- CREATE TABLE CollectionEducationalLevel ( --- Id BIGSERIAL NOT NULL, --- EducationalLevel text NOT NULL, --- EducationalMaterialCollectionId int8 NOT NULL, --- PRIMARY KEY (Id)); --- CREATE TABLE CollectionTopic ( --- Id BIGSERIAL NOT NULL, --- Topic text NOT NULL, --- EducationalMaterialCollectionId int8 
NOT NULL, --- PRIMARY KEY (Id)); --- CREATE TABLE CollectionEducationalFramework ( --- Id BIGSERIAL NOT NULL, --- Framework text NOT NULL, --- EducationalMaterialCollectionId int8 NOT NULL, --- PRIMARY KEY (Id)); --- CREATE TABLE CollectionAlignmentObject ( --- Id BIGSERIAL NOT NULL, --- AlignmentType text NOT NULL, --- EducationalMaterialCollectionId int8 NOT NULL, --- PRIMARY KEY (Id)); --- CREATE TABLE CollectionEducationalUse ( --- Id BIGSERIAL NOT NULL, --- EducationalUse text NOT NULL, --- EducationalMaterialCollectionId int8 NOT NULL, --- PRIMARY KEY (Id)); -CREATE TABLE Record ( - Id BIGSERIAL NOT NULL, - FilePath text NOT NULL, - OriginalFileName text NOT NULL, - FileSize int4 NOT NULL, - MimeType text NOT NULL, - Format text NOT NULL, - MaterialId int8 NOT NULL, - FileKey text NOT NULL, - FileBucket text NOT NULL, - PRIMARY KEY (Id)); - - CREATE TABLE EducationalUse ( - Id BIGSERIAL NOT NULL, - Value text NOT NULL, - EducationalMaterialId int8 NOT NULL, - EducationalUseKey text NOT NULL, - PRIMARY KEY (Id)); - -CREATE TABLE IsBasedOn ( - Id BIGSERIAL NOT NULL, - Author text NOT NULL, - Url text NOT NULL, - MaterialName text NOT NULL, - EducationalMaterialId int8 NOT NULL, - PRIMARY KEY (Id)); --- CREATE TABLE UsersEducationalMaterialCollection ( --- EducationalMaterialCollectionId int8 NOT NULL, --- UsersUserName text NOT NULL, --- PRIMARY KEY (EducationalMaterialCollectionId, --- UsersUserName)); --- CREATE TABLE EducationalMaterialCollectionEducationalMaterial ( --- EducationalMaterialCollectionId int8 NOT NULL, --- EducationalMaterialId int8 NOT NULL, --- PRIMARY KEY (EducationalMaterialCollectionId, --- EducationalMaterialId)); -CREATE TABLE Publisher ( - Id BIGSERIAL NOT NULL, - Name text NOT NULL, - EducationalMaterialId int8 NOT NULL, - PublisherKey text NOT NULL, - PRIMARY KEY (Id)); - -CREATE TABLE MaterialDescription ( - Id BIGSERIAL NOT NULL, - Description text NOT NULL, - Language lang NOT NULL, - EducationalMaterialId int8 NOT NULL, - PRIMARY 
KEY (Id)); -CREATE TABLE MaterialName ( - Id BIGSERIAL NOT NULL, - MaterialName text DEFAULT '' NOT NULL, - Language lang NOT NULL, - Slug text DEFAULT '' NOT NULL, - EducationalMaterialId int8 NOT NULL, - PRIMARY KEY (Id)); - - CREATE TABLE Author ( - Id BIGSERIAL NOT NULL, - authorname text NOT NULL, - organization text NOT NULL, - EducationalMaterialId int8 NOT NULL, - organizationkey text NOT NULL, - PRIMARY KEY (Id)); - - CREATE TABLE MaterialDisplayName ( - Id BIGSERIAL NOT NULL, - DisplayName text NOT NULL, - Language lang NOT NULL, - MaterialId int8 NOT NULL, --- Slug text NOT NULL, - PRIMARY KEY (Id)); - - CREATE TABLE temporaryrecord ( - Id BIGSERIAL NOT NULL, - FilePath text NOT NULL, - OriginalFileName text NOT NULL, - Filesize int4 NOT NULL, - Mimetype text NOT NULL, - Format text NOT NULL, - FileName text NOT NULL, - MaterialId int8 NOT NULL, - CreatedAt timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, - PRIMARY KEY (Id)); - -CREATE TABLE Thumbnail ( - Id BIGSERIAL NOT NULL, - FilePath text NOT NULL, - MimeType text NOT NULL, - EducationalMaterialId int8 NOT NULL, - FileName text NOT NULL, - Obsoleted int4 DEFAULT 0 NOT NULL, - FileKey text NOT NULL, - FileBucket text NOT NULL, - PRIMARY KEY (Id)); - -CREATE TABLE Attachment ( - Id BIGSERIAL NOT NULL, - FilePath text NOT NULL, - OriginalFileName text NOT NULL, - FileSize int4 NOT NULL, - MimeType text NOT NULL, - Format text NOT NULL, - FileKey text NOT NULL, - FileBucket text NOT NULL, - DefaultFile bool NOT NULL, - Kind text NOT NULL, - Label text NOT NULL, - Srclang text NOT NULL, - MaterialId int8 NOT NULL, - PRIMARY KEY (Id)); - -CREATE TABLE TemporaryAttachment ( - Id BIGSERIAL NOT NULL, - FilePath text NOT NULL, - OriginalFileName text NOT NULL, - Filesize int4 NOT NULL, - Mimetype text NOT NULL, - Format text NOT NULL, - FileName text NOT NULL, - CreatedAt timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, - MaterialId int8 NOT NULL, - DefaultFile bool NOT NULL, - Kind 
text NOT NULL, - Label text NOT NULL, - Srclang text NOT NULL, - PRIMARY KEY (Id)); - -ALTER TABLE AlignmentObject ADD CONSTRAINT FKAlignmentObject FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; -ALTER TABLE EducationalMaterial ADD CONSTRAINT FKEducationalMaterial FOREIGN KEY (UsersUserName) REFERENCES Users (UserName) ON DELETE Restrict; -ALTER TABLE EducationalAudience ADD CONSTRAINT FKEducationalAudience FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; -ALTER TABLE LearningResourceType ADD CONSTRAINT FKLearningResourceType FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; -ALTER TABLE KeyWord ADD CONSTRAINT FKKeyWord FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; -ALTER TABLE EducationalUse ADD CONSTRAINT FKEducationalUse FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; -ALTER TABLE IsBasedOn ADD CONSTRAINT FKIsBasedOn FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; -ALTER TABLE InLanguage ADD CONSTRAINT FKInLanguage FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; -ALTER TABLE Material ADD CONSTRAINT FKMaterial FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Restrict; -ALTER TABLE Record ADD CONSTRAINT FKRecord FOREIGN KEY (MaterialId) REFERENCES Material (Id) ON DELETE Restrict; --- ALTER TABLE CollectionKeyWord ADD CONSTRAINT FKCollectionKeyWord FOREIGN KEY (EducationalMaterialCollectionId) REFERENCES EducationalMaterialCollection (Id) ON DELETE Cascade; --- ALTER TABLE CollectionLanguage ADD CONSTRAINT FKCollectionLanguage FOREIGN KEY (EducationalMaterialCollectionId) REFERENCES EducationalMaterialCollection (Id) ON DELETE Cascade; --- ALTER TABLE CollectionEducationalAudience ADD CONSTRAINT FKCollectionEducationalAudience FOREIGN 
KEY (EducationalMaterialCollectionId) REFERENCES EducationalMaterialCollection (Id) ON DELETE Cascade; --- ALTER TABLE CollectionEducationalLevel ADD CONSTRAINT FKCollectionEducationalLevel FOREIGN KEY (EducationalMaterialCollectionId) REFERENCES EducationalMaterialCollection (Id) ON DELETE Cascade; --- ALTER TABLE CollectionTopic ADD CONSTRAINT FKCollectionTopic FOREIGN KEY (EducationalMaterialCollectionId) REFERENCES EducationalMaterialCollection (Id) ON DELETE Cascade; --- ALTER TABLE CollectionEducationalFramework ADD CONSTRAINT FKCollectionEducationalFramework FOREIGN KEY (EducationalMaterialCollectionId) REFERENCES EducationalMaterialCollection (Id) ON DELETE Cascade; --- ALTER TABLE CollectionAlignmentObject ADD CONSTRAINT FKCollectionAlignmentObject FOREIGN KEY (EducationalMaterialCollectionId) REFERENCES EducationalMaterialCollection (Id) ON DELETE Cascade; --- ALTER TABLE CollectionEducationalUse ADD CONSTRAINT fk_CollectionEducationalUse FOREIGN KEY (EducationalMaterialCollectionId) REFERENCES EducationalMaterialCollection (Id) ON DELETE Cascade; --- ALTER TABLE UsersEducationalMaterialCollection ADD CONSTRAINT fk_UsersEMC FOREIGN KEY (UsersUserName) REFERENCES Users (username) ON DELETE Restrict; --- ALTER TABLE UsersEducationalMaterialCollection ADD CONSTRAINT fk_EMCUsers FOREIGN KEY (EducationalMaterialCollectionId) REFERENCES EducationalMaterialCollection (Id) ON DELETE Cascade; --- ALTER TABLE EducationalMaterialCollectionEducationalMaterial ADD CONSTRAINT fk_EMCMaterial FOREIGN KEY (EducationalMaterialCollectionId) REFERENCES EducationalMaterialCollection (Id) ON DELETE Cascade; --- ALTER TABLE EducationalMaterialCollectionEducationalMaterial ADD CONSTRAINT fk_MaterialEMC FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Restrict; -ALTER TABLE EducationalLevel ADD CONSTRAINT fk_EducationalLevel FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; -ALTER TABLE Publisher ADD 
CONSTRAINT fk_Publisher FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; -ALTER TABLE MaterialDescription ADD CONSTRAINT fk_Description FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; -ALTER TABLE MaterialName ADD CONSTRAINT fk_MaterialName FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; -ALTER TABLE AccessibilityFeature ADD CONSTRAINT fk_AccessibilityFeature FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; -ALTER TABLE AccessibilityHazard ADD CONSTRAINT fk_AccessibilityHazard FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; --- ALTER TABLE AccessibilityAPI ADD CONSTRAINT fk_AccessibilityAPI FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); --- ALTER TABLE AccessibilityControl ADD CONSTRAINT fk_AccessibilityControl FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); -ALTER TABLE Author ADD CONSTRAINT fk_author FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; -ALTER TABLE MaterialDisplayName ADD CONSTRAINT fk_MaterialDisplayName FOREIGN KEY (MaterialId) REFERENCES Material (Id) ON DELETE Cascade; --- ALTER TABLE temporaryrecord ADD CONSTRAINT fk_temporaryrecord FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); -ALTER TABLE temporaryrecord ADD CONSTRAINT fk_temporaryrecord FOREIGN KEY (MaterialId) REFERENCES Material (Id) ON DELETE Restrict; -ALTER TABLE Thumbnail ADD CONSTRAINT fk_thumbnail FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Restrict; - -ALTER TABLE materialname ADD CONSTRAINT constraint_lang_id UNIQUE (language,educationalmaterialid); -ALTER TABLE materialdescription ADD CONSTRAINT constraint_materialdescription_lang_id UNIQUE (language,educationalmaterialid); -ALTER TABLE educationalaudience ADD 
CONSTRAINT constraint_educationalaudience UNIQUE (educationalrolekey,educationalmaterialid); -ALTER TABLE educationaluse ADD CONSTRAINT constraint_educationaluse UNIQUE (educationalusekey,educationalmaterialid); -ALTER TABLE learningresourcetype ADD CONSTRAINT constraint_learningresourcetype UNIQUE (learningresourcetypekey,educationalmaterialid); -ALTER TABLE inlanguage ADD CONSTRAINT constraint_inlanguage UNIQUE (inlanguage,educationalmaterialid); -ALTER TABLE keyword ADD CONSTRAINT constraint_keyword UNIQUE (keywordkey,educationalmaterialid); -ALTER TABLE publisher ADD CONSTRAINT constraint_publisher UNIQUE (PublisherKey,educationalmaterialid); -ALTER TABLE isbasedon ADD CONSTRAINT constraint_isbasedon UNIQUE (author, materialname,educationalmaterialid); -ALTER TABLE alignmentobject ADD CONSTRAINT constraint_alignmentobject UNIQUE (alignmentType, targetName, source, educationalmaterialid); -ALTER TABLE materialdisplayname ADD CONSTRAINT constraint_materialdisplayname UNIQUE (language, materialid); --- ALTER TABLE thumbnail ADD CONSTRAINT constraint_thumbnail UNIQUE (educationalmaterialid); -ALTER TABLE accessibilityfeature ADD CONSTRAINT constraint_accessibilityfeature UNIQUE (accessibilityfeaturekey,educationalmaterialid); -ALTER TABLE accessibilityhazard ADD CONSTRAINT constraint_accessibilityhazard UNIQUE (accessibilityhazardkey,educationalmaterialid); -ALTER TABLE EducationalLevel ADD CONSTRAINT constraint_EducationalLevel UNIQUE (EducationalLevelKey,educationalmaterialid); \ No newline at end of file diff --git a/aoe-web-backend/sql/002_metadata_changes.sql b/aoe-web-backend/sql/002_metadata_changes.sql deleted file mode 100644 index 43c02fd58..000000000 --- a/aoe-web-backend/sql/002_metadata_changes.sql +++ /dev/null @@ -1,320 +0,0 @@ -ALTER TABLE isbasedon ALTER COLUMN author DROP not null; -ALTER TABLE Material ALTER COLUMN MaterialLanguageKey TYPE TEXT; - -ALTER TABLE IF EXISTS isbasedon DROP CONSTRAINT constraint_isbasedon; -ALTER TABLE isbasedon ADD 
CONSTRAINT constraint_isbasedon UNIQUE (materialname,educationalmaterialid); - -CREATE TABLE IsBasedOnAuthor ( - Id BIGSERIAL NOT NULL, - AuthorName text NOT NULL, - IsBasedOnId int8 NOT NULL, - PRIMARY KEY (Id)); -ALTER TABLE IsBasedOnAuthor ADD CONSTRAINT FKIsBasedOnAuthor FOREIGN KEY (IsBasedOnId) REFERENCES IsBasedOn (Id); - -ALTER TABLE isbasedon DROP COLUMN author; - -ALTER TABLE attachment ADD COLUMN Obsoleted int4 DEFAULT 0 NOT NULL; - ---24.3.2020 feat-582 -ALTER TABLE educationalmaterial ADD COLUMN SuitsAllUpperSecondarySubjectsNew bool DEFAULT 'false' NOT NULL; - ---feat 592 -CREATE TABLE Rating ( - Id BIGSERIAL NOT NULL, - RatingContent int4, - RatingVisual int4, - FeedbackPositive varchar(1000), - FeedbackSuggest varchar(1000), - FeedbackPurpose varchar(1000), - EducationalMaterialId int8 NOT NULL, - UsersUserName text NOT NULL, - UpdatedAt timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL, - PRIMARY KEY (Id)); -ALTER TABLE Rating ADD CONSTRAINT FKRatingUsers FOREIGN KEY (UsersUserName) REFERENCES Users (UserName); -ALTER TABLE Rating ADD CONSTRAINT FKRatingEducationalMaterial FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); -ALTER TABLE Rating ADD CONSTRAINT constraint_Rating UNIQUE (UsersUserName,educationalmaterialid); -ALTER TABLE educationalmaterial ADD COLUMN RatingContentAverage NUMERIC (2, 1); -ALTER TABLE educationalmaterial ADD COLUMN RatingVisualAverage NUMERIC (2, 1); - ---feat 628 -ALTER TABLE educationalmaterial ALTER COLUMN agerangemin DROP DEFAULT; -ALTER TABLE educationalmaterial ALTER COLUMN agerangemin DROP NOT NULL; -ALTER TABLE educationalmaterial ALTER COLUMN agerangemax DROP DEFAULT; -ALTER TABLE educationalmaterial ALTER COLUMN agerangemax DROP NOT NULL; -ALTER TABLE educationalmaterial ALTER COLUMN expires DROP NOT NULL; -ALTER TABLE alignmentobject DROP CONSTRAINT constraint_alignmentobject; -ALTER TABLE alignmentobject ADD CONSTRAINT constraint_alignmentobject UNIQUE (alignmentType, objectkey, source, 
educationalmaterialid); - --- 2.0.0 - -CREATE TABLE VersionComposition ( - EducationalMaterialId int8 NOT NULL, - MaterialId int8 NOT NULL, - PublishedAt timestamp NOT NULL, - Priority int4, - PRIMARY KEY (EducationalMaterialId, - MaterialId, - PublishedAt)); -ALTER TABLE VersionComposition ADD CONSTRAINT FKMaterialVersion FOREIGN KEY (MaterialId) REFERENCES Material (Id); -ALTER TABLE VersionComposition ADD CONSTRAINT FKEducationalMaterialVersion FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); - -INSERT INTO VersionComposition (EducationalMaterialId, PublishedAt, MaterialId, priority) -SELECT em.Id as EducationalMaterialId, em.PublishedAt::timestamp(3), m.Id as MaterialId, m.Priority -FROM EducationalMaterial AS em -JOIN Material AS m -ON em.Id = m.EducationalMaterialId -WHERE em.PublishedAt IS NOT NULL; ---ON CONFLICT (EducationalMaterialId, PublishedAt, MaterialId) DO NOTHING; - -CREATE TABLE AttachmentVersionComposition ( - VersionEducationalMaterialId int8 NOT NULL, - VersionMaterialId int8 NOT NULL, - VersionPublishedAt timestamp NOT NULL, - AttachmentId int8 NOT NULL, - PRIMARY KEY (VersionEducationalMaterialId, - VersionMaterialId, - VersionPublishedAt, - AttachmentId)); -ALTER TABLE AttachmentVersionComposition ADD CONSTRAINT FKVersionCompositionAttachment FOREIGN KEY (VersionEducationalMaterialId, VersionMaterialId, VersionPublishedAt) REFERENCES VersionComposition (EducationalMaterialId, MaterialId, PublishedAt); -ALTER TABLE AttachmentVersionComposition ADD CONSTRAINT FKAttachmentVersion FOREIGN KEY (AttachmentId) REFERENCES Attachment (Id); - - -INSERT INTO AttachmentVersionComposition (VersionEducationalMaterialId, VersionPublishedAt, VersionMaterialId, AttachmentId) -SELECT em.Id as EducationalMaterialId, em.PublishedAt::timestamp(3), m.Id as MaterialId, attachment.id -FROM EducationalMaterial AS em -JOIN Material AS m -ON em.Id = m.EducationalMaterialId -JOIN attachment -ON m.id = attachment.MaterialId -WHERE em.PublishedAt 
IS NOT NULL; - --- readable codes - -CREATE TABLE LicenseCode ( - Code text NOT NULL, - License text NOT NULL, - PRIMARY KEY (Code)); - -INSERT INTO LicenseCode (Code, License) VALUES ('CCBY4.0', 'CC BY 4.0'); -INSERT INTO LicenseCode (Code, License) VALUES ('CCBYNC4.0', 'CC BY-NC 4.0'); -INSERT INTO LicenseCode (Code, License) VALUES ('CCBYNCND4.0', 'CC BY-NC-ND 4.0'); -INSERT INTO LicenseCode (Code, License) VALUES ('CCBYNCSA4.0', 'CC BY-NC-SA 4.0'); -INSERT INTO LicenseCode (Code, License) VALUES ('CCBYND4.0', 'CC BY-ND 4.0'); -INSERT INTO LicenseCode (Code, License) VALUES ('CCBYSA4.0', 'CC BY-SA 4.0'); - --- feat-674 -ALTER TABLE TemporaryAttachment ADD COLUMN AttachmentId int8 NOT NULL; -ALTER TABLE TemporaryAttachment ADD CONSTRAINT FKTempAttachment FOREIGN KEY (AttachmentId) REFERENCES Attachment (Id); -ALTER TABLE Attachment ALTER COLUMN filebucket drop not null; -ALTER TABLE Attachment ALTER COLUMN filepath drop not null; -ALTER TABLE Attachment ALTER COLUMN filekey drop not null; -ALTER TABLE TemporaryAttachment DROP COLUMN materialid; - ---3.0.0 - --- Collection - -CREATE TABLE CollectionEducationalMaterial ( - CollectionId int8 NOT NULL, - EducationalMaterialId int8 NOT NULL, - PRIMARY KEY (CollectionId, - EducationalMaterialId)); -CREATE TABLE Collection ( - Id BIGSERIAL NOT NULL, - CreatedAt timestamp with time zone NOT NULL, - UpdatedAt timestamp with time zone, - PublishedAt timestamp with time zone, - CreatedBy text NOT NULL, - AgeRangeMin int4, - AgeRangeMax int4, - CollectionName text NOT NULL, - PRIMARY KEY (Id)); -CREATE TABLE UsersCollection ( - CollectionId int8 NOT NULL, - UsersUserName text NOT NULL, - PRIMARY KEY (CollectionId, - UsersUserName)); -ALTER TABLE CollectionEducationalMaterial ADD CONSTRAINT FKMaterialCollection FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Restrict; -ALTER TABLE UsersCollection ADD CONSTRAINT FKUsersCollection FOREIGN KEY (UsersUserName) REFERENCES Users (UserName); -ALTER 
TABLE CollectionEducationalMaterial ADD CONSTRAINT FKCollectionMaterial FOREIGN KEY (CollectionId) REFERENCES Collection (Id) ON DELETE Cascade; -ALTER TABLE UsersCollection ADD CONSTRAINT FKCollectionUsers FOREIGN KEY (CollectionId) REFERENCES Collection (Id) ON DELETE Cascade; - ---begin feat 112 - -ALTER TABLE Collection ADD COLUMN Description text; - -CREATE TABLE CollectionKeyWord ( - Id BIGSERIAL NOT NULL, - Value text NOT NULL, - CollectionId int8 NOT NULL, - KeywordKey text NOT NULL, - PRIMARY KEY (Id)); -CREATE TABLE CollectionAlignmentObject ( - Id BIGSERIAL NOT NULL, - AlignmentType text NOT NULL, - CollectionId int8 NOT NULL, - TargetName text NOT NULL, - Source text NOT NULL, - EducationalFramework text NOT NULL, - ObjectKey text NOT NULL, - TargetUrl text, - PRIMARY KEY (Id)); -CREATE TABLE CollectionEducationalUse ( - Id BIGSERIAL NOT NULL, - EducationalUseKey text NOT NULL, - CollectionId int8 NOT NULL, - Value text NOT NULL, - PRIMARY KEY (Id)); -CREATE TABLE CollectionLanguage ( - Id BIGSERIAL NOT NULL, - Language text NOT NULL, - CollectionId int8 NOT NULL, - PRIMARY KEY (Id)); -CREATE TABLE CollectionEducationalAudience ( - Id BIGSERIAL NOT NULL, - EducationalRole text NOT NULL, - CollectionId int8 NOT NULL, - EducationalRoleKey text NOT NULL, - PRIMARY KEY (Id)); -CREATE TABLE CollectionAccessibilityHazard ( - Id BIGSERIAL NOT NULL, - Value text NOT NULL, - AccessibilityHazardKey text NOT NULL, - CollectionId int8 NOT NULL, - PRIMARY KEY (Id)); -CREATE TABLE CollectionAccessibilityFeature ( - Id SERIAL NOT NULL, - Value text NOT NULL, - AccessibilityFeatureKey text NOT NULL, - CollectionId int8 NOT NULL, - PRIMARY KEY (Id)); - -ALTER TABLE CollectionAccessibilityHazard ADD CONSTRAINT FKCollectionAccessibilityHazard FOREIGN KEY (CollectionId) REFERENCES Collection (Id); -ALTER TABLE CollectionAccessibilityFeature ADD CONSTRAINT FKCollectionAccessibilityFeature FOREIGN KEY (CollectionId) REFERENCES Collection (Id); -ALTER TABLE 
CollectionEducationalAudience ADD CONSTRAINT FKCollectionEducationalAudience FOREIGN KEY (CollectionId) REFERENCES Collection (Id) ON DELETE Cascade; -ALTER TABLE CollectionEducationalUse ADD CONSTRAINT FKCollectionEducationalUse FOREIGN KEY (CollectionId) REFERENCES Collection (Id) ON DELETE Cascade; -ALTER TABLE CollectionAlignmentObject ADD CONSTRAINT FKCollectionAligmentObject FOREIGN KEY (CollectionId) REFERENCES Collection (Id) ON DELETE Cascade; -ALTER TABLE CollectionLanguage ADD CONSTRAINT FKCollectionLanguage FOREIGN KEY (CollectionId) REFERENCES Collection (Id) ON DELETE Cascade; -ALTER TABLE CollectionKeyWord ADD CONSTRAINT FKCollectionKeyWords FOREIGN KEY (CollectionId) REFERENCES Collection (Id) ON DELETE Cascade; - ---end feat 112 - --- faet 112 headings and priority -ALTER TABLE Collection ALTER COLUMN Description TYPE varchar(2000); -ALTER TABLE Collection ALTER COLUMN CollectionName TYPE varchar(255); -ALTER TABLE CollectionEducationalMaterial ADD COLUMN Priority int4 DEFAULT 0 NOT NULL; - -CREATE TABLE CollectionHeading ( - Id BIGSERIAL NOT NULL, - Heading varchar(255) NOT NULL, - Description varchar(2000), - Priority int4 DEFAULT 0 NOT NULL, - CollectionId int8 NOT NULL, - PRIMARY KEY (Id)); -ALTER TABLE CollectionHeading ADD CONSTRAINT FKCollectionHeading FOREIGN KEY (CollectionId) REFERENCES Collection (Id); - -CREATE TABLE CollectionEducationalLevel ( - Id BIGSERIAL NOT NULL, - EducationalLevelKey text NOT NULL, - CollectionId int8 NOT NULL, - Value text NOT NULL, - PRIMARY KEY (Id)); -ALTER TABLE CollectionEducationalLevel ADD CONSTRAINT FKCollectionEducationalLevel FOREIGN KEY (CollectionId) REFERENCES Collection (Id) ON DELETE Cascade; - --- CollectionAlignmentObject educationalframework can be null - -ALTER TABLE collectionalignmentobject ALTER COLUMN educationalframework drop not null; - ---feat 739 - -CREATE TABLE collectionthumbnail ( - id BIGSERIAL NOT NULL, - filepath text NOT NULL, - mimetype text NOT NULL, - filename text NOT NULL, 
- obsoleted int4 DEFAULT 0 NOT NULL, - filekey text NOT NULL, - filebucket text NOT NULL, - collectionid int8 NOT NULL, - PRIMARY KEY (id)); -ALTER TABLE collectionthumbnail ADD CONSTRAINT FKCollectionThumbnail FOREIGN KEY (collectionid) REFERENCES Collection (Id); - --- Change default priority -ALTER TABLE collectioneducationalmaterial ALTER COLUMN priority SET DEFAULT 999; -ALTER TABLE collectionheading ALTER COLUMN priority SET DEFAULT 999; - -ALTER TABLE educationalmaterial ADD COLUMN viewcounter int8 DEFAULT 0; -ALTER TABLE educationalmaterial ADD COLUMN downloadcounter int8 DEFAULT 0; -ALTER TABLE educationalmaterial ADD COLUMN counterupdatedat timestamp with time zone; - --- 6.0.0 - -ALTER TABLE users ADD COLUMN email text; -ALTER TABLE users ADD COLUMN verifiedemail bool DEFAULT false; -ALTER TABLE users ADD COLUMN newratings bool DEFAULT false; -ALTER TABLE users ADD COLUMN almostexpired bool DEFAULT false; -ALTER TABLE users ADD COLUMN termsupdated bool DEFAULT false; -ALTER TABLE users ADD COLUMN allowtransfer bool DEFAULT false; - --- 7.0.0 - -CREATE TABLE AccessibilityFeatureExtension ( - Id BIGSERIAL NOT NULL, - Value text NOT NULL, - AccessibilityFeatureKey text NOT NULL, - EducationalMaterialId int8 NOT NULL, - UsersUserName text NOT NULL, - PRIMARY KEY (Id)); -CREATE TABLE AccessibilityHazardExtension ( - Id BIGSERIAL NOT NULL, - Value text NOT NULL, - AccessibilityHazardKey text NOT NULL, - EducationalMaterialId int8 NOT NULL, - UsersUserName text NOT NULL, - PRIMARY KEY (Id)); -CREATE TABLE EducationalLevelExtension ( - Id BIGSERIAL NOT NULL, - Value text NOT NULL, - EducationalLevelKey text NOT NULL, - EducationalMaterialId int8 NOT NULL, - UsersUserName text NOT NULL, - PRIMARY KEY (Id)); -CREATE TABLE KeyWordExtension ( - Id BIGSERIAL NOT NULL, - Value text NOT NULL, - EducationalMaterialId int8 NOT NULL, - KeywordKey text NOT NULL, - UsersUserName text NOT NULL, - PRIMARY KEY (Id)); -ALTER TABLE AccessibilityFeatureExtension ADD CONSTRAINT 
fkAccessibilityFeatureExtension FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); -ALTER TABLE AccessibilityHazardExtension ADD CONSTRAINT fkAccessibilityHazardExtension FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); -ALTER TABLE EducationalLevelExtension ADD CONSTRAINT fkEducationalLevelExtension FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); -ALTER TABLE KeyWordExtension ADD CONSTRAINT fkKeyWordExtension FOREIGN KEY (UsersUserName) REFERENCES Users (UserName); -ALTER TABLE AccessibilityFeatureExtension ADD CONSTRAINT fkUserAccessibilityFeatureExtension FOREIGN KEY (UsersUserName) REFERENCES Users (UserName); -ALTER TABLE AccessibilityHazardExtension ADD CONSTRAINT fkUsersAccessibiltyHazardExtension FOREIGN KEY (UsersUserName) REFERENCES Users (UserName); -ALTER TABLE EducationalLevelExtension ADD CONSTRAINT fkUsersEducationalLevelExtension FOREIGN KEY (UsersUserName) REFERENCES Users (UserName); -ALTER TABLE KeyWordExtension ADD CONSTRAINT fkUsersKeyWordExtension FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); - -ALTER TABLE AccessibilityFeatureExtension ADD CONSTRAINT constraint_AccessibilityFeatureExtension UNIQUE (accessibilityfeaturekey,educationalmaterialid); -ALTER TABLE AccessibilityHazardExtension ADD CONSTRAINT constraint_AccessibilityHazardExtension UNIQUE (accessibilityhazardkey,educationalmaterialid); -ALTER TABLE EducationalLevelExtension ADD CONSTRAINT constraint_EducationalLevelExtension UNIQUE (educationallevelkey,educationalmaterialid); -ALTER TABLE KeyWordExtension ADD CONSTRAINT constraint_KeyWordExtension UNIQUE (keywordkey,educationalmaterialid); - --- feat-789 -ALTER TABLE record ADD COLUMN pdfkey text; - -CREATE TABLE aoeuser ( - username text NOT NULL, - PRIMARY KEY (username)); - -CREATE TABLE educationalMaterialVersion ( - educationalMaterialId int8 NOT NULL, - publishedAt timestamp NOT NULL, - urn text, - PRIMARY KEY 
(educationalMaterialId, - publishedAt)); - -INSERT INTO educationalmaterialversion(educationalmaterialid, publishedat) SELECT distinct educationalmaterialid, publishedat from versioncomposition; - -ALTER TABLE VersionComposition ADD CONSTRAINT fkversioncomposition FOREIGN KEY (EducationalMaterialId, PublishedAt) REFERENCES EducationalMaterialVersion (EducationalMaterialId, PublishedAt); -ALTER TABLE EducationalMaterialVersion ADD CONSTRAINT FKemversion FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); diff --git a/aoe-web-backend/sql/README.md b/aoe-web-backend/sql/README.md deleted file mode 100644 index 9712fa09a..000000000 --- a/aoe-web-backend/sql/README.md +++ /dev/null @@ -1 +0,0 @@ -SQL data definition files are deprecated and will be replaced. diff --git a/aoe-web-backend/sql/schema.sql b/aoe-web-backend/sql/schema.sql new file mode 100644 index 000000000..16bcd0d56 --- /dev/null +++ b/aoe-web-backend/sql/schema.sql @@ -0,0 +1,3539 @@ +-- +-- PostgreSQL database dump +-- + +-- Dumped from database version 14.13 +-- Dumped by pg_dump version 14.15 (Homebrew) + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET row_security = off; + +-- +-- Name: lang; Type: TYPE; Schema: public; Owner: aoe_admin +-- + +CREATE TYPE public.lang AS ENUM ( + 'fi', + 'en', + 'sv' +); + + +ALTER TYPE public.lang OWNER TO aoe_admin; + +-- +-- Name: notificationtype; Type: TYPE; Schema: public; Owner: aoe_admin +-- + +CREATE TYPE public.notificationtype AS ENUM ( + 'ERROR', + 'INFO' +); + + +ALTER TYPE public.notificationtype OWNER TO aoe_admin; + +SET default_tablespace = ''; + +SET default_table_access_method = heap; + +-- +-- Name: accessibilityfeature; Type: TABLE; Schema: public; 
Owner: aoe_admin +-- + +CREATE TABLE public.accessibilityfeature ( + id bigint NOT NULL, + value text NOT NULL, + educationalmaterialid bigint NOT NULL, + accessibilityfeaturekey text NOT NULL +); + + +ALTER TABLE public.accessibilityfeature OWNER TO aoe_admin; + +-- +-- Name: accessibilityfeature_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.accessibilityfeature_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.accessibilityfeature_id_seq OWNER TO aoe_admin; + +-- +-- Name: accessibilityfeature_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.accessibilityfeature_id_seq OWNED BY public.accessibilityfeature.id; + + +-- +-- Name: accessibilityfeatureextension; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.accessibilityfeatureextension ( + id bigint NOT NULL, + value text NOT NULL, + accessibilityfeaturekey text NOT NULL, + educationalmaterialid bigint NOT NULL, + usersusername text NOT NULL +); + + +ALTER TABLE public.accessibilityfeatureextension OWNER TO aoe_admin; + +-- +-- Name: accessibilityfeatureextension_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.accessibilityfeatureextension_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.accessibilityfeatureextension_id_seq OWNER TO aoe_admin; + +-- +-- Name: accessibilityfeatureextension_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.accessibilityfeatureextension_id_seq OWNED BY public.accessibilityfeatureextension.id; + + +-- +-- Name: accessibilityhazard; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.accessibilityhazard ( + id bigint NOT NULL, + value text NOT NULL, + educationalmaterialid bigint NOT NULL, + accessibilityhazardkey text NOT NULL +); + + +ALTER TABLE 
public.accessibilityhazard OWNER TO aoe_admin; + +-- +-- Name: accessibilityhazard_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.accessibilityhazard_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.accessibilityhazard_id_seq OWNER TO aoe_admin; + +-- +-- Name: accessibilityhazard_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.accessibilityhazard_id_seq OWNED BY public.accessibilityhazard.id; + + +-- +-- Name: accessibilityhazardextension; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.accessibilityhazardextension ( + id bigint NOT NULL, + value text NOT NULL, + accessibilityhazardkey text NOT NULL, + educationalmaterialid bigint NOT NULL, + usersusername text NOT NULL +); + + +ALTER TABLE public.accessibilityhazardextension OWNER TO aoe_admin; + +-- +-- Name: accessibilityhazardextension_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.accessibilityhazardextension_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.accessibilityhazardextension_id_seq OWNER TO aoe_admin; + +-- +-- Name: accessibilityhazardextension_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.accessibilityhazardextension_id_seq OWNED BY public.accessibilityhazardextension.id; + + +-- +-- Name: alignmentobject; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.alignmentobject ( + id bigint NOT NULL, + educationalmaterialid bigint NOT NULL, + alignmenttype text NOT NULL, + targetname text NOT NULL, + source text NOT NULL, + educationalframework text DEFAULT ''::text NOT NULL, + objectkey text NOT NULL, + targeturl text +); + + +ALTER TABLE public.alignmentobject OWNER TO aoe_admin; + +-- +-- Name: alignmentobject_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + 
+CREATE SEQUENCE public.alignmentobject_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.alignmentobject_id_seq OWNER TO aoe_admin; + +-- +-- Name: alignmentobject_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.alignmentobject_id_seq OWNED BY public.alignmentobject.id; + + +-- +-- Name: aoeuser; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.aoeuser ( + username character varying(255) NOT NULL +); + + +ALTER TABLE public.aoeuser OWNER TO aoe_admin; + +-- +-- Name: attachment; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.attachment ( + id bigint NOT NULL, + filepath text, + originalfilename text NOT NULL, + filesize integer NOT NULL, + mimetype text NOT NULL, + format text, + filekey text, + filebucket text, + defaultfile boolean NOT NULL, + kind text NOT NULL, + label text NOT NULL, + srclang text NOT NULL, + materialid bigint NOT NULL, + obsoleted integer DEFAULT 0 NOT NULL +); + + +ALTER TABLE public.attachment OWNER TO aoe_admin; + +-- +-- Name: attachment_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.attachment_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.attachment_id_seq OWNER TO aoe_admin; + +-- +-- Name: attachment_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.attachment_id_seq OWNED BY public.attachment.id; + + +-- +-- Name: attachmentversioncomposition; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.attachmentversioncomposition ( + versioneducationalmaterialid bigint NOT NULL, + versionmaterialid bigint NOT NULL, + versionpublishedat timestamp without time zone NOT NULL, + attachmentid bigint NOT NULL +); + + +ALTER TABLE public.attachmentversioncomposition OWNER TO aoe_admin; + +-- +-- Name: author; Type: TABLE; Schema: public; 
Owner: aoe_admin +-- + +CREATE TABLE public.author ( + id bigint NOT NULL, + authorname text NOT NULL, + organization text NOT NULL, + educationalmaterialid bigint NOT NULL, + organizationkey text NOT NULL +); + + +ALTER TABLE public.author OWNER TO aoe_admin; + +-- +-- Name: author_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.author_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.author_id_seq OWNER TO aoe_admin; + +-- +-- Name: author_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.author_id_seq OWNED BY public.author.id; + + +-- +-- Name: collection; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.collection ( + id bigint NOT NULL, + createdat timestamp with time zone NOT NULL, + updatedat timestamp with time zone, + publishedat timestamp with time zone, + createdby text NOT NULL, + agerangemin integer, + agerangemax integer, + collectionname character varying(255) NOT NULL, + description character varying(2000) +); + + +ALTER TABLE public.collection OWNER TO aoe_admin; + +-- +-- Name: collection_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.collection_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.collection_id_seq OWNER TO aoe_admin; + +-- +-- Name: collection_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.collection_id_seq OWNED BY public.collection.id; + + +-- +-- Name: collectionaccessibilityfeature; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.collectionaccessibilityfeature ( + id integer NOT NULL, + value text NOT NULL, + accessibilityfeaturekey text NOT NULL, + collectionid bigint NOT NULL +); + + +ALTER TABLE public.collectionaccessibilityfeature OWNER TO aoe_admin; + +-- +-- Name: collectionaccessibilityfeature_id_seq; 
Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.collectionaccessibilityfeature_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.collectionaccessibilityfeature_id_seq OWNER TO aoe_admin; + +-- +-- Name: collectionaccessibilityfeature_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.collectionaccessibilityfeature_id_seq OWNED BY public.collectionaccessibilityfeature.id; + + +-- +-- Name: collectionaccessibilityhazard; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.collectionaccessibilityhazard ( + id bigint NOT NULL, + value text NOT NULL, + accessibilityhazardkey text NOT NULL, + collectionid bigint NOT NULL +); + + +ALTER TABLE public.collectionaccessibilityhazard OWNER TO aoe_admin; + +-- +-- Name: collectionaccessibilityhazard_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.collectionaccessibilityhazard_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.collectionaccessibilityhazard_id_seq OWNER TO aoe_admin; + +-- +-- Name: collectionaccessibilityhazard_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.collectionaccessibilityhazard_id_seq OWNED BY public.collectionaccessibilityhazard.id; + + +-- +-- Name: collectionalignmentobject; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.collectionalignmentobject ( + id bigint NOT NULL, + alignmenttype text NOT NULL, + collectionid bigint NOT NULL, + targetname text NOT NULL, + source text NOT NULL, + educationalframework text, + objectkey text NOT NULL, + targeturl text +); + + +ALTER TABLE public.collectionalignmentobject OWNER TO aoe_admin; + +-- +-- Name: collectionalignmentobject_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE 
public.collectionalignmentobject_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.collectionalignmentobject_id_seq OWNER TO aoe_admin; + +-- +-- Name: collectionalignmentobject_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.collectionalignmentobject_id_seq OWNED BY public.collectionalignmentobject.id; + + +-- +-- Name: collectioneducationalaudience; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.collectioneducationalaudience ( + id bigint NOT NULL, + educationalrole text NOT NULL, + collectionid bigint NOT NULL, + educationalrolekey text NOT NULL +); + + +ALTER TABLE public.collectioneducationalaudience OWNER TO aoe_admin; + +-- +-- Name: collectioneducationalaudience_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.collectioneducationalaudience_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.collectioneducationalaudience_id_seq OWNER TO aoe_admin; + +-- +-- Name: collectioneducationalaudience_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.collectioneducationalaudience_id_seq OWNED BY public.collectioneducationalaudience.id; + + +-- +-- Name: collectioneducationallevel; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.collectioneducationallevel ( + id bigint NOT NULL, + educationallevelkey text NOT NULL, + collectionid bigint NOT NULL, + value text NOT NULL +); + + +ALTER TABLE public.collectioneducationallevel OWNER TO aoe_admin; + +-- +-- Name: collectioneducationallevel_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.collectioneducationallevel_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.collectioneducationallevel_id_seq OWNER TO aoe_admin; + +-- +-- Name: 
collectioneducationallevel_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.collectioneducationallevel_id_seq OWNED BY public.collectioneducationallevel.id; + + +-- +-- Name: collectioneducationalmaterial; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.collectioneducationalmaterial ( + collectionid bigint NOT NULL, + educationalmaterialid bigint NOT NULL, + priority integer DEFAULT 999 NOT NULL +); + + +ALTER TABLE public.collectioneducationalmaterial OWNER TO aoe_admin; + +-- +-- Name: collectioneducationaluse; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.collectioneducationaluse ( + id bigint NOT NULL, + educationalusekey text NOT NULL, + collectionid bigint NOT NULL, + value text NOT NULL +); + + +ALTER TABLE public.collectioneducationaluse OWNER TO aoe_admin; + +-- +-- Name: collectioneducationaluse_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.collectioneducationaluse_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.collectioneducationaluse_id_seq OWNER TO aoe_admin; + +-- +-- Name: collectioneducationaluse_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.collectioneducationaluse_id_seq OWNED BY public.collectioneducationaluse.id; + + +-- +-- Name: collectionheading; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.collectionheading ( + id bigint NOT NULL, + heading character varying(255) NOT NULL, + description character varying(2000), + priority integer DEFAULT 999 NOT NULL, + collectionid bigint NOT NULL +); + + +ALTER TABLE public.collectionheading OWNER TO aoe_admin; + +-- +-- Name: collectionheading_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.collectionheading_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE 
public.collectionheading_id_seq OWNER TO aoe_admin; + +-- +-- Name: collectionheading_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.collectionheading_id_seq OWNED BY public.collectionheading.id; + + +-- +-- Name: collectionkeyword; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.collectionkeyword ( + id bigint NOT NULL, + value text NOT NULL, + collectionid bigint NOT NULL, + keywordkey text NOT NULL +); + + +ALTER TABLE public.collectionkeyword OWNER TO aoe_admin; + +-- +-- Name: collectionkeyword_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.collectionkeyword_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.collectionkeyword_id_seq OWNER TO aoe_admin; + +-- +-- Name: collectionkeyword_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.collectionkeyword_id_seq OWNED BY public.collectionkeyword.id; + + +-- +-- Name: collectionlanguage; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.collectionlanguage ( + id bigint NOT NULL, + language text NOT NULL, + collectionid bigint NOT NULL +); + + +ALTER TABLE public.collectionlanguage OWNER TO aoe_admin; + +-- +-- Name: collectionlanguage_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.collectionlanguage_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.collectionlanguage_id_seq OWNER TO aoe_admin; + +-- +-- Name: collectionlanguage_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.collectionlanguage_id_seq OWNED BY public.collectionlanguage.id; + + +-- +-- Name: collectionthumbnail; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.collectionthumbnail ( + id bigint NOT NULL, + filepath text NOT NULL, + mimetype text NOT NULL, + 
filename text NOT NULL, + obsoleted integer DEFAULT 0 NOT NULL, + filekey text NOT NULL, + filebucket text NOT NULL, + collectionid bigint NOT NULL +); + + +ALTER TABLE public.collectionthumbnail OWNER TO aoe_admin; + +-- +-- Name: collectionthumbnail_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.collectionthumbnail_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.collectionthumbnail_id_seq OWNER TO aoe_admin; + +-- +-- Name: collectionthumbnail_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.collectionthumbnail_id_seq OWNED BY public.collectionthumbnail.id; + + +-- +-- Name: educationalaudience; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.educationalaudience ( + id bigint NOT NULL, + educationalrole text NOT NULL, + educationalmaterialid bigint NOT NULL, + educationalrolekey text NOT NULL +); + + +ALTER TABLE public.educationalaudience OWNER TO aoe_admin; + +-- +-- Name: educationalaudience_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.educationalaudience_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.educationalaudience_id_seq OWNER TO aoe_admin; + +-- +-- Name: educationalaudience_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.educationalaudience_id_seq OWNED BY public.educationalaudience.id; + + +-- +-- Name: educationallevel; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.educationallevel ( + id bigint NOT NULL, + value text NOT NULL, + educationalmaterialid bigint NOT NULL, + educationallevelkey text NOT NULL +); + + +ALTER TABLE public.educationallevel OWNER TO aoe_admin; + +-- +-- Name: educationallevel_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.educationallevel_id_seq + START 
WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.educationallevel_id_seq OWNER TO aoe_admin; + +-- +-- Name: educationallevel_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.educationallevel_id_seq OWNED BY public.educationallevel.id; + + +-- +-- Name: educationallevelextension; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.educationallevelextension ( + id bigint NOT NULL, + value text NOT NULL, + educationallevelkey text NOT NULL, + educationalmaterialid bigint NOT NULL, + usersusername text NOT NULL +); + + +ALTER TABLE public.educationallevelextension OWNER TO aoe_admin; + +-- +-- Name: educationallevelextension_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.educationallevelextension_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.educationallevelextension_id_seq OWNER TO aoe_admin; + +-- +-- Name: educationallevelextension_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.educationallevelextension_id_seq OWNED BY public.educationallevelextension.id; + + +-- +-- Name: educationalmaterial; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.educationalmaterial ( + id bigint NOT NULL, + createdat timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, + publishedat timestamp with time zone, + updatedat timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, + archivedat timestamp with time zone, + timerequired text DEFAULT ''::text NOT NULL, + agerangemin integer, + agerangemax integer, + licensecode text DEFAULT ''::text NOT NULL, + obsoleted integer DEFAULT 0 NOT NULL, + originalpublishedat timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, + usersusername text NOT NULL, + expires timestamp with time zone, + suitsallearlychildhoodsubjects boolean DEFAULT false NOT NULL, + 
suitsallpreprimarysubjects boolean DEFAULT false NOT NULL, + suitsallbasicstudysubjects boolean DEFAULT false NOT NULL, + suitsalluppersecondarysubjects boolean DEFAULT false NOT NULL, + suitsallvocationaldegrees boolean DEFAULT false NOT NULL, + suitsallselfmotivatedsubjects boolean DEFAULT false NOT NULL, + suitsallbranches boolean DEFAULT false NOT NULL, + suitsalluppersecondarysubjectsnew boolean DEFAULT false NOT NULL, + ratingcontentaverage numeric(2,1), + ratingvisualaverage numeric(2,1), + viewcounter bigint DEFAULT 0, + downloadcounter bigint DEFAULT 0, + counterupdatedat timestamp with time zone +); + + +ALTER TABLE public.educationalmaterial OWNER TO aoe_admin; + +-- +-- Name: educationalmaterial_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.educationalmaterial_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.educationalmaterial_id_seq OWNER TO aoe_admin; + +-- +-- Name: educationalmaterial_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.educationalmaterial_id_seq OWNED BY public.educationalmaterial.id; + + +-- +-- Name: educationalmaterialversion; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.educationalmaterialversion ( + educationalmaterialid bigint NOT NULL, + publishedat timestamp without time zone NOT NULL, + urn text +); + + +ALTER TABLE public.educationalmaterialversion OWNER TO aoe_admin; + +-- +-- Name: educationaluse; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.educationaluse ( + id bigint NOT NULL, + value text NOT NULL, + educationalmaterialid bigint NOT NULL, + educationalusekey text NOT NULL +); + + +ALTER TABLE public.educationaluse OWNER TO aoe_admin; + +-- +-- Name: educationaluse_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.educationaluse_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO 
MAXVALUE + CACHE 1; + + +ALTER TABLE public.educationaluse_id_seq OWNER TO aoe_admin; + +-- +-- Name: educationaluse_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.educationaluse_id_seq OWNED BY public.educationaluse.id; + + +-- +-- Name: inlanguage; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.inlanguage ( + id bigint NOT NULL, + inlanguage text NOT NULL, + url text NOT NULL, + educationalmaterialid bigint NOT NULL +); + + +ALTER TABLE public.inlanguage OWNER TO aoe_admin; + +-- +-- Name: inlanguage_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.inlanguage_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.inlanguage_id_seq OWNER TO aoe_admin; + +-- +-- Name: inlanguage_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.inlanguage_id_seq OWNED BY public.inlanguage.id; + + +-- +-- Name: isbasedon; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.isbasedon ( + id bigint NOT NULL, + author text, + url text NOT NULL, + materialname text NOT NULL, + educationalmaterialid bigint NOT NULL +); + + +ALTER TABLE public.isbasedon OWNER TO aoe_admin; + +-- +-- Name: isbasedon_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.isbasedon_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.isbasedon_id_seq OWNER TO aoe_admin; + +-- +-- Name: isbasedon_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.isbasedon_id_seq OWNED BY public.isbasedon.id; + + +-- +-- Name: isbasedonauthor; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.isbasedonauthor ( + id bigint NOT NULL, + authorname text NOT NULL, + isbasedonid bigint NOT NULL +); + + +ALTER TABLE public.isbasedonauthor OWNER TO aoe_admin; + +-- 
+-- Name: isbasedonauthor_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.isbasedonauthor_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.isbasedonauthor_id_seq OWNER TO aoe_admin; + +-- +-- Name: isbasedonauthor_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.isbasedonauthor_id_seq OWNED BY public.isbasedonauthor.id; + + +-- +-- Name: keyword; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.keyword ( + id bigint NOT NULL, + value text NOT NULL, + educationalmaterialid bigint NOT NULL, + keywordkey text NOT NULL +); + + +ALTER TABLE public.keyword OWNER TO aoe_admin; + +-- +-- Name: keyword_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.keyword_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.keyword_id_seq OWNER TO aoe_admin; + +-- +-- Name: keyword_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.keyword_id_seq OWNED BY public.keyword.id; + + +-- +-- Name: keywordextension; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.keywordextension ( + id bigint NOT NULL, + value text NOT NULL, + educationalmaterialid bigint NOT NULL, + keywordkey text NOT NULL, + usersusername text NOT NULL +); + + +ALTER TABLE public.keywordextension OWNER TO aoe_admin; + +-- +-- Name: keywordextension_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.keywordextension_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.keywordextension_id_seq OWNER TO aoe_admin; + +-- +-- Name: keywordextension_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.keywordextension_id_seq OWNED BY public.keywordextension.id; + + +-- +-- Name: 
learningresourcetype; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.learningresourcetype ( + id bigint NOT NULL, + value text NOT NULL, + educationalmaterialid bigint NOT NULL, + learningresourcetypekey text +); + + +ALTER TABLE public.learningresourcetype OWNER TO aoe_admin; + +-- +-- Name: learningresourcetype_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.learningresourcetype_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.learningresourcetype_id_seq OWNER TO aoe_admin; + +-- +-- Name: learningresourcetype_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.learningresourcetype_id_seq OWNED BY public.learningresourcetype.id; + + +-- +-- Name: licensecode; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.licensecode ( + code text NOT NULL, + license text NOT NULL +); + + +ALTER TABLE public.licensecode OWNER TO aoe_admin; + +-- +-- Name: material; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.material ( + id bigint NOT NULL, + link text NOT NULL, + educationalmaterialid bigint NOT NULL, + obsoleted integer DEFAULT 0 NOT NULL, + priority integer DEFAULT 0 NOT NULL, + materiallanguagekey text DEFAULT 'fi'::public.lang NOT NULL +); + + +ALTER TABLE public.material OWNER TO aoe_admin; + +-- +-- Name: material_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.material_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.material_id_seq OWNER TO aoe_admin; + +-- +-- Name: material_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.material_id_seq OWNED BY public.material.id; + + +-- +-- Name: materialdescription; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.materialdescription ( + id bigint NOT NULL, + 
description text NOT NULL, + language public.lang NOT NULL, + educationalmaterialid bigint NOT NULL +); + + +ALTER TABLE public.materialdescription OWNER TO aoe_admin; + +-- +-- Name: materialdescription_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.materialdescription_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.materialdescription_id_seq OWNER TO aoe_admin; + +-- +-- Name: materialdescription_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.materialdescription_id_seq OWNED BY public.materialdescription.id; + + +-- +-- Name: materialdisplayname; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.materialdisplayname ( + id bigint NOT NULL, + displayname text NOT NULL, + language public.lang NOT NULL, + materialid bigint NOT NULL +); + + +ALTER TABLE public.materialdisplayname OWNER TO aoe_admin; + +-- +-- Name: materialdisplayname_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.materialdisplayname_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.materialdisplayname_id_seq OWNER TO aoe_admin; + +-- +-- Name: materialdisplayname_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.materialdisplayname_id_seq OWNED BY public.materialdisplayname.id; + + +-- +-- Name: materialname; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.materialname ( + id bigint NOT NULL, + materialname text DEFAULT ''::text NOT NULL, + language public.lang NOT NULL, + slug text DEFAULT ''::text NOT NULL, + educationalmaterialid bigint NOT NULL +); + + +ALTER TABLE public.materialname OWNER TO aoe_admin; + +-- +-- Name: materialname_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.materialname_id_seq + START WITH 1 + INCREMENT BY 1 + NO 
MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.materialname_id_seq OWNER TO aoe_admin; + +-- +-- Name: materialname_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.materialname_id_seq OWNED BY public.materialname.id; + + +-- +-- Name: notification; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.notification ( + nf_id bigint NOT NULL, + nf_text character varying(1500) NOT NULL, + nf_type public.notificationtype NOT NULL, + nf_created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP, + nf_show_since timestamp with time zone DEFAULT CURRENT_TIMESTAMP, + nf_show_until timestamp with time zone, + nf_disabled boolean DEFAULT false, + nf_username character varying(255) NOT NULL +); + + +ALTER TABLE public.notification OWNER TO aoe_admin; + +-- +-- Name: notification_nf_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.notification_nf_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.notification_nf_id_seq OWNER TO aoe_admin; + +-- +-- Name: notification_nf_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.notification_nf_id_seq OWNED BY public.notification.nf_id; + + +-- +-- Name: publisher; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.publisher ( + id bigint NOT NULL, + name text NOT NULL, + educationalmaterialid bigint NOT NULL, + publisherkey text NOT NULL +); + + +ALTER TABLE public.publisher OWNER TO aoe_admin; + +-- +-- Name: publisher_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.publisher_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.publisher_id_seq OWNER TO aoe_admin; + +-- +-- Name: publisher_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.publisher_id_seq OWNED BY 
public.publisher.id; + + +-- +-- Name: rating; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.rating ( + id bigint NOT NULL, + ratingcontent integer, + ratingvisual integer, + feedbackpositive character varying(1000), + feedbacksuggest character varying(1000), + feedbackpurpose character varying(1000), + educationalmaterialid bigint NOT NULL, + usersusername text NOT NULL, + updatedat timestamp without time zone DEFAULT CURRENT_TIMESTAMP NOT NULL +); + + +ALTER TABLE public.rating OWNER TO aoe_admin; + +-- +-- Name: rating_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.rating_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.rating_id_seq OWNER TO aoe_admin; + +-- +-- Name: rating_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.rating_id_seq OWNED BY public.rating.id; + + +-- +-- Name: record; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.record ( + id bigint NOT NULL, + filepath text, + originalfilename text NOT NULL, + filesize bigint NOT NULL, + mimetype text NOT NULL, + format text, + materialid bigint NOT NULL, + filekey text, + filebucket text, + pdfkey text +); + + +ALTER TABLE public.record OWNER TO aoe_admin; + +-- +-- Name: record_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.record_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.record_id_seq OWNER TO aoe_admin; + +-- +-- Name: record_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.record_id_seq OWNED BY public.record.id; + + +-- +-- Name: temporaryattachment; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.temporaryattachment ( + id bigint NOT NULL, + filepath text NOT NULL, + originalfilename text NOT NULL, + filesize integer NOT NULL, + mimetype 
text NOT NULL, + format text, + filename text NOT NULL, + createdat timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, + defaultfile boolean NOT NULL, + kind text NOT NULL, + label text NOT NULL, + srclang text NOT NULL, + attachmentid bigint NOT NULL +); + + +ALTER TABLE public.temporaryattachment OWNER TO aoe_admin; + +-- +-- Name: temporaryattachment_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.temporaryattachment_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.temporaryattachment_id_seq OWNER TO aoe_admin; + +-- +-- Name: temporaryattachment_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.temporaryattachment_id_seq OWNED BY public.temporaryattachment.id; + + +-- +-- Name: temporaryrecord; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.temporaryrecord ( + id bigint NOT NULL, + filepath text NOT NULL, + originalfilename text NOT NULL, + filesize integer NOT NULL, + mimetype text NOT NULL, + format text NOT NULL, + filename text NOT NULL, + materialid bigint NOT NULL, + createdat timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL +); + + +ALTER TABLE public.temporaryrecord OWNER TO aoe_admin; + +-- +-- Name: temporaryrecord_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.temporaryrecord_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.temporaryrecord_id_seq OWNER TO aoe_admin; + +-- +-- Name: temporaryrecord_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.temporaryrecord_id_seq OWNED BY public.temporaryrecord.id; + + +-- +-- Name: thumbnail; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.thumbnail ( + id bigint NOT NULL, + filepath text NOT NULL, + mimetype text NOT NULL, + educationalmaterialid bigint NOT NULL, + 
filename text NOT NULL, + obsoleted integer DEFAULT 0 NOT NULL, + filekey text NOT NULL, + filebucket text NOT NULL +); + + +ALTER TABLE public.thumbnail OWNER TO aoe_admin; + +-- +-- Name: thumbnail_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.thumbnail_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.thumbnail_id_seq OWNER TO aoe_admin; + +-- +-- Name: thumbnail_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.thumbnail_id_seq OWNED BY public.thumbnail.id; + + +-- +-- Name: users; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.users ( + id bigint NOT NULL, + firstname text NOT NULL, + lastname text NOT NULL, + username text NOT NULL, + preferredlanguage public.lang DEFAULT 'fi'::public.lang NOT NULL, + preferredtargetname text NOT NULL, + preferredalignmenttype text NOT NULL, + termsofusage boolean DEFAULT false NOT NULL, + email text, + verifiedemail boolean DEFAULT false, + newratings boolean DEFAULT false, + almostexpired boolean DEFAULT false, + termsupdated boolean DEFAULT false, + allowtransfer boolean DEFAULT false +); + + +ALTER TABLE public.users OWNER TO aoe_admin; + +-- +-- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: aoe_admin +-- + +CREATE SEQUENCE public.users_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.users_id_seq OWNER TO aoe_admin; + +-- +-- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: aoe_admin +-- + +ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id; + + +-- +-- Name: userscollection; Type: TABLE; Schema: public; Owner: aoe_admin +-- + +CREATE TABLE public.userscollection ( + collectionid bigint NOT NULL, + usersusername text NOT NULL +); + + +ALTER TABLE public.userscollection OWNER TO aoe_admin; + +-- +-- Name: versioncomposition; Type: TABLE; Schema: public; Owner: 
aoe_admin +-- + +CREATE TABLE public.versioncomposition ( + educationalmaterialid bigint NOT NULL, + materialid bigint NOT NULL, + publishedat timestamp without time zone NOT NULL, + priority integer +); + + +ALTER TABLE public.versioncomposition OWNER TO aoe_admin; + +-- +-- Name: accessibilityfeature id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityfeature ALTER COLUMN id SET DEFAULT nextval('public.accessibilityfeature_id_seq'::regclass); + + +-- +-- Name: accessibilityfeatureextension id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityfeatureextension ALTER COLUMN id SET DEFAULT nextval('public.accessibilityfeatureextension_id_seq'::regclass); + + +-- +-- Name: accessibilityhazard id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityhazard ALTER COLUMN id SET DEFAULT nextval('public.accessibilityhazard_id_seq'::regclass); + + +-- +-- Name: accessibilityhazardextension id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityhazardextension ALTER COLUMN id SET DEFAULT nextval('public.accessibilityhazardextension_id_seq'::regclass); + + +-- +-- Name: alignmentobject id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.alignmentobject ALTER COLUMN id SET DEFAULT nextval('public.alignmentobject_id_seq'::regclass); + + +-- +-- Name: attachment id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.attachment ALTER COLUMN id SET DEFAULT nextval('public.attachment_id_seq'::regclass); + + +-- +-- Name: author id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.author ALTER COLUMN id SET DEFAULT nextval('public.author_id_seq'::regclass); + + +-- +-- Name: collection id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collection ALTER COLUMN id SET DEFAULT 
nextval('public.collection_id_seq'::regclass); + + +-- +-- Name: collectionaccessibilityfeature id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionaccessibilityfeature ALTER COLUMN id SET DEFAULT nextval('public.collectionaccessibilityfeature_id_seq'::regclass); + + +-- +-- Name: collectionaccessibilityhazard id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionaccessibilityhazard ALTER COLUMN id SET DEFAULT nextval('public.collectionaccessibilityhazard_id_seq'::regclass); + + +-- +-- Name: collectionalignmentobject id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionalignmentobject ALTER COLUMN id SET DEFAULT nextval('public.collectionalignmentobject_id_seq'::regclass); + + +-- +-- Name: collectioneducationalaudience id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectioneducationalaudience ALTER COLUMN id SET DEFAULT nextval('public.collectioneducationalaudience_id_seq'::regclass); + + +-- +-- Name: collectioneducationallevel id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectioneducationallevel ALTER COLUMN id SET DEFAULT nextval('public.collectioneducationallevel_id_seq'::regclass); + + +-- +-- Name: collectioneducationaluse id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectioneducationaluse ALTER COLUMN id SET DEFAULT nextval('public.collectioneducationaluse_id_seq'::regclass); + + +-- +-- Name: collectionheading id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionheading ALTER COLUMN id SET DEFAULT nextval('public.collectionheading_id_seq'::regclass); + + +-- +-- Name: collectionkeyword id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionkeyword ALTER COLUMN id SET DEFAULT nextval('public.collectionkeyword_id_seq'::regclass); + + +-- +-- Name: 
collectionlanguage id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionlanguage ALTER COLUMN id SET DEFAULT nextval('public.collectionlanguage_id_seq'::regclass); + + +-- +-- Name: collectionthumbnail id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionthumbnail ALTER COLUMN id SET DEFAULT nextval('public.collectionthumbnail_id_seq'::regclass); + + +-- +-- Name: educationalaudience id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationalaudience ALTER COLUMN id SET DEFAULT nextval('public.educationalaudience_id_seq'::regclass); + + +-- +-- Name: educationallevel id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationallevel ALTER COLUMN id SET DEFAULT nextval('public.educationallevel_id_seq'::regclass); + + +-- +-- Name: educationallevelextension id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationallevelextension ALTER COLUMN id SET DEFAULT nextval('public.educationallevelextension_id_seq'::regclass); + + +-- +-- Name: educationalmaterial id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationalmaterial ALTER COLUMN id SET DEFAULT nextval('public.educationalmaterial_id_seq'::regclass); + + +-- +-- Name: educationaluse id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationaluse ALTER COLUMN id SET DEFAULT nextval('public.educationaluse_id_seq'::regclass); + + +-- +-- Name: inlanguage id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.inlanguage ALTER COLUMN id SET DEFAULT nextval('public.inlanguage_id_seq'::regclass); + + +-- +-- Name: isbasedon id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.isbasedon ALTER COLUMN id SET DEFAULT nextval('public.isbasedon_id_seq'::regclass); + + +-- +-- Name: isbasedonauthor id; Type: DEFAULT; 
Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.isbasedonauthor ALTER COLUMN id SET DEFAULT nextval('public.isbasedonauthor_id_seq'::regclass); + + +-- +-- Name: keyword id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.keyword ALTER COLUMN id SET DEFAULT nextval('public.keyword_id_seq'::regclass); + + +-- +-- Name: keywordextension id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.keywordextension ALTER COLUMN id SET DEFAULT nextval('public.keywordextension_id_seq'::regclass); + + +-- +-- Name: learningresourcetype id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.learningresourcetype ALTER COLUMN id SET DEFAULT nextval('public.learningresourcetype_id_seq'::regclass); + + +-- +-- Name: material id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.material ALTER COLUMN id SET DEFAULT nextval('public.material_id_seq'::regclass); + + +-- +-- Name: materialdescription id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.materialdescription ALTER COLUMN id SET DEFAULT nextval('public.materialdescription_id_seq'::regclass); + + +-- +-- Name: materialdisplayname id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.materialdisplayname ALTER COLUMN id SET DEFAULT nextval('public.materialdisplayname_id_seq'::regclass); + + +-- +-- Name: materialname id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.materialname ALTER COLUMN id SET DEFAULT nextval('public.materialname_id_seq'::regclass); + + +-- +-- Name: notification nf_id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.notification ALTER COLUMN nf_id SET DEFAULT nextval('public.notification_nf_id_seq'::regclass); + + +-- +-- Name: publisher id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.publisher ALTER COLUMN id SET 
DEFAULT nextval('public.publisher_id_seq'::regclass); + + +-- +-- Name: rating id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.rating ALTER COLUMN id SET DEFAULT nextval('public.rating_id_seq'::regclass); + + +-- +-- Name: record id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.record ALTER COLUMN id SET DEFAULT nextval('public.record_id_seq'::regclass); + + +-- +-- Name: temporaryattachment id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.temporaryattachment ALTER COLUMN id SET DEFAULT nextval('public.temporaryattachment_id_seq'::regclass); + + +-- +-- Name: temporaryrecord id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.temporaryrecord ALTER COLUMN id SET DEFAULT nextval('public.temporaryrecord_id_seq'::regclass); + + +-- +-- Name: thumbnail id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.thumbnail ALTER COLUMN id SET DEFAULT nextval('public.thumbnail_id_seq'::regclass); + + +-- +-- Name: users id; Type: DEFAULT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass); + + +-- +-- Name: accessibilityfeature accessibilityfeature_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityfeature + ADD CONSTRAINT accessibilityfeature_pkey PRIMARY KEY (id); + + +-- +-- Name: accessibilityfeatureextension accessibilityfeatureextension_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityfeatureextension + ADD CONSTRAINT accessibilityfeatureextension_pkey PRIMARY KEY (id); + + +-- +-- Name: accessibilityhazard accessibilityhazard_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityhazard + ADD CONSTRAINT accessibilityhazard_pkey PRIMARY KEY (id); + + +-- +-- Name: 
accessibilityhazardextension accessibilityhazardextension_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityhazardextension + ADD CONSTRAINT accessibilityhazardextension_pkey PRIMARY KEY (id); + + +-- +-- Name: alignmentobject alignmentobject_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.alignmentobject + ADD CONSTRAINT alignmentobject_pkey PRIMARY KEY (id); + + +-- +-- Name: aoeuser aoeuser_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.aoeuser + ADD CONSTRAINT aoeuser_pkey PRIMARY KEY (username); + + +-- +-- Name: attachment attachment_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.attachment + ADD CONSTRAINT attachment_pkey PRIMARY KEY (id); + + +-- +-- Name: attachmentversioncomposition attachmentversioncomposition_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.attachmentversioncomposition + ADD CONSTRAINT attachmentversioncomposition_pkey PRIMARY KEY (versioneducationalmaterialid, versionmaterialid, versionpublishedat, attachmentid); + + +-- +-- Name: author author_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.author + ADD CONSTRAINT author_pkey PRIMARY KEY (id); + + +-- +-- Name: collection collection_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collection + ADD CONSTRAINT collection_pkey PRIMARY KEY (id); + + +-- +-- Name: collectionaccessibilityfeature collectionaccessibilityfeature_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionaccessibilityfeature + ADD CONSTRAINT collectionaccessibilityfeature_pkey PRIMARY KEY (id); + + +-- +-- Name: collectionaccessibilityhazard collectionaccessibilityhazard_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionaccessibilityhazard 
+ ADD CONSTRAINT collectionaccessibilityhazard_pkey PRIMARY KEY (id); + + +-- +-- Name: collectionalignmentobject collectionalignmentobject_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionalignmentobject + ADD CONSTRAINT collectionalignmentobject_pkey PRIMARY KEY (id); + + +-- +-- Name: collectioneducationalaudience collectioneducationalaudience_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectioneducationalaudience + ADD CONSTRAINT collectioneducationalaudience_pkey PRIMARY KEY (id); + + +-- +-- Name: collectioneducationallevel collectioneducationallevel_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectioneducationallevel + ADD CONSTRAINT collectioneducationallevel_pkey PRIMARY KEY (id); + + +-- +-- Name: collectioneducationalmaterial collectioneducationalmaterial_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectioneducationalmaterial + ADD CONSTRAINT collectioneducationalmaterial_pkey PRIMARY KEY (collectionid, educationalmaterialid); + + +-- +-- Name: collectioneducationaluse collectioneducationaluse_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectioneducationaluse + ADD CONSTRAINT collectioneducationaluse_pkey PRIMARY KEY (id); + + +-- +-- Name: collectionheading collectionheading_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionheading + ADD CONSTRAINT collectionheading_pkey PRIMARY KEY (id); + + +-- +-- Name: collectionkeyword collectionkeyword_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionkeyword + ADD CONSTRAINT collectionkeyword_pkey PRIMARY KEY (id); + + +-- +-- Name: collectionlanguage collectionlanguage_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionlanguage + ADD 
CONSTRAINT collectionlanguage_pkey PRIMARY KEY (id); + + +-- +-- Name: collectionthumbnail collectionthumbnail_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionthumbnail + ADD CONSTRAINT collectionthumbnail_pkey PRIMARY KEY (id); + + +-- +-- Name: accessibilityfeature constraint_accessibilityfeature; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityfeature + ADD CONSTRAINT constraint_accessibilityfeature UNIQUE (accessibilityfeaturekey, educationalmaterialid); + + +-- +-- Name: accessibilityfeatureextension constraint_accessibilityfeatureextension; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityfeatureextension + ADD CONSTRAINT constraint_accessibilityfeatureextension UNIQUE (accessibilityfeaturekey, educationalmaterialid); + + +-- +-- Name: accessibilityhazard constraint_accessibilityhazard; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityhazard + ADD CONSTRAINT constraint_accessibilityhazard UNIQUE (accessibilityhazardkey, educationalmaterialid); + + +-- +-- Name: accessibilityhazardextension constraint_accessibilityhazardextension; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityhazardextension + ADD CONSTRAINT constraint_accessibilityhazardextension UNIQUE (accessibilityhazardkey, educationalmaterialid); + + +-- +-- Name: alignmentobject constraint_alignmentobject; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.alignmentobject + ADD CONSTRAINT constraint_alignmentobject UNIQUE (alignmenttype, objectkey, source, educationalmaterialid); + + +-- +-- Name: educationalaudience constraint_educationalaudience; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationalaudience + ADD CONSTRAINT constraint_educationalaudience UNIQUE (educationalrolekey, 
educationalmaterialid); + + +-- +-- Name: educationallevel constraint_educationallevel; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationallevel + ADD CONSTRAINT constraint_educationallevel UNIQUE (educationallevelkey, educationalmaterialid); + + +-- +-- Name: educationallevelextension constraint_educationallevelextension; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationallevelextension + ADD CONSTRAINT constraint_educationallevelextension UNIQUE (educationallevelkey, educationalmaterialid); + + +-- +-- Name: educationaluse constraint_educationaluse; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationaluse + ADD CONSTRAINT constraint_educationaluse UNIQUE (educationalusekey, educationalmaterialid); + + +-- +-- Name: inlanguage constraint_inlanguage; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.inlanguage + ADD CONSTRAINT constraint_inlanguage UNIQUE (inlanguage, educationalmaterialid); + + +-- +-- Name: isbasedon constraint_isbasedon; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.isbasedon + ADD CONSTRAINT constraint_isbasedon UNIQUE (materialname, educationalmaterialid); + + +-- +-- Name: keyword constraint_keyword; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.keyword + ADD CONSTRAINT constraint_keyword UNIQUE (keywordkey, educationalmaterialid); + + +-- +-- Name: keywordextension constraint_keywordextension; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.keywordextension + ADD CONSTRAINT constraint_keywordextension UNIQUE (keywordkey, educationalmaterialid); + + +-- +-- Name: materialname constraint_lang_id; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.materialname + ADD CONSTRAINT constraint_lang_id UNIQUE (language, educationalmaterialid); + + +-- +-- Name: 
learningresourcetype constraint_learningresourcetype; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.learningresourcetype + ADD CONSTRAINT constraint_learningresourcetype UNIQUE (learningresourcetypekey, educationalmaterialid); + + +-- +-- Name: materialdescription constraint_materialdescription_lang_id; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.materialdescription + ADD CONSTRAINT constraint_materialdescription_lang_id UNIQUE (language, educationalmaterialid); + + +-- +-- Name: materialdisplayname constraint_materialdisplayname; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.materialdisplayname + ADD CONSTRAINT constraint_materialdisplayname UNIQUE (language, materialid); + + +-- +-- Name: publisher constraint_publisher; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.publisher + ADD CONSTRAINT constraint_publisher UNIQUE (publisherkey, educationalmaterialid); + + +-- +-- Name: rating constraint_rating; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.rating + ADD CONSTRAINT constraint_rating UNIQUE (usersusername, educationalmaterialid); + + +-- +-- Name: educationalaudience educationalaudience_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationalaudience + ADD CONSTRAINT educationalaudience_pkey PRIMARY KEY (id); + + +-- +-- Name: educationallevel educationallevel_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationallevel + ADD CONSTRAINT educationallevel_pkey PRIMARY KEY (id); + + +-- +-- Name: educationallevelextension educationallevelextension_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationallevelextension + ADD CONSTRAINT educationallevelextension_pkey PRIMARY KEY (id); + + +-- +-- Name: educationalmaterial educationalmaterial_pkey; Type: 
CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationalmaterial + ADD CONSTRAINT educationalmaterial_pkey PRIMARY KEY (id); + + +-- +-- Name: educationalmaterialversion educationalmaterialversion_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationalmaterialversion + ADD CONSTRAINT educationalmaterialversion_pkey PRIMARY KEY (educationalmaterialid, publishedat); + + +-- +-- Name: educationaluse educationaluse_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationaluse + ADD CONSTRAINT educationaluse_pkey PRIMARY KEY (id); + + +-- +-- Name: inlanguage inlanguage_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.inlanguage + ADD CONSTRAINT inlanguage_pkey PRIMARY KEY (id); + + +-- +-- Name: isbasedon isbasedon_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.isbasedon + ADD CONSTRAINT isbasedon_pkey PRIMARY KEY (id); + + +-- +-- Name: isbasedonauthor isbasedonauthor_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.isbasedonauthor + ADD CONSTRAINT isbasedonauthor_pkey PRIMARY KEY (id); + + +-- +-- Name: keyword keyword_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.keyword + ADD CONSTRAINT keyword_pkey PRIMARY KEY (id); + + +-- +-- Name: keywordextension keywordextension_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.keywordextension + ADD CONSTRAINT keywordextension_pkey PRIMARY KEY (id); + + +-- +-- Name: learningresourcetype learningresourcetype_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.learningresourcetype + ADD CONSTRAINT learningresourcetype_pkey PRIMARY KEY (id); + + +-- +-- Name: licensecode licensecode_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.licensecode + 
ADD CONSTRAINT licensecode_pkey PRIMARY KEY (code); + + +-- +-- Name: material material_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.material + ADD CONSTRAINT material_pkey PRIMARY KEY (id); + + +-- +-- Name: materialdescription materialdescription_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.materialdescription + ADD CONSTRAINT materialdescription_pkey PRIMARY KEY (id); + + +-- +-- Name: materialdisplayname materialdisplayname_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.materialdisplayname + ADD CONSTRAINT materialdisplayname_pkey PRIMARY KEY (id); + + +-- +-- Name: materialname materialname_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.materialname + ADD CONSTRAINT materialname_pkey PRIMARY KEY (id); + + +-- +-- Name: notification notification_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.notification + ADD CONSTRAINT notification_pkey PRIMARY KEY (nf_id); + + +-- +-- Name: publisher publisher_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.publisher + ADD CONSTRAINT publisher_pkey PRIMARY KEY (id); + + +-- +-- Name: rating rating_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.rating + ADD CONSTRAINT rating_pkey PRIMARY KEY (id); + + +-- +-- Name: record record_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.record + ADD CONSTRAINT record_pkey PRIMARY KEY (id); + + +-- +-- Name: temporaryattachment temporaryattachment_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.temporaryattachment + ADD CONSTRAINT temporaryattachment_pkey PRIMARY KEY (id); + + +-- +-- Name: temporaryrecord temporaryrecord_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.temporaryrecord 
+ ADD CONSTRAINT temporaryrecord_pkey PRIMARY KEY (id); + + +-- +-- Name: thumbnail thumbnail_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.thumbnail + ADD CONSTRAINT thumbnail_pkey PRIMARY KEY (id); + + +-- +-- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.users + ADD CONSTRAINT users_pkey PRIMARY KEY (username); + + +-- +-- Name: userscollection userscollection_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.userscollection + ADD CONSTRAINT userscollection_pkey PRIMARY KEY (collectionid, usersusername); + + +-- +-- Name: versioncomposition versioncomposition_pkey; Type: CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.versioncomposition + ADD CONSTRAINT versioncomposition_pkey PRIMARY KEY (educationalmaterialid, materialid, publishedat); + + +-- +-- Name: accessibilityfeature fk_accessibilityfeature; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityfeature + ADD CONSTRAINT fk_accessibilityfeature FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE CASCADE; + + +-- +-- Name: accessibilityhazard fk_accessibilityhazard; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityhazard + ADD CONSTRAINT fk_accessibilityhazard FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE CASCADE; + + +-- +-- Name: author fk_author; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.author + ADD CONSTRAINT fk_author FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE CASCADE; + + +-- +-- Name: materialdescription fk_description; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.materialdescription + ADD CONSTRAINT fk_description FOREIGN KEY 
(educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE CASCADE; + + +-- +-- Name: educationallevel fk_educationallevel; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationallevel + ADD CONSTRAINT fk_educationallevel FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE CASCADE; + + +-- +-- Name: materialdisplayname fk_materialdisplayname; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.materialdisplayname + ADD CONSTRAINT fk_materialdisplayname FOREIGN KEY (materialid) REFERENCES public.material(id) ON DELETE CASCADE; + + +-- +-- Name: materialname fk_materialname; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.materialname + ADD CONSTRAINT fk_materialname FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE CASCADE; + + +-- +-- Name: notification fk_notification_aoeuser; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.notification + ADD CONSTRAINT fk_notification_aoeuser FOREIGN KEY (nf_username) REFERENCES public.aoeuser(username) ON UPDATE CASCADE; + + +-- +-- Name: publisher fk_publisher; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.publisher + ADD CONSTRAINT fk_publisher FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE CASCADE; + + +-- +-- Name: temporaryrecord fk_temporaryrecord; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.temporaryrecord + ADD CONSTRAINT fk_temporaryrecord FOREIGN KEY (materialid) REFERENCES public.material(id) ON DELETE RESTRICT; + + +-- +-- Name: thumbnail fk_thumbnail; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.thumbnail + ADD CONSTRAINT fk_thumbnail FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE 
RESTRICT; + + +-- +-- Name: accessibilityfeatureextension fkaccessibilityfeatureextension; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityfeatureextension + ADD CONSTRAINT fkaccessibilityfeatureextension FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id); + + +-- +-- Name: accessibilityhazardextension fkaccessibilityhazardextension; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityhazardextension + ADD CONSTRAINT fkaccessibilityhazardextension FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id); + + +-- +-- Name: alignmentobject fkalignmentobject; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.alignmentobject + ADD CONSTRAINT fkalignmentobject FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE CASCADE; + + +-- +-- Name: attachmentversioncomposition fkattachmentversion; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.attachmentversioncomposition + ADD CONSTRAINT fkattachmentversion FOREIGN KEY (attachmentid) REFERENCES public.attachment(id); + + +-- +-- Name: collectionaccessibilityfeature fkcollectionaccessibilityfeature; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionaccessibilityfeature + ADD CONSTRAINT fkcollectionaccessibilityfeature FOREIGN KEY (collectionid) REFERENCES public.collection(id); + + +-- +-- Name: collectionaccessibilityhazard fkcollectionaccessibilityhazard; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionaccessibilityhazard + ADD CONSTRAINT fkcollectionaccessibilityhazard FOREIGN KEY (collectionid) REFERENCES public.collection(id); + + +-- +-- Name: collectionalignmentobject fkcollectionaligmentobject; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY 
public.collectionalignmentobject + ADD CONSTRAINT fkcollectionaligmentobject FOREIGN KEY (collectionid) REFERENCES public.collection(id) ON DELETE CASCADE; + + +-- +-- Name: collectioneducationalaudience fkcollectioneducationalaudience; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectioneducationalaudience + ADD CONSTRAINT fkcollectioneducationalaudience FOREIGN KEY (collectionid) REFERENCES public.collection(id) ON DELETE CASCADE; + + +-- +-- Name: collectioneducationallevel fkcollectioneducationallevel; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectioneducationallevel + ADD CONSTRAINT fkcollectioneducationallevel FOREIGN KEY (collectionid) REFERENCES public.collection(id) ON DELETE CASCADE; + + +-- +-- Name: collectioneducationaluse fkcollectioneducationaluse; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectioneducationaluse + ADD CONSTRAINT fkcollectioneducationaluse FOREIGN KEY (collectionid) REFERENCES public.collection(id) ON DELETE CASCADE; + + +-- +-- Name: collectionheading fkcollectionheading; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionheading + ADD CONSTRAINT fkcollectionheading FOREIGN KEY (collectionid) REFERENCES public.collection(id); + + +-- +-- Name: collectionkeyword fkcollectionkeywords; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionkeyword + ADD CONSTRAINT fkcollectionkeywords FOREIGN KEY (collectionid) REFERENCES public.collection(id) ON DELETE CASCADE; + + +-- +-- Name: collectionlanguage fkcollectionlanguage; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionlanguage + ADD CONSTRAINT fkcollectionlanguage FOREIGN KEY (collectionid) REFERENCES public.collection(id) ON DELETE CASCADE; + + +-- +-- Name: collectioneducationalmaterial fkcollectionmaterial; Type: FK 
CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectioneducationalmaterial + ADD CONSTRAINT fkcollectionmaterial FOREIGN KEY (collectionid) REFERENCES public.collection(id) ON DELETE CASCADE; + + +-- +-- Name: collectionthumbnail fkcollectionthumbnail; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectionthumbnail + ADD CONSTRAINT fkcollectionthumbnail FOREIGN KEY (collectionid) REFERENCES public.collection(id); + + +-- +-- Name: userscollection fkcollectionusers; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.userscollection + ADD CONSTRAINT fkcollectionusers FOREIGN KEY (collectionid) REFERENCES public.collection(id) ON DELETE CASCADE; + + +-- +-- Name: educationalaudience fkeducationalaudience; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationalaudience + ADD CONSTRAINT fkeducationalaudience FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE CASCADE; + + +-- +-- Name: educationallevelextension fkeducationallevelextension; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationallevelextension + ADD CONSTRAINT fkeducationallevelextension FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id); + + +-- +-- Name: educationalmaterial fkeducationalmaterial; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationalmaterial + ADD CONSTRAINT fkeducationalmaterial FOREIGN KEY (usersusername) REFERENCES public.users(username) ON DELETE RESTRICT; + + +-- +-- Name: versioncomposition fkeducationalmaterialversion; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.versioncomposition + ADD CONSTRAINT fkeducationalmaterialversion FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id); + + +-- +-- Name: educationaluse 
fkeducationaluse; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationaluse + ADD CONSTRAINT fkeducationaluse FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE CASCADE; + + +-- +-- Name: educationalmaterialversion fkemversion; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationalmaterialversion + ADD CONSTRAINT fkemversion FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id); + + +-- +-- Name: inlanguage fkinlanguage; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.inlanguage + ADD CONSTRAINT fkinlanguage FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE CASCADE; + + +-- +-- Name: isbasedon fkisbasedon; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.isbasedon + ADD CONSTRAINT fkisbasedon FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE CASCADE; + + +-- +-- Name: isbasedonauthor fkisbasedonauthor; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.isbasedonauthor + ADD CONSTRAINT fkisbasedonauthor FOREIGN KEY (isbasedonid) REFERENCES public.isbasedon(id) ON DELETE CASCADE; + + +-- +-- Name: keyword fkkeyword; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.keyword + ADD CONSTRAINT fkkeyword FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE CASCADE; + + +-- +-- Name: keywordextension fkkeywordextension; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.keywordextension + ADD CONSTRAINT fkkeywordextension FOREIGN KEY (usersusername) REFERENCES public.users(username); + + +-- +-- Name: learningresourcetype fklearningresourcetype; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.learningresourcetype 
+ ADD CONSTRAINT fklearningresourcetype FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE CASCADE; + + +-- +-- Name: material fkmaterial; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.material + ADD CONSTRAINT fkmaterial FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE RESTRICT; + + +-- +-- Name: collectioneducationalmaterial fkmaterialcollection; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.collectioneducationalmaterial + ADD CONSTRAINT fkmaterialcollection FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id) ON DELETE RESTRICT; + + +-- +-- Name: versioncomposition fkmaterialversion; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.versioncomposition + ADD CONSTRAINT fkmaterialversion FOREIGN KEY (materialid) REFERENCES public.material(id); + + +-- +-- Name: rating fkratingeducationalmaterial; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.rating + ADD CONSTRAINT fkratingeducationalmaterial FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id); + + +-- +-- Name: rating fkratingusers; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.rating + ADD CONSTRAINT fkratingusers FOREIGN KEY (usersusername) REFERENCES public.users(username); + + +-- +-- Name: record fkrecord; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.record + ADD CONSTRAINT fkrecord FOREIGN KEY (materialid) REFERENCES public.material(id) ON DELETE RESTRICT; + + +-- +-- Name: temporaryattachment fktempattachment; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.temporaryattachment + ADD CONSTRAINT fktempattachment FOREIGN KEY (attachmentid) REFERENCES public.attachment(id); + + +-- +-- Name: accessibilityfeatureextension 
fkuseraccessibilityfeatureextension; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityfeatureextension + ADD CONSTRAINT fkuseraccessibilityfeatureextension FOREIGN KEY (usersusername) REFERENCES public.users(username); + + +-- +-- Name: accessibilityhazardextension fkusersaccessibiltyhazardextension; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.accessibilityhazardextension + ADD CONSTRAINT fkusersaccessibiltyhazardextension FOREIGN KEY (usersusername) REFERENCES public.users(username); + + +-- +-- Name: userscollection fkuserscollection; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.userscollection + ADD CONSTRAINT fkuserscollection FOREIGN KEY (usersusername) REFERENCES public.users(username); + + +-- +-- Name: educationallevelextension fkuserseducationallevelextension; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.educationallevelextension + ADD CONSTRAINT fkuserseducationallevelextension FOREIGN KEY (usersusername) REFERENCES public.users(username); + + +-- +-- Name: keywordextension fkuserskeywordextension; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.keywordextension + ADD CONSTRAINT fkuserskeywordextension FOREIGN KEY (educationalmaterialid) REFERENCES public.educationalmaterial(id); + + +-- +-- Name: versioncomposition fkversioncomposition; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.versioncomposition + ADD CONSTRAINT fkversioncomposition FOREIGN KEY (educationalmaterialid, publishedat) REFERENCES public.educationalmaterialversion(educationalmaterialid, publishedat); + + +-- +-- Name: attachmentversioncomposition fkversioncompositionattachment; Type: FK CONSTRAINT; Schema: public; Owner: aoe_admin +-- + +ALTER TABLE ONLY public.attachmentversioncomposition + ADD CONSTRAINT fkversioncompositionattachment FOREIGN 
KEY (versioneducationalmaterialid, versionmaterialid, versionpublishedat) REFERENCES public.versioncomposition(educationalmaterialid, materialid, publishedat); + + +-- +-- Name: TABLE accessibilityfeature; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.accessibilityfeature TO reporter; + + +-- +-- Name: SEQUENCE accessibilityfeature_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.accessibilityfeature_id_seq TO reporter; + + +-- +-- Name: TABLE accessibilityfeatureextension; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.accessibilityfeatureextension TO reporter; + + +-- +-- Name: SEQUENCE accessibilityfeatureextension_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.accessibilityfeatureextension_id_seq TO reporter; + + +-- +-- Name: TABLE accessibilityhazard; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.accessibilityhazard TO reporter; + + +-- +-- Name: SEQUENCE accessibilityhazard_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.accessibilityhazard_id_seq TO reporter; + + +-- +-- Name: TABLE accessibilityhazardextension; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.accessibilityhazardextension TO reporter; + + +-- +-- Name: SEQUENCE accessibilityhazardextension_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.accessibilityhazardextension_id_seq TO reporter; + + +-- +-- Name: TABLE alignmentobject; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.alignmentobject TO reporter; + + +-- +-- Name: SEQUENCE alignmentobject_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.alignmentobject_id_seq TO reporter; + + +-- +-- Name: TABLE aoeuser; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT 
SELECT ON TABLE public.aoeuser TO reporter; + + +-- +-- Name: TABLE attachment; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.attachment TO reporter; + + +-- +-- Name: SEQUENCE attachment_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.attachment_id_seq TO reporter; + + +-- +-- Name: TABLE attachmentversioncomposition; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.attachmentversioncomposition TO reporter; + + +-- +-- Name: TABLE author; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.author TO reporter; + + +-- +-- Name: SEQUENCE author_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.author_id_seq TO reporter; + + +-- +-- Name: TABLE collection; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.collection TO reporter; + + +-- +-- Name: SEQUENCE collection_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.collection_id_seq TO reporter; + + +-- +-- Name: TABLE collectionaccessibilityfeature; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.collectionaccessibilityfeature TO reporter; + + +-- +-- Name: SEQUENCE collectionaccessibilityfeature_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.collectionaccessibilityfeature_id_seq TO reporter; + + +-- +-- Name: TABLE collectionaccessibilityhazard; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.collectionaccessibilityhazard TO reporter; + + +-- +-- Name: SEQUENCE collectionaccessibilityhazard_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.collectionaccessibilityhazard_id_seq TO reporter; + + +-- +-- Name: TABLE collectionalignmentobject; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE 
public.collectionalignmentobject TO reporter; + + +-- +-- Name: SEQUENCE collectionalignmentobject_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.collectionalignmentobject_id_seq TO reporter; + + +-- +-- Name: TABLE collectioneducationalaudience; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.collectioneducationalaudience TO reporter; + + +-- +-- Name: SEQUENCE collectioneducationalaudience_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.collectioneducationalaudience_id_seq TO reporter; + + +-- +-- Name: TABLE collectioneducationallevel; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.collectioneducationallevel TO reporter; + + +-- +-- Name: SEQUENCE collectioneducationallevel_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.collectioneducationallevel_id_seq TO reporter; + + +-- +-- Name: TABLE collectioneducationalmaterial; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.collectioneducationalmaterial TO reporter; + + +-- +-- Name: TABLE collectioneducationaluse; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.collectioneducationaluse TO reporter; + + +-- +-- Name: SEQUENCE collectioneducationaluse_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.collectioneducationaluse_id_seq TO reporter; + + +-- +-- Name: TABLE collectionheading; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.collectionheading TO reporter; + + +-- +-- Name: SEQUENCE collectionheading_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.collectionheading_id_seq TO reporter; + + +-- +-- Name: TABLE collectionkeyword; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.collectionkeyword TO reporter; + + +-- 
+-- Name: SEQUENCE collectionkeyword_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.collectionkeyword_id_seq TO reporter; + + +-- +-- Name: TABLE collectionlanguage; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.collectionlanguage TO reporter; + + +-- +-- Name: SEQUENCE collectionlanguage_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.collectionlanguage_id_seq TO reporter; + + +-- +-- Name: TABLE collectionthumbnail; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.collectionthumbnail TO reporter; + + +-- +-- Name: SEQUENCE collectionthumbnail_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.collectionthumbnail_id_seq TO reporter; + + +-- +-- Name: TABLE educationalaudience; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.educationalaudience TO reporter; + + +-- +-- Name: SEQUENCE educationalaudience_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.educationalaudience_id_seq TO reporter; + + +-- +-- Name: TABLE educationallevel; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.educationallevel TO reporter; + + +-- +-- Name: SEQUENCE educationallevel_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.educationallevel_id_seq TO reporter; + + +-- +-- Name: TABLE educationallevelextension; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.educationallevelextension TO reporter; + + +-- +-- Name: SEQUENCE educationallevelextension_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.educationallevelextension_id_seq TO reporter; + + +-- +-- Name: TABLE educationalmaterial; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.educationalmaterial 
TO reporter; + + +-- +-- Name: SEQUENCE educationalmaterial_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.educationalmaterial_id_seq TO reporter; + + +-- +-- Name: TABLE educationalmaterialversion; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.educationalmaterialversion TO reporter; + + +-- +-- Name: TABLE educationaluse; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.educationaluse TO reporter; + + +-- +-- Name: SEQUENCE educationaluse_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.educationaluse_id_seq TO reporter; + + +-- +-- Name: TABLE inlanguage; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.inlanguage TO reporter; + + +-- +-- Name: SEQUENCE inlanguage_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.inlanguage_id_seq TO reporter; + + +-- +-- Name: TABLE isbasedon; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.isbasedon TO reporter; + + +-- +-- Name: SEQUENCE isbasedon_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.isbasedon_id_seq TO reporter; + + +-- +-- Name: TABLE isbasedonauthor; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.isbasedonauthor TO reporter; + + +-- +-- Name: SEQUENCE isbasedonauthor_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.isbasedonauthor_id_seq TO reporter; + + +-- +-- Name: TABLE keyword; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.keyword TO reporter; + + +-- +-- Name: SEQUENCE keyword_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.keyword_id_seq TO reporter; + + +-- +-- Name: TABLE keywordextension; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE 
public.keywordextension TO reporter; + + +-- +-- Name: SEQUENCE keywordextension_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.keywordextension_id_seq TO reporter; + + +-- +-- Name: TABLE learningresourcetype; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.learningresourcetype TO reporter; + + +-- +-- Name: SEQUENCE learningresourcetype_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.learningresourcetype_id_seq TO reporter; + + +-- +-- Name: TABLE licensecode; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.licensecode TO reporter; + + +-- +-- Name: TABLE material; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.material TO reporter; + + +-- +-- Name: SEQUENCE material_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.material_id_seq TO reporter; + + +-- +-- Name: TABLE materialdescription; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.materialdescription TO reporter; + + +-- +-- Name: SEQUENCE materialdescription_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.materialdescription_id_seq TO reporter; + + +-- +-- Name: TABLE materialdisplayname; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.materialdisplayname TO reporter; + + +-- +-- Name: SEQUENCE materialdisplayname_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.materialdisplayname_id_seq TO reporter; + + +-- +-- Name: TABLE materialname; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.materialname TO reporter; + + +-- +-- Name: SEQUENCE materialname_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.materialname_id_seq TO reporter; + + +-- +-- Name: TABLE 
notification; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.notification TO reporter; + + +-- +-- Name: TABLE publisher; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.publisher TO reporter; + + +-- +-- Name: SEQUENCE publisher_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.publisher_id_seq TO reporter; + + +-- +-- Name: TABLE rating; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.rating TO reporter; + + +-- +-- Name: SEQUENCE rating_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.rating_id_seq TO reporter; + + +-- +-- Name: TABLE record; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.record TO reporter; + + +-- +-- Name: SEQUENCE record_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.record_id_seq TO reporter; + + +-- +-- Name: TABLE temporaryattachment; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.temporaryattachment TO reporter; + + +-- +-- Name: SEQUENCE temporaryattachment_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.temporaryattachment_id_seq TO reporter; + + +-- +-- Name: TABLE temporaryrecord; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.temporaryrecord TO reporter; + + +-- +-- Name: SEQUENCE temporaryrecord_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.temporaryrecord_id_seq TO reporter; + + +-- +-- Name: TABLE thumbnail; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.thumbnail TO reporter; + + +-- +-- Name: SEQUENCE thumbnail_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.thumbnail_id_seq TO reporter; + + +-- +-- Name: TABLE users; Type: ACL; Schema: public; 
Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.users TO reporter; + + +-- +-- Name: SEQUENCE users_id_seq; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON SEQUENCE public.users_id_seq TO reporter; + + +-- +-- Name: TABLE userscollection; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.userscollection TO reporter; + + +-- +-- Name: TABLE versioncomposition; Type: ACL; Schema: public; Owner: aoe_admin +-- + +GRANT SELECT ON TABLE public.versioncomposition TO reporter; + + +-- +-- PostgreSQL database dump complete +-- + diff --git a/aoe-web-backend/src/api/routes-root/h5p.ts b/aoe-web-backend/src/api/routes-root/h5p.ts index a693fda7f..151b3554c 100644 --- a/aoe-web-backend/src/api/routes-root/h5p.ts +++ b/aoe-web-backend/src/api/routes-root/h5p.ts @@ -12,7 +12,6 @@ export default (router: Router): void => { const moduleRoot = '/h5p'; router.get(`${moduleRoot}/play/:keyS3`, downloadAndRenderH5P); - // router.get('/h5p/content/:id/:file(*)', getH5PContent); // H5P application state not supported for anonymous users in AOE. router.get(`${moduleRoot}/contentUserData/:contentID/state/:stateID`, (_req: Request, res: Response): void => { diff --git a/aoe-web-backend/src/api/routes-v1/download.ts b/aoe-web-backend/src/api/routes-v1/download.ts index 5074e976a..866f40c9f 100644 --- a/aoe-web-backend/src/api/routes-v1/download.ts +++ b/aoe-web-backend/src/api/routes-v1/download.ts @@ -14,7 +14,6 @@ import { NextFunction, Request, Response, Router } from 'express'; * @param router express.Router */ export default (router: Router): void => { - // TODO: Add regex validation router.get('/download/:filename', downloadPreviewFile); // Single file download and save to user's workstation. 
diff --git a/aoe-web-backend/src/api/routes-v1/legacy.ts b/aoe-web-backend/src/api/routes-v1/legacy.ts index f355580d4..306473d25 100644 --- a/aoe-web-backend/src/api/routes-v1/legacy.ts +++ b/aoe-web-backend/src/api/routes-v1/legacy.ts @@ -30,7 +30,6 @@ export default (router: Router): void => { router.get('/aoeUsers', hasAccessToAOE, getAoeUsers); router.post('/changeUser', hasAccessToAOE, changeMaterialUser); - // TODO: Unused endpoint? router.get('/material', db.getMaterial); router.post( '/material/attachment/:materialId', @@ -53,7 +52,6 @@ export default (router: Router): void => { router.get('/pdf/content/:key', downloadPdfFromAllas); router.get('/recentmaterial', db.getRecentMaterial); - // TODO: Duplicate functionality with DELETE /material/:edumaterialid - endpoint used by administrator archiving functionality router.delete('/removeMaterial/:id', hasAccessToAOE, removeEducationalMaterial); router.get('/thumbnail/:id', downloadEmThumbnail); diff --git a/aoe-web-backend/src/app.ts b/aoe-web-backend/src/app.ts index ab243ee6f..471711e77 100755 --- a/aoe-web-backend/src/app.ts +++ b/aoe-web-backend/src/app.ts @@ -15,13 +15,38 @@ import compression from 'compression'; import flash from 'connect-flash'; import cors, { CorsOptions } from 'cors'; import express, { Express, NextFunction, Request, Response, Router } from 'express'; -import session, { SessionOptions } from 'express-session'; import { createProxyMiddleware } from 'http-proxy-middleware'; import lusca from 'lusca'; -import memorystore from 'memorystore'; +import passport from 'passport'; +import connectRedis from 'connect-redis'; +import session, { SessionOptions } from 'express-session'; +import clientRedis from '@resource/redisClient'; const app: Express = express(); -const MemoryStore = memorystore(session); + +const RedisStore = connectRedis(session); + +app.use( + session({ + store: new RedisStore({ client: clientRedis, logErrors: true }), + resave: config.SESSION_CONFIG_OPTIONS.resave as 
boolean, + rolling: config.SESSION_CONFIG_OPTIONS.rolling as boolean, + saveUninitialized: config.SESSION_CONFIG_OPTIONS.saveUninitialized as boolean, + secret: config.SESSION_CONFIG_OPTIONS.secret as string, + proxy: config.SESSION_CONFIG_OPTIONS.proxy, + cookie: { + domain: config.SESSION_COOKIE_OPTIONS.domain, + httpOnly: config.SESSION_COOKIE_OPTIONS.httpOnly, + maxAge: config.SESSION_COOKIE_OPTIONS.maxAge, + sameSite: config.SESSION_COOKIE_OPTIONS.sameSite, + path: config.SESSION_COOKIE_OPTIONS.path, + secure: config.SESSION_COOKIE_OPTIONS.secure, + }, + } as SessionOptions), +); + +app.use(passport.initialize()); +app.use(passport.session()); app.disable('x-powered-by'); @@ -61,25 +86,7 @@ app.use(compression()); app.use(flash()); app.use(morganLogger); -if (process.env.NODE_ENV === 'localhost') { - try { - // Add a development helper module (dev.ts). - require('./dev').devHelper(app); - } catch (error) { - winstonLogger.debug('Development helper module (dev.ts) not available.'); - } -} - -// Initialize session management and OIDC authorization -app.use( - session({ - resave: config.SESSION_CONFIG_OPTIONS.resave as boolean, - saveUninitialized: config.SESSION_CONFIG_OPTIONS.saveUninitialized as boolean, - secret: config.SESSION_CONFIG_OPTIONS.secret as string, - store: new MemoryStore({ checkPeriod: 86400000 }), // Prune expired entries every 24h - } as SessionOptions), -); -oidc.sessionInit(app); +// Initialize OIDC authorization oidc.authInit(app); // Initialize H5P editor @@ -97,7 +104,7 @@ app.use( logLevel: 'debug', logProvider: () => winstonLogger, changeOrigin: true, - pathRewrite: (path: string) => path.replace('/v2', ''), + pathRewrite: (path: string) => path.replace('api/v2', 'analytics/api'), }), ); @@ -107,11 +114,11 @@ app.use('/favicon.ico', express.static('./views/favicon.ico')); app.use('/', apiRouterRoot); app.use('/api/v1/', apiRouterV1); app.use('/api/v2/', apiRouterV2); -app.use('/h5p/content', express.static('/webdata/h5p/content')); 
-app.use('/h5p/core', express.static('/webdata/h5p/core')); -app.use('/h5p/editor', express.static('/webdata/h5p/editor')); -app.use('/h5p/libraries', express.static('/webdata/h5p/libraries')); - +app.use('/h5p/content', express.static(config.MEDIA_FILE_PROCESS.h5pPathContent)); +app.use('/h5p/core', express.static(config.MEDIA_FILE_PROCESS.h5pPathCore)); +app.use('/h5p/editor', express.static(config.MEDIA_FILE_PROCESS.h5pPathEditor)); +app.use('/h5p/libraries', express.static(config.MEDIA_FILE_PROCESS.h5pPathLibraries)); +app.use('/content/', express.static(config.MEDIA_FILE_PROCESS.htmlFolder)); app.use(lusca.xframe('SAMEORIGIN')); app.use(lusca.xssProtection); app.use((err, req: Response, res: NextFunction): void => { diff --git a/aoe-web-backend/src/config/index.ts b/aoe-web-backend/src/config/index.ts index 69aad47e2..a777015f0 100644 --- a/aoe-web-backend/src/config/index.ts +++ b/aoe-web-backend/src/config/index.ts @@ -7,11 +7,7 @@ const missingEnvs: string[] = []; process.env.NODE_ENV || missingEnvs.push('NODE_ENV'); process.env.PORT_LISTEN || missingEnvs.push('PORT_LISTEN'); process.env.LOG_LEVEL || missingEnvs.push('LOG_LEVEL'); -process.env.TEST_RUN || missingEnvs.push('TEST_RUN'); process.env.CLOUD_STORAGE_ENABLED || missingEnvs.push('CLOUD_STORAGE_ENABLED'); -process.env.CLOUD_STORAGE_ACCESS_KEY || missingEnvs.push('CLOUD_STORAGE_ACCESS_KEY'); -process.env.CLOUD_STORAGE_ACCESS_SECRET || missingEnvs.push('CLOUD_STORAGE_ACCESS_SECRET'); -process.env.CLOUD_STORAGE_API || missingEnvs.push('CLOUD_STORAGE_API'); process.env.CLOUD_STORAGE_REGION || missingEnvs.push('CLOUD_STORAGE_REGION'); process.env.CLOUD_STORAGE_BUCKET || missingEnvs.push('CLOUD_STORAGE_BUCKET'); process.env.CLOUD_STORAGE_BUCKET_PDF || missingEnvs.push('CLOUD_STORAGE_BUCKET_PDF'); @@ -32,6 +28,7 @@ process.env.KAFKA_BROKER_SERVERS || missingEnvs.push('KAFKA_BROKER_SERVERS'); process.env.KAFKA_BROKER_TOPIC_MATERIAL_ACTIVITY || missingEnvs.push('KAFKA_BROKER_TOPIC_MATERIAL_ACTIVITY'); 
process.env.KAFKA_BROKER_TOPIC_SEARCH_REQUESTS || missingEnvs.push('KAFKA_BROKER_TOPIC_SEARCH_REQUESTS'); process.env.KAFKA_CLIENT_ID || missingEnvs.push('KAFKA_CLIENT_ID'); +process.env.KAFKA_CLIENT_REGION || missingEnvs.push('KAFKA_CLIENT_REGION'); process.env.CONVERSION_TO_PDF_API || missingEnvs.push('CONVERSION_TO_PDF_API'); process.env.CONVERSION_TO_PDF_ENABLED || missingEnvs.push('CONVERSION_TO_PDF_ENABLED'); process.env.POSTGRESQL_HOST || missingEnvs.push('POSTGRESQL_HOST'); @@ -40,22 +37,16 @@ process.env.POSTGRESQL_DATA || missingEnvs.push('POSTGRESQL_DATA'); process.env.REDIS_HOST || missingEnvs.push('REDIS_HOST'); process.env.REDIS_PORT || missingEnvs.push('REDIS_PORT'); process.env.REDIS_PASS || missingEnvs.push('REDIS_PASS'); +process.env.REDIS_USE_TLS || missingEnvs.push('REDIS_USE_TLS'); process.env.SERVER_CONFIG_OAIPMH_ANALYTICS_URL || missingEnvs.push('SERVER_CONFIG_OAIPMH_ANALYTICS_URL'); process.env.STREAM_ENABLED || missingEnvs.push('STREAM_ENABLED'); process.env.STREAM_FILESIZE_MIN || missingEnvs.push('STREAM_FILESIZE_MIN'); process.env.STREAM_REDIRECT_URI || missingEnvs.push('STREAM_REDIRECT_URI'); process.env.STREAM_STATUS_HOST || missingEnvs.push('STREAM_STATUS_HOST'); process.env.STREAM_STATUS_PATH || missingEnvs.push('STREAM_STATUS_PATH'); -process.env.PID_API_KEY || missingEnvs.push('PID_API_KEY'); -process.env.PID_SERVICE_URL || missingEnvs.push('PID_SERVICE_URL'); - -if (process.env.TEST_RUN === 'true') { - process.env.PG_USER || missingEnvs.push('POSTGRES_USER'); - process.env.PG_PASS || missingEnvs.push('POSTGRES_PASSWORD'); -} else { - process.env.PG_USER || missingEnvs.push('POSTGRES_USER_SECONDARY'); - process.env.PG_PASS || missingEnvs.push('POSTGRES_PASSWORD_SECONDARY'); -} +process.env.STREAM_STATUS_HOST_HTTPS_ENABLED || missingEnvs.push('STREAM_STATUS_HOST_HTTPS_ENABLED'); +process.env.PG_USER || missingEnvs.push('PG_USER'); +process.env.PG_PASS || missingEnvs.push('PG_PASS'); if (missingEnvs.length > 0) { 
winstonLogger.error('All required environment variables are not available: %s', missingEnvs); @@ -69,14 +60,10 @@ export default { logLevel: process.env.LOG_LEVEL as string, nodeEnv: process.env.NODE_ENV as string, portListen: parseInt(process.env.PORT_LISTEN as string, 10) as number, - testRun: ((process.env.TEST_RUN as string).toLowerCase() === 'true') as boolean, } as const, // Cloud storage configurations. CLOUD_STORAGE_CONFIG: { - accessKey: process.env.CLOUD_STORAGE_ACCESS_KEY as string, - accessSecret: process.env.CLOUD_STORAGE_ACCESS_SECRET as string, - apiURL: process.env.CLOUD_STORAGE_API as string, region: process.env.CLOUD_STORAGE_REGION as string, bucket: process.env.CLOUD_STORAGE_BUCKET as string, bucketPDF: process.env.CLOUD_STORAGE_BUCKET_PDF as string, @@ -107,6 +94,7 @@ export default { topicMaterialActivity: process.env.KAFKA_BROKER_TOPIC_MATERIAL_ACTIVITY as string, topicSearchRequests: process.env.KAFKA_BROKER_TOPIC_SEARCH_REQUESTS as string, clientId: process.env.KAFKA_CLIENT_ID as string, + region: process.env.KAFKA_CLIENT_REGION as string, } as const, // Configuration for PostgreSQL database connections. @@ -122,14 +110,14 @@ export default { REDIS_OPTIONS: { host: process.env.REDIS_HOST as string, port: parseInt(process.env.REDIS_PORT as string, 10) as number, + username: process.env.REDIS_USERNAME as string, pass: process.env.REDIS_PASS as string, + protocol: process.env.REDIS_USE_TLS != 'true' ? 'redis' : 'rediss', } as const, // AOE server and service component general purpose configurations. SERVER_CONFIG_OPTIONS: { oaipmhAnalyticsURL: process.env.SERVER_CONFIG_OAIPMH_ANALYTICS_URL as string, - pidApiKey: process.env.PID_API_KEY as string, - pidServiceURL: process.env.PID_SERVICE_URL as string, } as const, // Session management conventions to handle session initialization and persistence. 
@@ -156,8 +144,6 @@ export default { mimeTypeArr: ['audio/mp4', 'audio/mpeg', 'audio/x-m4a', 'video/mp4'] as string[], minFileSize: parseInt(process.env.STREAM_FILESIZE_MIN, 10) as number, redirectUri: process.env.STREAM_REDIRECT_URI as string, - statusHost: process.env.STREAM_STATUS_HOST as string, - statusPath: process.env.STREAM_STATUS_PATH as string, streamEnabled: (process.env.STREAM_ENABLED === '1') as boolean, } as const, @@ -165,5 +151,7 @@ export default { STREAM_STATUS_REQUEST: { host: process.env.STREAM_STATUS_HOST as string, path: process.env.STREAM_STATUS_PATH as string, + port: process.env.STREAM_STATUS_PORT as string, + httpsEnabled: (process.env.STREAM_STATUS_HOST_HTTPS_ENABLED === '1') as boolean, } as const, } as const; diff --git a/aoe-web-backend/src/domain/aoeModels.ts b/aoe-web-backend/src/domain/aoeModels.ts index ebe52373f..0cb711455 100644 --- a/aoe-web-backend/src/domain/aoeModels.ts +++ b/aoe-web-backend/src/domain/aoeModels.ts @@ -19,6 +19,32 @@ export const commonSettings: ModelOptions = { timestamps: false, }; +export const Urn = sequelize.define( + 'urn', + { + id: { + field: 'id', + type: DataTypes.INTEGER, + primaryKey: true, + autoIncrement: true, + }, + material_url: { + field: 'material_url', + type: DataTypes.STRING, + allowNull: false, + unique: true, + }, + }, + { + indexes: [ + { + unique: true, + fields: ['material_url'], + }, + ], + } && (commonSettings as ModelOptions), +); + export const AOEUser = sequelize.define( 'aoeuser', { diff --git a/aoe-web-backend/src/helpers/officeToPdfConverter.ts b/aoe-web-backend/src/helpers/officeToPdfConverter.ts index 1f031c7ca..fa8b33eed 100644 --- a/aoe-web-backend/src/helpers/officeToPdfConverter.ts +++ b/aoe-web-backend/src/helpers/officeToPdfConverter.ts @@ -117,54 +117,6 @@ export const downloadPdfFromAllas = async (req: Request, res: Response, next: Ne } }; -// export async function convertOfficeToPdf(req: Request, res: Response, next: NextFunction) { -// try { -// if 
(!req.params.key) { -// next(new ErrorHandler("400", "key missing")); -// } -// winstonLogger.debug("readstreamfrompouta"); -// const params = { -// "Bucket" : process.env.BUCKET_NAME, -// "Key" : req.params.key -// }; -// const folderpath = process.env.HTML_FOLDER + "/" + req.params.key; -// const filename = req.params.key.substring(0, req.params.key.lastIndexOf(".")) + ".pdf"; -// winstonLogger.debug("filename: " + filename); -// const stream = await directoryDownloadFromStorage(params); -// stream.on("error", function(e) { -// winstonLogger.error(e); -// next(new ErrorHandler(e.statusCode, e.message || "Error in download")); -// }); -// stream.pipe(fs.createWriteStream(folderpath)); -// stream.on("end", async function() { -// try { -// winstonLogger.debug("starting convertOfficeFileToPDF"); -// winstonLogger.debug(folderpath); -// winstonLogger.debug(filename); -// const path = await convertOfficeFileToPDF(folderpath, filename); -// winstonLogger.debug("starting createReadStream: " + path); -// const readstream = fs.createReadStream(path); -// readstream.on("error", function(e) { -// winstonLogger.error(e); -// next(new ErrorHandler(e.statusCode, "Error in sending pdf")); -// }); -// res.header("Content-Disposition", contentDisposition(filename)); -// readstream.pipe(res); -// // res.status(200).json(d); -// // outstream.pipe(res); -// } -// catch (error) { -// winstonLogger.error(error); -// next(new ErrorHandler(error.statusCode, "Issue showing pdf")); -// } -// }); -// } -// catch (error) { -// winstonLogger.error(error); -// next(new ErrorHandler(error.statusCode, "Issue showing pdf")); -// } -// } - /** * Convert an office format file to PDF format. * @param {string} filepath File path of the original office format file. 
diff --git a/aoe-web-backend/src/metadataEngine/dataMapping.ts b/aoe-web-backend/src/metadataEngine/dataMapping.ts index 7dd75920b..4a925a657 100755 --- a/aoe-web-backend/src/metadataEngine/dataMapping.ts +++ b/aoe-web-backend/src/metadataEngine/dataMapping.ts @@ -1,5 +1,3 @@ -import winstonLogger from '@util/winstonLogger'; - async function createMaterialObject(indata: any) { let obj: any = {}; let data = await createEducationalMaterialObject(indata); @@ -38,7 +36,6 @@ async function createMaterialObject(indata: any) { async function createPropertyNameList(obj: any, str: string) { const list: any = []; Object.getOwnPropertyNames(obj).forEach(function (val: any, idx, array) { - // startsWith val.includes(str) if (val.startsWith(str)) { list.push(val); } diff --git a/aoe-web-backend/src/metadataEngine/xlsxHandler.ts b/aoe-web-backend/src/metadataEngine/xlsxHandler.ts deleted file mode 100755 index 5436ec246..000000000 --- a/aoe-web-backend/src/metadataEngine/xlsxHandler.ts +++ /dev/null @@ -1,281 +0,0 @@ -// handling and validating xlsx file -import apiQ from '@query/apiQueries'; -import winstonLogger from '@util/winstonLogger'; -import { Request, Response } from 'express'; -import multer from 'multer'; -import xlsx, { ParsingOptions } from 'xlsx'; -import mapper from './dataMapping'; - -const fileFilter = (req: any, file: any, cb: any) => { - if (file.mimetype === 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') { - cb(undefined, true); - } else { - cb(undefined, false); - } -}; - -const storage = multer.diskStorage({ - destination: function (req: any, file: any, cb: any) { - cb(undefined, 'uploads/'); - }, - filename: function (req: any, file: any, cb: any) { - const datetimestamp = Date.now(); - cb( - undefined, - file.fieldname + - '-' + - datetimestamp + - '.' 
+ - file.originalname.split('.')[file.originalname.split('.').length - 1], - ); - }, -}); - -const maxFileSize = 5 * 1024 * 1024; -const upload = multer({ - storage: storage, - limits: { fileSize: maxFileSize }, - fileFilter: fileFilter, -}); - -const fs = require('fs'); - -async function createPropertyNameList(obj: any, str: string) { - const list: any = []; - Object.getOwnPropertyNames(obj).forEach(function (val: any, idx, array) { - if (val.includes(str)) { - list.push(val); - } - }); - return list; -} - -async function validate(data: any) { - const licensekoodisto = [{ type: 'cc' }, { type: 'cc-ra' }, { type: 'cc-dd' }, { type: 'mit' }]; - const o: any = {}; - const key = 'error'; - o[key] = []; - for (const d in data) { - const avainsanaList: any = await createPropertyNameList(data[d], 'avainsana'); - const row = Number(d) + 2; - // validate mandatory fields - if (data[d].nimi === undefined) { - o[key].push(createMessage(row, 'nimi', 'nimi cannot be empty')); - } - if (data[d].julkaisuajankohta === undefined) { - o[key].push(createMessage(row, 'julkaisuajankohta', 'julkaisuajankohta cannot be empty')); - } else { - const da = data[d].julkaisuajankohta.toString(); - if (da.length !== 10) { - o[key].push( - createMessage(row, 'julkaisuajankohta', 'Field must contain 8 characters. Correct format is yyyymmd'), - ); - } else if (!/\d\d[.]\d\d[.]\d\d\d\d/.test(da)) { - o[key].push(createMessage(row, 'julkaisuajankohta', 'Only numbers are allowed. 
Correct format is yyyymmdd')); - } - } - if (data[d].linkki === undefined) { - o[key].push(createMessage(row, 'linkki', 'linkki cannot be empty')); - } - if (data[d].tekija === undefined) { - o[key].push(createMessage(row, 'tekija', 'tekija cannot be empty')); - } - if (data[d].organisaatio === undefined) { - o[key].push(createMessage(row, 'organisaatio', 'organisaatio cannot be empty')); - } - if (data[d].lisenssi === undefined) { - o[key].push(createMessage(row, 'lisenssi', 'lisenssi cannot be empty')); - } - if (data[d].kieli === undefined) { - o[key].push(createMessage(row, 'kieli', 'kieli cannot be empty')); - } else { - // validate koodisto here - // const result = validateKoodistoCode(data[d].lisenssi, licensekoodisto); - // if (!result) { - // o[key].push(createMessage(row, "lisenssi", "not found from koodisto service")); - // } - } - // check that row does not have dublicate data - if (await hasDuplicates(avainsanaList, data[d])) { - o[key].push(createMessage(row, 'avainsana', 'avainsana has dublicate value')); - } - const kohderyhmaList: any = await createPropertyNameList(data[d], 'kohderyhma'); - if (await hasDuplicates(kohderyhmaList, data[d])) { - o[key].push(createMessage(row, 'kohderyhma', 'kohderyhma has dublicate value')); - } - const oppimateriaalityyppiList: any = await createPropertyNameList(data[d], 'oppimateriaalityyppi'); - if (await hasDuplicates(oppimateriaalityyppiList, data[d])) { - o[key].push(createMessage(row, 'oppimateriaalityyppi', 'oppimateriaalityyppi has dublicate value')); - } - const saavutettavuusList: any = await createPropertyNameList(data[d], 'saavutettavuus'); - if (await hasDuplicates(saavutettavuusList, data[d])) { - o[key].push(createMessage(row, 'saavutettavuus', 'saavutettavuus has dublicate value')); - } - const oppiasteList: any = await createPropertyNameList(data[d], 'oppiaste'); - if (await hasDuplicates(oppiasteList, data[d])) { - o[key].push(createMessage(row, 'oppiaste', 'oppiaste has dublicate value')); - } - 
const kayttotapaList: any = await createPropertyNameList(data[d], 'kaytto_opetuksessa'); - if (await hasDuplicates(kayttotapaList, data[d])) { - o[key].push(createMessage(row, 'kaytto_opetuksessa', 'kaytto_opetuksessa has dublicate value')); - } - const julkaisijaList: any = await createPropertyNameList(data[d], 'julkaisija'); - if (await hasDuplicates(julkaisijaList, data[d])) { - o[key].push(createMessage(row, 'julkaisija', 'julkaisija has dublicate value')); - } - const opettaaList: any = await createPropertyNameList(data[d], 'opettaa'); - if (await hasDuplicates(opettaaList, data[d])) { - o[key].push(createMessage(row, 'opettaa', 'opettaa has dublicate value')); - } - const arvioiList: any = await createPropertyNameList(data[d], 'arvioi'); - if (await hasDuplicates(arvioiList, data[d])) { - o[key].push(createMessage(row, 'arvioi', 'arvioi has dublicate value')); - } - // const vaikeustasoList: any = await createPropertyNameList(data[d], "vaikeustaso"); - // if (await hasDuplicates(vaikeustasoList, data[d])) { - // o[key].push(createMessage(row, "vaikeustaso", "vaikeustaso has dublicate value")); - // } - const koulutusasteList: any = await createPropertyNameList(data[d], 'koulutusaste'); - if (await hasDuplicates(koulutusasteList, data[d])) { - o[key].push(createMessage(row, 'koulutusaste', 'koulutusaste has dublicate value')); - } - const oppiaineList: any = await createPropertyNameList(data[d], 'oppiaine'); - if (await hasDuplicates(oppiaineList, data[d])) { - o[key].push(createMessage(row, 'oppiaine', 'oppiaine has dublicate value')); - } - const alkutasovaatimusList: any = await createPropertyNameList(data[d], 'alkutasovaatimus'); - if (await hasDuplicates(alkutasovaatimusList, data[d])) { - o[key].push(createMessage(row, 'alkutasovaatimus', 'alkutasovaatimus has dublicate value')); - } - const lukutaitovaatimusList: any = await createPropertyNameList(data[d], 'lukutaitovaatimus'); - if (await hasDuplicates(lukutaitovaatimusList, data[d])) { - 
o[key].push(createMessage(row, 'lukutaitovaatimus', 'lukutaitovaatimus has dublicate value')); - } - const edeltavaOsaaminenList: any = await createPropertyNameList(data[d], 'edeltava_osaaminen'); - if (await hasDuplicates(edeltavaOsaaminenList, data[d])) { - o[key].push(createMessage(row, 'edeltava_osaaminen', 'edeltava_osaaminen has dublicate value')); - } - const saavutettavuudenTukitoiminnotList: any = await createPropertyNameList( - data[d], - 'saavutettavuuden_tukitoiminnot', - ); - if (await hasDuplicates(saavutettavuudenTukitoiminnotList, data[d])) { - o[key].push( - createMessage(row, 'saavutettavuuden_tukitoiminnot', 'saavutettavuuden_tukitoiminnot has dublicate value'), - ); - } - const saavutettavuudenEsteetList: any = await createPropertyNameList(data[d], 'saavutettavuuden_esteet'); - if (await hasDuplicates(saavutettavuudenEsteetList, data[d])) { - o[key].push(createMessage(row, 'saavutettavuuden_esteet', 'saavutettavuuden_esteet has dublicate value')); - } - const vaikeustasoKielissaList: any = await createPropertyNameList(data[d], 'vaikeustaso_kielissa'); - if (await hasDuplicates(vaikeustasoKielissaList, data[d])) { - o[key].push(createMessage(row, 'vaikeustaso_kielissa', 'vaikeustaso_kielissa has dublicate value')); - } - } - return o; -} - -async function hasDuplicates(array: any, obj: any) { - const map: any = {}; - for (let i = 0; i < array.length; ++i) { - const value = obj[array[i]]; - // if (value !== "" && value !== 0) { - if (map[value]) { - winstonLogger.debug('dublicate:' + value); - return true; - } - map[value] = true; - // } - } - return false; -} - -function createMessage(row: number, column: string, reason: string) { - const data = { - row: row, - column: column, - reason: reason, - }; - return data; -} - -async function uploadXlsx(req: Request, res: Response) { - try { - const contentType = req.headers['content-type']; - if (contentType.startsWith('multipart/form-data')) { - try { - upload.single('xlsxfile')(req, res, async 
function () { - try { - if ((req).file === undefined) { - return res.status(400).send('Xlsx file expected. Max file size: ' + maxFileSize / 1024 / 1024 + 'MB'); - } - const options: ParsingOptions = { type: 'string' }; - const wb = xlsx.readFile((req).file.path, options); - const sheetNameList = wb.SheetNames; - const data = xlsx.utils.sheet_to_json(wb.Sheets['metadata']); - if (data.length === 0) { - return res.status(400).send('Cannot find data in metadata sheet'); - } - // validate data - const obj: any = await validate(data); - const key = 'error'; - if (Object.keys(obj[key]).length > 0) { - fs.unlinkSync((req).file.path); - return res.status(400).json(obj); - } - // insert to database - const o: any = {}; - const rowkey = 'row'; - o[rowkey] = []; - for (const d in data) { - winstonLogger.debug('inserting row ' + (Number(d) + 2)); - const materialobj = await mapper.createMaterialObject(data[d]); - await apiQ.insertEducationalMaterial(materialobj, function (err: any, result: any) { - if (err) { - o[rowkey].push({ - row: Number(d) + 2, - result: 'error', - }); - } else { - o[rowkey].push({ - row: Number(d) + 2, - result: 'success', - }); - } - }); - } - fs.unlinkSync((req).file.path); - res.status(200).json(o); - } catch (err) { - winstonLogger.error('Error in uploadXlsx(): %o', err); - fs.unlinkSync((req).file.path); - res.status(500).send('Error in file handling. 
Xlsx file expected'); - } - }); - } catch (err) { - winstonLogger.debug('Error in uploadXlsx(): %o', err); - fs.unlinkSync((req).file.path); - res.status(500).send('error'); - } - } else { - res.status(400).send('Not file found'); - } - } catch (error) { - winstonLogger.debug('Error in uploadXlsx(): ', error); - res.status(500).send('error'); - } -} - -// function validateKoodistoCode(str: String, koodisto: any) { -// for (const key in koodisto) { -// if (koodisto[key].type === str) { -// return true; -// } -// } -// return false; -// } - -module.exports = { - uploadXlsx: uploadXlsx, -}; diff --git a/aoe-web-backend/src/query/analyticsQueries.ts b/aoe-web-backend/src/query/analyticsQueries.ts index 53196d93f..c97ad7bd4 100644 --- a/aoe-web-backend/src/query/analyticsQueries.ts +++ b/aoe-web-backend/src/query/analyticsQueries.ts @@ -36,14 +36,3 @@ export const getPopularityQuery = 'from educationalmaterial where id = $1) as a, ' + '(select (SELECT EXTRACT(DAY FROM (select sum(now() - publishedat) from educationalmaterial where id = $1))) as b))' + 'as c;'; - -export async function getPopularity(id: string): Promise { - try { - const response = await db.task(async (t: any) => { - return await t.oneOrNone(getPopularityQuery, [id]); - }); - return response.popularity; - } catch (error) { - throw new Error(error); - } -} diff --git a/aoe-web-backend/src/query/apiQueries.ts b/aoe-web-backend/src/query/apiQueries.ts index 17520387b..8fd48c123 100644 --- a/aoe-web-backend/src/query/apiQueries.ts +++ b/aoe-web-backend/src/query/apiQueries.ts @@ -70,7 +70,6 @@ export async function getMaterial(req: Request, res: Response, next: NextFunctio } catch (err) { winstonLogger.error(err); next(new ErrorHandler(500, 'Issue getting materials ')); - // res.status(500).send("getting materials not succeeded"); } } @@ -230,10 +229,6 @@ export const getEducationalMaterialMetadata = async ( response = await t.any(query, [eduMaterialId]); } else { if (req.params.publishedat) { - // query = 
"select attachment.id, filepath, originalfilename, filesize, mimetype, format, filekey, - // filebucket, defaultfile, kind, label, srclang, materialid from material - // inner join attachment on material.id = attachment.materialid - // where material.educationalmaterialid = $1 and material.obsoleted = 0 and attachment.obsoleted = 0;"; query = 'SELECT attachment.id, filepath, originalfilename, filesize, mimetype, filekey, ' + 'filebucket, defaultfile, kind, label, srclang, materialid FROM attachmentversioncomposition AS v ' + @@ -251,10 +246,7 @@ export const getEducationalMaterialMetadata = async ( } } queries.push(response); - // const TYPE_TIMESTAMP = 1114; - // const TYPE_TIMESTAMPTZ = 1184; - // use raw date in version - // pgp.pg.types.setTypeParser(TYPE_TIMESTAMP, str => str); + query = 'SELECT DISTINCT publishedat ' + 'FROM versioncomposition ' + @@ -262,9 +254,6 @@ export const getEducationalMaterialMetadata = async ( 'ORDER BY publishedat DESC'; response = await t.any(query, [eduMaterialId]); queries.push(response); - // pgp.pg.types.setTypeParser(TYPE_TIMESTAMP, parseDate); - // const popularity = await t.one(getPopularityQuery, [eduMaterialId]); - // queries.push(popularity); if (req.params.publishedat) { query = 'SELECT urn FROM educationalmaterialversion WHERE educationalmaterialid = $1 AND publishedat = $2'; response = await t.oneOrNone(query, [eduMaterialId, req.params.publishedat]); @@ -289,7 +278,6 @@ export const getEducationalMaterialMetadata = async ( if (req.session?.passport && req.session?.passport.user && req.session?.passport.user.uid) { owner = await isOwner(eduMaterialId.toString(), req.session?.passport.user.uid); } - // winstonLogger.debug(owner); // add displayname object to material object for (const element of data[14]) { const nameobj = { @@ -312,7 +300,6 @@ export const getEducationalMaterialMetadata = async ( } jsonObj.id = data[0][0].id; jsonObj.materials = data[14]; - // winstonLogger.debug("The jsonObj before first check: " + 
JSON.stringify(jsonObj)); for (const i in jsonObj.materials) { let ext = ''; if (jsonObj.materials[i] && jsonObj.materials[i]['originalfilename']) { @@ -357,15 +344,14 @@ export const getEducationalMaterialMetadata = async ( * mimetype = text/html + result */ jsonObj.materials[i]['mimetype'] = 'text/html'; - jsonObj.materials[i]['filepath'] = process.env.HTML_BASE_URL + result; - // winstonLogger.debug("The jsonObj: " + JSON.stringify(jsonObj)); + jsonObj.materials[i]['filepath'] = + process.env.HTML_BASE_URL + result.replace(config.MEDIA_FILE_PROCESS.htmlFolder, '/content'); } else if (result != false) { /** * This means the function the returned true, but the mimetype was already text/html so we dont have to change it * Simply return the result to the frontend, which means we have to to the query here and push the response thereafter */ jsonObj.materials[i]['filepath'] = result; - // winstonLogger.debug("The jsonObj: " + JSON.stringify(jsonObj)); } } } @@ -401,7 +387,6 @@ export const getEducationalMaterialMetadata = async ( jsonObj.educationalAlignment = data[13]; jsonObj.educationalLevels = data[8]; jsonObj.educationalUses = data[9]; - // jsonObj.inLanguage = data[13]; jsonObj.accessibilityFeatures = data[5]; jsonObj.accessibilityHazards = data[6]; const license: any = {}; @@ -694,7 +679,6 @@ export const insertDataToDescription = async ( description: any, ): Promise => { const queries = []; - // const query = "INSERT INTO materialdisplayname (displayname, language, materialid) (SELECT $1,$2,$3 where $3 in (select id from material where educationalmaterialid = $4)) ON CONFLICT (language, materialid) DO UPDATE Set displayname = $1;"; const query = 'INSERT ' + 'INTO materialdescription ' + @@ -803,17 +787,10 @@ export const updateMaterial = async (metadata: EducationalMaterialMetadata, emid .tx(async (t: any) => { let query; const queries: any = []; - // let params = req.params; const materialname = metadata.name; - const nameparams = []; let response; 
winstonLogger.debug('Update metadata in updateMaterial(): ' + JSON.stringify(metadata)); - // let arr = metadata.name; - if (materialname == undefined) { - // query = "DELETE FROM materialname where educationalmaterialid = $1;"; - // response = await t.any(query, [req.params.id]); - // queries.push(response); - } else { + if (materialname !== undefined) { queries.push(await insertEducationalMaterialName(materialname, emid, t)); } @@ -995,7 +972,6 @@ export const updateMaterial = async (metadata: EducationalMaterialMetadata, emid } } // isBasedOn - params = []; let isBasedonArr = []; if (metadata.isBasedOn) { isBasedonArr = metadata.isBasedOn.externals; @@ -1080,13 +1056,8 @@ export const updateMaterial = async (metadata: EducationalMaterialMetadata, emid ], { table: 'alignmentobject' }, ); - // data input values: - // winstonLogger.debug(arr); + const values: any = []; - // const updateValues: Array = []; - // for ( let i = 0; i < arr.length; i += 1) { - // alignmentObjectArr[i].educationalmaterialid = emid; - // } alignmentObjectArr.forEach(async (element: any) => { const obj = { alignmenttype: element.alignmentType, @@ -1098,7 +1069,6 @@ export const updateMaterial = async (metadata: EducationalMaterialMetadata, emid targeturl: element.targetUrl, }; values.push(obj); - // updateValues.push({educationalframework : ((element.educationalFramework == undefined) ? 
"" : element.educationalFramework)}); }); query = pgp.helpers.insert(values, cs) + @@ -1106,7 +1076,6 @@ export const updateMaterial = async (metadata: EducationalMaterialMetadata, emid queries.push(await t.any(query)); } // Author - params = []; const authorArr = metadata.authors; query = 'DELETE FROM author where educationalmaterialid = $1;'; response = await t.any(query, [emid]); @@ -1126,13 +1095,8 @@ export const updateMaterial = async (metadata: EducationalMaterialMetadata, emid } // File details - params = []; const fileDetailArr = metadata.fileDetails; - if (fileDetailArr === undefined) { - // query = "DELETE FROM materialdisplayname where materialid = $1;"; - // response = await t.any(query, [emid]); - // queries.push(response); - } else { + if (fileDetailArr !== undefined) { for (const element of fileDetailArr) { const dnresult = await fh.insertDataToDisplayName(t, emid, element.id, element); queries.push(dnresult); @@ -1172,7 +1136,6 @@ export const updateMaterial = async (metadata: EducationalMaterialMetadata, emid for (const element of arr) { query = 'INSERT INTO accessibilityfeature (accessibilityfeaturekey, value, educationalmaterialid) VALUES ($1,$2,$3) ON CONFLICT (accessibilityfeaturekey, educationalmaterialid) DO NOTHING;'; - // query = "INSERT INTO materialdisplayname (displayname, language, materialid, slug) VALUES ($1,$2,$3,$4) ON CONFLICT (language, materialid) DO UPDATE Set displayname = $1, slug = $4"; queries.push(await t.any(query, [element.key, element.value, emid])); } } @@ -1203,7 +1166,6 @@ export const updateMaterial = async (metadata: EducationalMaterialMetadata, emid for (const element of arr) { query = 'INSERT INTO accessibilityhazard (accessibilityhazardkey, value, educationalmaterialid) VALUES ($1,$2,$3) ON CONFLICT (accessibilityhazardkey, educationalmaterialid) DO NOTHING;'; - // query = "INSERT INTO materialdisplayname (displayname, language, materialid, slug) VALUES ($1,$2,$3,$4) ON CONFLICT (language, materialid) DO UPDATE 
Set displayname = $1, slug = $4"; queries.push(await t.any(query, [element.key, element.value, emid])); } } @@ -1234,7 +1196,6 @@ export const updateMaterial = async (metadata: EducationalMaterialMetadata, emid for (const element of arr) { query = 'INSERT INTO educationallevel (educationallevelkey, value, educationalmaterialid) VALUES ($1,$2,$3) ON CONFLICT (educationallevelkey, educationalmaterialid) DO NOTHING;'; - // query = "INSERT INTO materialdisplayname (displayname, language, materialid, slug) VALUES ($1,$2,$3,$4) ON CONFLICT (language, materialid) DO UPDATE Set displayname = $1, slug = $4"; queries.push(await t.any(query, [element.key, element.value, emid])); } } @@ -1258,11 +1219,9 @@ export const updateMaterial = async (metadata: EducationalMaterialMetadata, emid // insert new version query = 'INSERT INTO educationalmaterialversion (educationalmaterialid, publishedat) values ($1, now()::timestamp(3)) returning publishedat;'; - // queries.push(await t.one(query, [emid])); publishedat = await t.one(query, [emid]); - // queries.push(publishedat); + for (const element of arr) { - // query = "INSERT INTO versioncomposition (educationalmaterialid, materialid, publishedat, priority) VALUES ($1,$2,now(),$3);"; query = 'INSERT INTO versioncomposition (educationalmaterialid, materialid, publishedat, priority) select $1,$2,now()::timestamp(3),$3 where exists (select * from material where id = $2 and educationalmaterialid = $1)'; queries.push(await t.none(query, [emid, element.materialId, element.priority])); @@ -1294,7 +1253,6 @@ export const updateMaterial = async (metadata: EducationalMaterialMetadata, emid .catch((err: Error) => { winstonLogger.error(err); throw err; - // next(new ErrorHandler(400, "Issue updating material")); }); }; @@ -1445,15 +1403,12 @@ export async function insertIntoEducationalMaterial(obj: any) { const materialData = { technicalname: obj.technicalname, createdat: obj.createdat, - // author : obj.author, - // organization : 
obj.organization, publishedat: obj.publishedat, updatedat: obj.updatedat, archivedat: obj.archivedat, timerequired: obj.timerequired, agerangemin: obj.agerangemin, agerangemax: obj.agerangemax, - // usersid : obj.usersid, usersusername: obj.username, licensecode: obj.licensecode, originalpublishedat: obj.originalpublishedat, @@ -1590,7 +1545,6 @@ export async function insertIntoAlignmentObject(obj: any, materialid: any) { export async function insertIntoMaterial(obj: any, materialid: any) { const data = { - // materialname : obj.materialname, link: obj.link, priority: obj.priority, educationalmaterialid: materialid, @@ -1599,14 +1553,6 @@ export async function insertIntoMaterial(obj: any, materialid: any) { await db.any(query); } -function createSlug(str: string) { - str = str.replace(/[ä]/g, 'a'); - str = str.replace(/[ö]/g, 'o'); - str = str.replace(/[å]/g, 'a'); - str = str.replace(/[^a-zA-Z0-9]/g, ''); - return str; -} - export async function isOwner(educationalmaterialid: string, username: string) { if (educationalmaterialid && username) { const query = 'SELECT UsersUserName from EducationalMaterial WHERE id = $1'; diff --git a/aoe-web-backend/src/query/collectionQueries.ts b/aoe-web-backend/src/query/collectionQueries.ts index 18953a7cd..2030588bd 100644 --- a/aoe-web-backend/src/query/collectionQueries.ts +++ b/aoe-web-backend/src/query/collectionQueries.ts @@ -59,8 +59,6 @@ export async function insertEducationalMaterialToCollection(collection: Collecti */ export async function deleteEducationalMaterialFromCollection(collection: Collection) { try { - const values: any[] = []; - // collection.emId.map(id => values.push({collectionid : collection.collectionId, educationalmaterialid: id})); const query = 'DELETE FROM collectioneducationalmaterial WHERE collectionid = $1 AND educationalmaterialid IN ($2:list)'; await db.none(query, [collection.collectionId, collection.emId]); diff --git a/aoe-web-backend/src/query/fileHandling.ts 
b/aoe-web-backend/src/query/fileHandling.ts index 4cb20892c..0af6e6d5e 100644 --- a/aoe-web-backend/src/query/fileHandling.ts +++ b/aoe-web-backend/src/query/fileHandling.ts @@ -34,14 +34,20 @@ import MulterFile = Express.Multer.File; import SendData = ManagedUpload.SendData; import StreamZip from 'node-stream-zip'; +const isProd = process.env.NODE_ENV === 'production'; + // AWS and S3 configurations. const configAWS: ServiceConfigurationOptions = { - credentials: { - accessKeyId: process.env.CLOUD_STORAGE_ACCESS_KEY, - secretAccessKey: process.env.CLOUD_STORAGE_ACCESS_SECRET, - }, - endpoint: process.env.CLOUD_STORAGE_API, region: process.env.CLOUD_STORAGE_REGION, + ...(!isProd + ? { + endpoint: process.env.CLOUD_STORAGE_API, + credentials: { + accessKeyId: process.env.CLOUD_STORAGE_ACCESS_KEY, + secretAccessKey: process.env.CLOUD_STORAGE_ACCESS_SECRET, + }, + } + : {}), }; AWS.config.update(configAWS); @@ -49,7 +55,7 @@ AWS.config.update(configAWS); const storage: StorageEngine = multer.diskStorage({ // notice you are calling the multer.diskStorage() method here, not multer() destination: (req: Request, file: any, cb: any) => { - cb(undefined, `./${config.MEDIA_FILE_PROCESS.localFolder}/`); + cb(undefined, `${config.MEDIA_FILE_PROCESS.localFolder}/`); }, filename: (req: Request, file: any, cb: any) => { const ext = file.originalname.substring(file.originalname.lastIndexOf('.'), file.originalname.length); @@ -89,7 +95,6 @@ export const uploadAttachmentToMaterial = async (req: Request, res: Response, ne if (!file) { next(new ErrorHandler(400, 'No file sent')); } - // const emresp = await insertDataToEducationalMaterialTable(req); const metadata = JSON.parse(req.body.attachmentDetails); winstonLogger.debug(metadata); let attachmentId; @@ -109,15 +114,10 @@ export const uploadAttachmentToMaterial = async (req: Request, res: Response, ne res.status(200).json({ id: attachmentId }); try { if (typeof file !== 'undefined') { - const obj: any = await uploadFileToStorage( - 
'./' + file.path, - file.filename, - process.env.CLOUD_STORAGE_BUCKET, - ); - // await insertDataToAttachmentTable(file, req.params.materialId, obj.Key, obj.Bucket, obj.Location, metadata); + const obj: any = await uploadFileToStorage(file.path, file.filename, process.env.CLOUD_STORAGE_BUCKET); await updateAttachment(obj.Key, obj.Bucket, obj.Location, attachmentId); await deleteDataToTempAttachmentTable(file.filename, result[0].id); - fs.unlink('./' + file.path, (err: any) => { + fs.unlink(file.path, (err: any) => { if (err) { winstonLogger.error(err); } @@ -214,7 +214,7 @@ export const uploadMaterial = async (req: Request, res: Response, next: NextFunc if (typeof file !== 'undefined') { winstonLogger.debug(materialid); const obj: any = await uploadFileToStorage( - './' + file.path, + file.path, file.filename, process.env.CLOUD_STORAGE_BUCKET, ); @@ -237,7 +237,7 @@ export const uploadMaterial = async (req: Request, res: Response, next: NextFunc winstonLogger.error(e); } await deleteDataFromTempRecordTable(file.filename, materialid); - fs.unlink('./' + file.path, (err: any) => { + fs.unlink(file.path, (err: any) => { if (err) { winstonLogger.error(err); } @@ -252,7 +252,7 @@ export const uploadMaterial = async (req: Request, res: Response, next: NextFunc if (!res.headersSent) { next(new ErrorHandler(500, 'Error in upload: ' + err)); } - fs.unlink('./' + file.path, (err: any) => { + fs.unlink(file.path, (err: any) => { if (err) { winstonLogger.debug('Error in uploadMaterial(): ' + err); } else { @@ -300,7 +300,7 @@ export const uploadFileToLocalDisk = ( }); }); } catch (err) { - fs.unlink(`./${req.file.path}`, (err) => { + fs.unlink(req.file.path, (err) => { if (err) winstonLogger.error('File removal after the interrupted upload failed: %o', err); }); reject(err); @@ -335,7 +335,7 @@ export const detectEncyptedPDF = (filePath: string): Promise => { }; export const deleteFileFromLocalDiskStorage = (file: MulterFile) => { - fs.unlink(`./${file.path}`, (err: any): void 
=> { + fs.unlink(file.path, (err: any): void => { if (err) winstonLogger.error('Unlink removal for the uploaded file failed: %o', err); }); }; @@ -350,21 +350,19 @@ export const deleteFileFromLocalDiskStorage = (file: MulterFile) => { export const uploadFileToMaterial = async (req: Request, res: Response, next: NextFunction): Promise => { const { file, fileDetails }: any = await uploadFileToLocalDisk(req, res) .then((result: { file: MulterFile; fileDetails: Record }) => { - // winstonLogger.debug('FILE UPLOAD COMPLETED'); return result; }) .catch((err) => { winstonLogger.error('Multer upload failed: %o', err); throw err; }); - // winstonLogger.debug('FILEPATH: %s', file.filename); - if (!fs.existsSync(`uploads/${file.filename}`)) { + if (!fs.existsSync(`${config.MEDIA_FILE_PROCESS.localFolder}/${file.filename}`)) { res.status(500).json({ message: 'aborted' }); return; } // Detect and reject encrypted PDFs. if (file.mimetype === 'application/pdf') { - const isEncrypted: boolean = await detectEncyptedPDF(`uploads/${file.filename}`); + const isEncrypted: boolean = await detectEncyptedPDF(`${config.MEDIA_FILE_PROCESS.localFolder}/${file.filename}`); if (isEncrypted) { res.status(415).json({ rejected: 'Encrypted PDF files not allowed' }).end(); deleteFileFromLocalDiskStorage(file); @@ -396,15 +394,12 @@ export const uploadFileToMaterial = async (req: Request, res: Response, next: Ne await upsertMaterialDisplayName(t, req.params.edumaterialid, material.id, fileDetails); recordID = await upsertRecord(t, file, material.id); // await insertDataToRecordTable(file, material.id); - // Save the file information to the temporary records until the upstreaming is completed. 
- // await upsertMaterialFileToTempRecords(t, file, material.id); await t.commit(); } catch (err: any) { winstonLogger.error('Transaction for the single file upload failed: %o', err); await t.rollback(); throw new ErrorHandler(500, `Transaction for the single file upload failed: ${err}`); } - // TODO: 202 Accepted response to indicate the incomplete upstreaming. res.status(200).json({ id: req.params.edumaterialid, material: [{ id: material.id, createFrom: file.originalname, educationalmaterialid: req.params.edumaterialid }], @@ -412,7 +407,7 @@ export const uploadFileToMaterial = async (req: Request, res: Response, next: Ne try { const fileS3: SendData = await uploadFileToStorage( - `./${file.path}`, + file.path, file.filename, config.CLOUD_STORAGE_CONFIG.bucket, material, @@ -451,8 +446,6 @@ export const uploadFileToMaterial = async (req: Request, res: Response, next: Ne winstonLogger.error('Single file upstreaming or conversions failed: %o', err); if (!res.headersSent) next(new ErrorHandler(500, `File upstreaming failed: ${err}`)); } finally { - // Remove information from incomplete file tasks. 
- // await deleteDataFromTempRecordTable(file.filename, material.id); deleteFileFromLocalDiskStorage(file); } }; @@ -467,40 +460,15 @@ export const fileToStorage = async ( file: MulterFile, materialid: string, ): Promise<{ key: string; recordid: string }> => { - const obj: any = await uploadFileToStorage(`./${file.path}`, file.filename, process.env.CLOUD_STORAGE_BUCKET); + const obj: any = await uploadFileToStorage(file.path, file.filename, process.env.CLOUD_STORAGE_BUCKET); const recordid = await insertDataToRecordTable(file, materialid, obj.Key, obj.Bucket, obj.Location); await deleteDataFromTempRecordTable(file.filename, materialid); - fs.unlink(`./${file.path}`, (err: any) => { + fs.unlink(file.path, (err: any) => { if (err) winstonLogger.error(err); }); return { key: obj.Key, recordid: recordid }; }; -/** - * - * @param file - * @param metadata - * @param materialid - * @param attachmentId - * load attachment to allas storage - */ -export async function attachmentFileToStorage( - file: any, - metadata: any, - materialid: string, - attachmentId: string, -): Promise { - const obj: any = await uploadFileToStorage('./' + file.path, file.filename, process.env.CLOUD_STORAGE_BUCKET); - // await insertDataToAttachmentTable(file, materialid, obj.Key, obj.Bucket, obj.Location, metadata); - await updateAttachment(obj.Key, obj.Bucket, obj.Location, attachmentId); - await deleteDataToTempAttachmentTable(file.filename, materialid); - fs.unlink('./' + file.path, (err: any) => { - if (err) { - winstonLogger.error(err); - } - }); -} - /** * check if files in temporaryrecord table and try to load to allas storage */ @@ -542,40 +510,6 @@ export const checkTemporaryRecordQueue = async (): Promise => { } }; -/** - * check if files in temporaryattachment table and try to load to allas storage - */ -export async function checkTemporaryAttachmentQueue(): Promise { - try { - // take hour of - const ts = Date.now() - 1000 * 60 * 60; - const query = 'Select * From temporaryattachment 
where extract(epoch from createdat)*1000 < $1 limit 1000;'; - const data = await db.any(query, [ts]); - for (const element of data) { - const metadata = { - default: element.defaultfile, - kind: element.kind, - label: element.label, - srclang: element.srclang, - }; - const file = { - originalname: element.originalfilename, - path: element.filepath, - size: element.filesize, - mimetype: element.mimetype, - filename: element.filename, - }; - try { - await attachmentFileToStorage(file, metadata, element.id, element.attachmentid); - } catch (error) { - winstonLogger.error('Error in checkTemporaryAttachmentQueue(): ' + error); - } - } - } catch (error) { - winstonLogger.error('Error in checkTemporaryAttachmentQueue(): ' + error); - } -} - export const insertDataToEducationalMaterialTable = async (req: Request, t: any): Promise => { const query = ` INSERT INTO educationalmaterial (usersusername) @@ -1030,12 +964,16 @@ export const uploadFileToStorage = ( materialMeta?: Material, ): Promise => { const config: ServiceConfigurationOptions = { - credentials: { - accessKeyId: process.env.CLOUD_STORAGE_ACCESS_KEY, - secretAccessKey: process.env.CLOUD_STORAGE_ACCESS_SECRET, - }, - endpoint: process.env.CLOUD_STORAGE_API, region: process.env.CLOUD_STORAGE_REGION, + ...(!isProd + ? { + endpoint: process.env.CLOUD_STORAGE_API, + credentials: { + accessKeyId: process.env.CLOUD_STORAGE_ACCESS_KEY, + secretAccessKey: process.env.CLOUD_STORAGE_ACCESS_SECRET, + }, + } + : {}), }; AWS.config.update(config); const s3: S3 = new AWS.S3(); @@ -1086,12 +1024,16 @@ export async function uploadBase64FileToStorage( return new Promise(async (resolve, reject) => { try { const config: ServiceConfigurationOptions = { - credentials: { - accessKeyId: process.env.CLOUD_STORAGE_ACCESS_KEY, - secretAccessKey: process.env.CLOUD_STORAGE_ACCESS_SECRET, - }, - endpoint: process.env.CLOUD_STORAGE_API, region: process.env.CLOUD_STORAGE_REGION, + ...(!isProd + ? 
{ + endpoint: process.env.CLOUD_STORAGE_API, + credentials: { + accessKeyId: process.env.CLOUD_STORAGE_ACCESS_KEY, + secretAccessKey: process.env.CLOUD_STORAGE_ACCESS_SECRET, + }, + } + : {}), }; AWS.config.update(config); const s3 = new AWS.S3(); @@ -1177,7 +1119,6 @@ export const downloadFile = async (req: Request, res: Response, next: NextFuncti } await downloadFileFromStorage(req, res, next); - //if (!data) return res.end(); // Increase download counter unless the user is the owner of the material. if (!req.isAuthenticated() || !(await hasAccesstoPublication(educationalmaterialId, req))) { @@ -1188,7 +1129,6 @@ export const downloadFile = async (req: Request, res: Response, next: NextFuncti } } next(); - // return res.status(200).end(); } catch (err) { if (!res.headersSent) { next(new ErrorHandler(400, err)); @@ -1229,7 +1169,6 @@ export const downloadFileFromStorage = async ( filesize: number; mimetype: string; } = await db.oneOrNone(query, [fileName]); - // { originalfilename: 'oceanwaves1280x720.mp4', filesize: 2000000, mimetype: 'video/mp4' }; if (!fileDetails) { next(new ErrorHandler(404, 'Requested file ' + fileName + ' not found.')); } else { @@ -1454,11 +1393,6 @@ export const downloadAndZipFromStorage = ( ): Promise => { return new Promise((resolve, reject): void => { const s3: S3Client = new S3Client({ - credentials: { - accessKeyId: process.env.CLOUD_STORAGE_ACCESS_KEY, - secretAccessKey: process.env.CLOUD_STORAGE_ACCESS_SECRET, - }, - endpoint: process.env.CLOUD_STORAGE_API, region: process.env.CLOUD_STORAGE_REGION, } as S3ClientConfig); const bucket = process.env.CLOUD_STORAGE_BUCKET; @@ -1546,7 +1480,6 @@ export default { checkTemporaryRecordQueue, uploadBase64FileToStorage, uploadAttachmentToMaterial, - checkTemporaryAttachmentQueue, insertDataToDisplayName, upsertMaterialDisplayName, upsertMaterialFileToTempRecords, diff --git a/aoe-web-backend/src/query/oaipmh.ts b/aoe-web-backend/src/query/oaipmh.ts index 20686f496..fc8876a11 100644 --- 
a/aoe-web-backend/src/query/oaipmh.ts +++ b/aoe-web-backend/src/query/oaipmh.ts @@ -136,12 +136,7 @@ export const getMaterialMetaData = async (req: Request, res: Response): Promise< query = 'SELECT * FROM alignmentobject WHERE educationalmaterialid = $1'; response = await t.any(query, [q.id]); - // TODO: Modify the target URLs before passing alignment objects to the response. - // if (response) { - // response.forEach((alignmentObject: AlignmentObject): void => { - // alignmentObject.targeturl && modifyEducationalSubjectAndObjectiveURL(alignmentObject); - // }); - // } + q.alignmentobject = response; query = ` diff --git a/aoe-web-backend/src/resource/awsClient.ts b/aoe-web-backend/src/resource/awsClient.ts index 8e4de567c..2e6459cbc 100644 --- a/aoe-web-backend/src/resource/awsClient.ts +++ b/aoe-web-backend/src/resource/awsClient.ts @@ -2,17 +2,23 @@ import AWS, { S3 } from 'aws-sdk'; import { ServiceConfigurationOptions } from 'aws-sdk/lib/service'; import config from '@/config'; +const isProd = process.env.NODE_ENV === 'production'; + /** * AWS S3 cloud storage configuration. * @type {{endpoint: string, credentials: {accessKeyId: string, secretAccessKey: string}, region: string}} */ const configS3: ServiceConfigurationOptions = { - credentials: { - accessKeyId: config.CLOUD_STORAGE_CONFIG.accessKey, - secretAccessKey: config.CLOUD_STORAGE_CONFIG.accessSecret, - }, - endpoint: config.CLOUD_STORAGE_CONFIG.apiURL, region: config.CLOUD_STORAGE_CONFIG.region, + ...(!isProd + ? 
{ + endpoint: process.env.CLOUD_STORAGE_API, + credentials: { + accessKeyId: process.env.CLOUD_STORAGE_ACCESS_KEY, + secretAccessKey: process.env.CLOUD_STORAGE_ACCESS_SECRET, + }, + } + : {}), }; AWS.config.update(configS3); export const s3: S3 = new AWS.S3(); diff --git a/aoe-web-backend/src/resource/httpsClient.ts b/aoe-web-backend/src/resource/httpsClient.ts index 7c24fb528..cf051a003 100644 --- a/aoe-web-backend/src/resource/httpsClient.ts +++ b/aoe-web-backend/src/resource/httpsClient.ts @@ -1,17 +1,19 @@ import { IncomingMessage, RequestOptions } from 'http'; import https from 'https'; +import http from 'http'; + import winstonLogger from '@util/winstonLogger'; /** * HTTP(S) client to execute internal request to other service components. - * Current implementation is for secured HTTPS requests only. * Request options specified and provided as an argument. * + * @param httpsConnection use https or http * @param options http.RequestOptions */ -export default (options: RequestOptions): Promise => { +export default (httpsConnection: boolean, options: RequestOptions): Promise => { return new Promise((resolve, reject) => { - let request = https.request(options, (response: IncomingMessage) => { + let request = (httpsConnection ? 
https : http).request(options, (response: IncomingMessage) => { let output = ''; response .setEncoding('utf8') diff --git a/aoe-web-backend/src/resource/kafkaClient.ts b/aoe-web-backend/src/resource/kafkaClient.ts index 7dffe4c4c..ccdf5f214 100644 --- a/aoe-web-backend/src/resource/kafkaClient.ts +++ b/aoe-web-backend/src/resource/kafkaClient.ts @@ -1,11 +1,31 @@ import config from '@/config'; import { Kafka, Partitioners, Producer } from 'kafkajs'; +import { generateAuthToken } from 'aws-msk-iam-sasl-signer-js'; + +const isProd = process.env.NODE_ENV === 'production'; const kafka: Kafka = new Kafka({ clientId: config.MESSAGE_QUEUE_OPTIONS.clientId as string, brokers: config.MESSAGE_QUEUE_OPTIONS.brokerServers.split(',') as string[], + ...(isProd + ? { + ssl: true, + sasl: { + mechanism: 'oauthbearer', + oauthBearerProvider: () => oauthBearerTokenProvider(config.MESSAGE_QUEUE_OPTIONS.region), + }, + } + : {}), }); +async function oauthBearerTokenProvider(region) { + // Uses AWS Default Credentials Provider Chain to fetch credentials + const authTokenResponse = await generateAuthToken({ region }); + return { + value: authTokenResponse.token, + }; +} + export const kafkaProducer: Producer = kafka.producer({ allowAutoTopicCreation: true, createPartitioner: Partitioners.DefaultPartitioner, diff --git a/aoe-web-backend/src/resource/oidcConfig.ts b/aoe-web-backend/src/resource/oidcConfig.ts index 68b9500ee..49b44765f 100644 --- a/aoe-web-backend/src/resource/oidcConfig.ts +++ b/aoe-web-backend/src/resource/oidcConfig.ts @@ -1,13 +1,10 @@ -import config from '@/config'; import ah from '@services/authService'; import { isLoginEnabled } from '@services/routeEnablerService'; import winstonLogger from '@util/winstonLogger'; -import connectRedis, { RedisStore } from 'connect-redis'; import { CookieOptions, Express, Request, Response } from 'express'; -import session, { Cookie, SessionOptions } from 'express-session'; +import { Cookie } from 'express-session'; import 
openidClient, { Client, custom, HttpOptions } from 'openid-client'; import passport from 'passport'; -import clientRedis from './redisClient'; const Issuer = openidClient.Issuer; const Strategy = openidClient.Strategy; @@ -15,12 +12,6 @@ const Strategy = openidClient.Strategy; /** * Configuration for OpenID Connect Authorization Management */ -passport.serializeUser((user: Express.User, done): void => { - done(undefined, user); -}); -passport.deserializeUser((userinfo: Record, done): void => { - done(undefined, { user: userinfo.id }); -}); Issuer.discover(process.env.PROXY_URI) .then((oidcIssuer: InstanceType): void => { const client: Client = new oidcIssuer.Client({ @@ -43,6 +34,13 @@ Issuer.discover(process.env.PROXY_URI) }); }), ); + + passport.serializeUser((user: Express.User, done): void => { + done(undefined, user); + }); + passport.deserializeUser((userinfo: Record, done): void => { + done(undefined, { user: userinfo.id }); + }); }) .catch((error: any): void => { winstonLogger.error(error); @@ -56,12 +54,8 @@ export const authInit = (app: Express): void => { custom.setHttpOptionsDefaults({ timeout: Number(process.env.HTTP_OPTIONS_TIMEOUT) || 5000, retry: Number(process.env.HTTP_OPTIONS_RETRY) || 2, - // clock_tolerance: Number(process.env.HTTP_OPTIONS_CLOCK_TOLERANCE) || 5, } as HttpOptions); - app.use(passport.initialize()); - app.use(passport.session()); - // Login endpoint for the client application. 
app.get( '/api/login', @@ -89,10 +83,8 @@ export const authInit = (app: Express): void => { req.logout((done) => done()); req.session.destroy((error): void => { winstonLogger.debug('Logout request /logout | session termination errors: %o', error); - // res.setHeader('Cache-Control', 'no-store'); res.clearCookie('connect.sid', deleteCookie); res.status(200).json({ message: 'logged out' }); - // res.redirect(['https://', config.SESSION_COOKIE_OPTIONS.domain, '/#/logout'].join('')); }); }); @@ -107,26 +99,6 @@ export const authInit = (app: Express): void => { ); }; -/** - * Initialize session and cookie management with Redis storage. - * @param app Express - */ -export const sessionInit = (app: Express): void => { - const RedisStore: RedisStore = connectRedis(session); - app.use( - session({ - store: new RedisStore({ client: clientRedis }), // disableTTL: true - resave: config.SESSION_CONFIG_OPTIONS.resave as boolean, - rolling: config.SESSION_CONFIG_OPTIONS.rolling as boolean, - saveUninitialized: config.SESSION_CONFIG_OPTIONS.saveUninitialized as boolean, - secret: config.SESSION_CONFIG_OPTIONS.secret as string, - proxy: config.SESSION_CONFIG_OPTIONS.proxy, - cookie: config.SESSION_COOKIE_OPTIONS, - } as SessionOptions), - ); -}; - export default { authInit, - sessionInit, }; diff --git a/aoe-web-backend/src/resource/redisClient.ts b/aoe-web-backend/src/resource/redisClient.ts index c7f8addb3..a7c0f22f5 100644 --- a/aoe-web-backend/src/resource/redisClient.ts +++ b/aoe-web-backend/src/resource/redisClient.ts @@ -3,18 +3,28 @@ import { RedisClientOptions } from '@redis/client'; import winstonLogger from '@util/winstonLogger'; import { createClient } from 'redis'; -const redisHost: string = config.REDIS_OPTIONS.host; -const redisPort: number = config.REDIS_OPTIONS.port; -const redisPass: string = config.REDIS_OPTIONS.pass; - const redisClient = createClient({ - url: `redis://:${redisPass}@${redisHost}:${redisPort}`, + legacyMode: true, + url: 
`${config.REDIS_OPTIONS.protocol}://${config.REDIS_OPTIONS.username}:${encodeURIComponent( + config.REDIS_OPTIONS.pass, + )}@${config.REDIS_OPTIONS.host}:${config.REDIS_OPTIONS.port}`, } as RedisClientOptions) .on('ready', () => { - winstonLogger.debug('REDIS [redis://%s:%d] Connection is operable', redisHost, redisPort); + winstonLogger.info( + 'REDIS [%s://%s:%d] Connection is operable', + config.REDIS_OPTIONS.protocol, + config.REDIS_OPTIONS.host, + config.REDIS_OPTIONS.port, + ); }) .on('error', (err: Error): void => { - winstonLogger.error('REDIS [redis://%s:%d] Error: %o', redisHost, redisPort, err); + winstonLogger.error( + 'REDIS [%s://%s:%d] Error: %o', + config.REDIS_OPTIONS.protocol, + config.REDIS_OPTIONS.host, + config.REDIS_OPTIONS.port, + err, + ); }); const redisInit = async (): Promise => { diff --git a/aoe-web-backend/src/search/es.ts b/aoe-web-backend/src/search/es.ts index 2de3eb5b1..c0e13c1d7 100644 --- a/aoe-web-backend/src/search/es.ts +++ b/aoe-web-backend/src/search/es.ts @@ -1,6 +1,5 @@ import { ErrorHandler } from '@/helpers/errorHandler'; import { ISearchIndexMap } from '@aoe/search/es'; -import elasticsearch, { Client, ClientOptions } from '@elastic/elasticsearch'; import { getPopularityQuery } from '@query/analyticsQueries'; import { db } from '@resource/postgresClient'; import { aoeThumbnailDownloadUrl } from '@services/urlService'; @@ -10,17 +9,40 @@ import fs from 'fs'; import * as pgLib from 'pg-promise'; import { collectionDataToEs, collectionFromEs, getCollectionDataToEs, getCollectionDataToUpdate } from './esCollection'; import { AoeBody, AoeCollectionResult } from './esTypes'; - -const index: string = process.env.ES_INDEX; +import { Client } from '@opensearch-project/opensearch'; +import { AwsSigv4Signer } from "@opensearch-project/opensearch/aws"; +import AWS from "aws-sdk"; /** * Elastisearch client configuration */ -const client: Client = new elasticsearch.Client({ - node: process.env.ES_NODE, - log: 'trace', - keepAlive: 
true -} as ClientOptions); +const index: string = process.env.ES_INDEX; +const isProd = process.env.NODE_ENV == 'production'; + +const client = new Client( + isProd + ? { + ...AwsSigv4Signer({ + region: process.env.AWS_REGION || 'eu-west-1', + service: 'aoss', + getCredentials: () => + new Promise((resolve, reject) => { + AWS.config.getCredentials((err, credentials) => { + if (err) { + reject(err); + } else { + resolve(credentials); + } + }); + }), + }), + node: process.env.ES_NODE, + } + : { + node: process.env.ES_NODE, + } +); + // values for index last update time export namespace Es { @@ -36,65 +58,154 @@ const mode = new pgLib.txMode.TransactionMode({ deferrable: true }); -export const createEsIndex = async (): Promise => { - client.ping({ - // ping usually has a 3000ms timeout - // requestTimeout: 1000 - }, async function(error: any) { - if (error) { - winstonLogger.error('Elasticsearch cluster is down: ' + error); + +async function updateAoeIndexData(indexName: string, operation: 'create' | 'index') { + try { + let i = 0; + let n; + Es.ESupdated.value = new Date(); + do { + n = await metadataToEs(indexName, i, 1000, operation); + i++; + } while (n); + } catch (error) { + winstonLogger.error(`Index ${indexName} creation failed due to ${JSON.stringify(error)}`); + } +} + +async function updateCollectionIndexData(collectionIndex: string, operation: 'create' | 'index') { + + let i = 0; + let dataToEs; + do { + dataToEs = await getCollectionDataToEs(i, 1000); + i++; + await collectionDataToEs(collectionIndex, dataToEs.collections, operation); + } while (dataToEs.collections && dataToEs.collections.length > 0); + Es.CollectionEsUpdated.value = new Date(); +} + +const updateIndex = async (indexName:string, mappingFile: string, recreateIndex: boolean, updateIndexData: (indexName: string, operation: 'create' | 'index') => Promise +)=> { + try { + await client.ping() + } catch (error) { + winstonLogger.error('OpenSearch connection is down: ' + error); + throw error; + 
} + + const createAndPopulateIndex = async (indexName: string, mappingFile:string) => { + const indexCreated = await createIndex(indexName) + + if (indexCreated) { + await addMapping(indexName, mappingFile); + await updateIndexData(indexName, 'create'); + } + } + + try { + const indexFound = await indexExists(indexName); + + if(!indexFound) { + await createAndPopulateIndex(indexName, mappingFile) } else { - // Delete existing index before recreation. - const indexFound: boolean = await indexExists(index); - if (indexFound) await deleteIndex(index); - - const createIndexResult: boolean = await createIndex(index); - if (createIndexResult) { - try { - await addMapping(index, process.env.ES_MAPPING_FILE); - let i = 0; - let n; - Es.ESupdated.value = new Date(); - do { - n = await metadataToEs(i, 1000); - i++; - } while (n); - } catch (error) { - winstonLogger.error(error); - } + if (recreateIndex) { + await deleteIndex(indexName); + await createAndPopulateIndex(indexName, mappingFile) + } else { + await updateIndexData(indexName,'index'); } } - }); -}; + } catch (err) { + winstonLogger.error(`Index ${indexName} update failed due to ${JSON.stringify(err)}`); + } +} /** * Delete existing search index. 
* @param index */ export const deleteIndex = async (index: string): Promise => { - return client.indices.delete({ - index: index - }).then((data: any) => { - return !!data.body; - }).catch((error: any) => { + try { + const deleteResponse = await client.indices.delete({ index }); + winstonLogger.info( + `Index ${index} deleted with status code: ${deleteResponse.statusCode} and acknowledged: ${deleteResponse.body.acknowledged}` + ); + + if (!deleteResponse.body.acknowledged) { + return false; + } + + // delay added due to opensearch serverless issue with old index still being used for bulk + await new Promise((resolve) => setTimeout(resolve, 2000)); + + for (let attempt = 0; attempt < 5; attempt++) { + const exists = await indexExists(index); + if (!exists) { + winstonLogger.info(`Confirmed that index ${index} no longer exists.`); + return true; + } + winstonLogger.warn(`Index ${index} still exists after attempt ${attempt}.`); + + await new Promise((resolve) => setTimeout(resolve, 2000)); + } + + winstonLogger.error(`Index ${index} still exists after 5 attempts.`); + return false; + } catch (error: any) { winstonLogger.error('Search index deletion failed: %o', error); return false; - }); + } }; /** * @param index * Create a new index */ -export const createIndex = async (index: string): Promise => { - return client.indices.create({ - index: index - }).then((data: any) => { - return data.body; - }).catch((error: any) => { - winstonLogger.error(error); +export const createIndex = async (index: string): Promise => { + + const maxRetries = 5; + const retryDelay = 2000; + + try { + const response = await client.indices.create({ + index: index + }); + + if (response.statusCode === 200) { + winstonLogger.info(`Index "${index}" created successfully.`); + } else { + winstonLogger.error(`Index "${index}" creation not acknowledged.`); + return false; + } + + for (let attempt = 1; attempt <= maxRetries; attempt++) { + try { + const existsResponse = await indexExists(index); + if 
(existsResponse) { + winstonLogger.info(`Index "${index}" is accessible after ${attempt} attempt(s).`); + return true; + } else { + winstonLogger.warn( + `Attempt ${attempt} to check index "${index}" accessibility returned: ${existsResponse}` + ); + } + } catch (error) { + winstonLogger.warn( + `Error checking index "${index}" accessibility on attempt ${attempt}: ${error.message}` + ); + } + + await new Promise((resolve) => setTimeout(resolve, retryDelay)); + } + + winstonLogger.error(`Index "${index}" is not accessible after ${maxRetries} retries.`); return false; - }); + + } catch (error) { + winstonLogger.error(`Error creating or accessing index "${index}": ${error.message}`); + return false; + } }; /** @@ -104,10 +215,11 @@ export const createIndex = async (index: string): Promise => { export const indexExists = async (index: string): Promise => { return client.indices.exists({ index: index - }).then((data: any) => { - return !!data.body; + }).then((data) => { + winstonLogger.info(`Index ${index} exists ${data.statusCode === 200}`) + return data.statusCode === 200 && data.body === true; }).catch((error: any) => { - winstonLogger.error(error); + winstonLogger.error(`Failed to check if index ${index} exists ${JSON.stringify(error)}`); return false; }); }; @@ -121,15 +233,14 @@ export const addMapping = async (index: string, fileLocation: string): Promise<{ return new Promise(async (resolve, reject) => { const rawdata: Buffer = fs.readFileSync(fileLocation); const searchIndexMap: ISearchIndexMap = JSON.parse(rawdata.toString()); + client.indices.putMapping({ index: index, - // body: aoemapping body: searchIndexMap.mappings, }, (err: any, _resp: any) => { if (err) { reject(new Error(err)); } else { - // winstonLogger.debug("ES mapping created: ", index, resp.body); resolve({ 'status': 'success' }); } }); @@ -141,19 +252,19 @@ export const addMapping = async (index: string, fileLocation: string): Promise<{ * @param limit * insert metadata */ -export async function 
metadataToEs(offset: number, limit: number) { +export async function metadataToEs(indexName: string, offset: number, limit: number, operation : 'index' | 'create') { return new Promise(async (resolve, reject) => { db.tx({ mode }, async (t: any) => { const params: any = []; params.push(offset * limit); params.push(limit); let query = 'select em.id, em.createdat, em.publishedat, em.updatedat, em.archivedat, em.timerequired, em.agerangemin, em.agerangemax, em.obsoleted, em.originalpublishedat, em.expires, em.suitsallearlychildhoodsubjects, em.suitsallpreprimarysubjects, em.suitsallbasicstudysubjects, em.suitsalluppersecondarysubjects, em.suitsalluppersecondarysubjectsnew, em.suitsallvocationaldegrees, em.suitsallselfmotivatedsubjects, em.suitsallbranches' + - ' from educationalmaterial as em where em.obsoleted = 0 and em.publishedat IS NOT NULL order by em.id asc OFFSET $1 LIMIT $2;'; + ' from educationalmaterial as em where em.obsoleted = 0 and em.publishedat IS NOT NULL order by em.id asc OFFSET $1 LIMIT $2;'; return t.map(query, params, async (q: any) => { const m: any = []; t.map('select m.id, m.materiallanguagekey as language, link, version.priority, filepath, originalfilename, filesize, mimetype, filekey, filebucket, obsoleted ' + - 'from (select materialid, publishedat, priority from versioncomposition where publishedat = (select max(publishedat) from versioncomposition where educationalmaterialid = $1)) as version ' + - 'left join material m on m.id = version.materialid left join record r on m.id = r.materialid where m.educationalmaterialid = $1', [q.id], (q2: any) => { + 'from (select materialid, publishedat, priority from versioncomposition where publishedat = (select max(publishedat) from versioncomposition where educationalmaterialid = $1)) as version ' + + 'left join material m on m.id = version.materialid left join record r on m.id = r.materialid where m.educationalmaterialid = $1', [q.id], (q2: any) => { t.any('select * from materialdisplayname where 
materialid = $1;', q2.id) .then((data: any) => { q2.materialdisplayname = data; @@ -246,12 +357,17 @@ export async function metadataToEs(offset: number, limit: number) { }); }) .then(async (data: any) => { - // winstonLogger.debug("inserting data to elastic material number: " + (offset * limit + 1)); if (data.length > 0) { - const body = data.flatMap(doc => [{ index: { _index: index, _id: doc.id } }, doc]); - // winstonLogger.debug("THIS IS BODY:"); - // winstonLogger.debug(JSON.stringify(body)); - const { body: bulkResponse } = await client.bulk({ refresh: true, body }); + winstonLogger.info(`Adding ${data.length} documents to OpenSearch index ${indexName}`) + const body = data.flatMap(doc => [{ [operation]: { _index: indexName, _id: doc.id } }, doc]); + + const {statusCode, body: bulkResponse } = await performBulkOperation(client, indexName, body); + winstonLogger.info(`OpenSearch index ${indexName} bulk completed with status code: ${statusCode} , took: ${bulkResponse.took}, errors: ${bulkResponse.errors}`); + + if (winstonLogger.isDebugEnabled()) { + winstonLogger.debug(`OpenSearch index ${indexName} bulk completed with response body ${JSON.stringify(bulkResponse)}`); + } + if (bulkResponse.errors) { const erroredDocuments = []; // The items array has the same order of the dataset we just indexed. 
@@ -271,7 +387,7 @@ export async function metadataToEs(offset: number, limit: number) { }); } }); - winstonLogger.debug('Error documents in metadataToEs(): %o', erroredDocuments); + winstonLogger.error('Error documents in metadataToEs(): %o', erroredDocuments); } resolve(data.length); } else { @@ -279,12 +395,70 @@ export async function metadataToEs(offset: number, limit: number) { } }) .catch(error => { - winstonLogger.error(error); + winstonLogger.error(`Failed to add documents to OpenSearch index ${indexName} due to ${JSON.stringify(error)}`) reject(); }); }); } +export async function performBulkOperation( + client: Client, + index: string, + body: any, + maxRetries = 3 +): Promise<{ + statusCode: number, + body: Record +}> { + let attempt = 0; + + while (attempt < maxRetries) { + try { + const { statusCode, body: bulkResponse } = await client.bulk({ + index, + refresh: false, + body, + }); + + if (statusCode === 200 && !bulkResponse.errors) { + return { statusCode, body: bulkResponse }; + } else { + winstonLogger.info( + `OpenSearch index ${index} bulk attempt ${attempt} failed with status code: ${statusCode}, took: ${bulkResponse.took}, errors: ${bulkResponse.errors}` + ); + if (winstonLogger.isDebugEnabled()) { + winstonLogger.debug(`OpenSearch index ${index} bulk attempt ${attempt} failure response body: ${JSON.stringify(bulkResponse)}`); + } + } + + attempt++; + + if (attempt === maxRetries) { + winstonLogger.error(`OpenSearch index ${index} bulk failed after ${maxRetries} attempts.`); + return { statusCode, body: bulkResponse }; + } + + } catch (error) { + winstonLogger.error( + `Error during bulk operation for index ${index} on attempt ${attempt}: ${JSON.stringify(error)}` + ); + + attempt++; + + if (attempt === maxRetries) { + winstonLogger.error(`OpenSearch index ${index} bulk failed after ${maxRetries} attempts.`); + throw error + } + + } + await new Promise((resolve) => setTimeout(resolve, 2000)); + + } + +} + + + /** * Update search engine index 
after recent changes in information resources. * TODO: Complexity of the function must be refactored. @@ -298,33 +472,33 @@ export const updateEsDocument = (updateCounters?: boolean): Promise => { if (updateCounters) { params.push(Es.ESCounterUpdated.value); query = 'SELECT em.id, em.createdat, em.publishedat, em.updatedat, em.archivedat, em.timerequired, ' + - 'em.agerangemin, em.agerangemax, em.obsoleted, em.originalpublishedat, em.expires, ' + - 'em.suitsallearlychildhoodsubjects, em.suitsallpreprimarysubjects, em.suitsallbasicstudysubjects, ' + - 'em.suitsalluppersecondarysubjects, em.suitsalluppersecondarysubjectsnew, ' + - 'em.suitsallvocationaldegrees, em.suitsallselfmotivatedsubjects, em.suitsallbranches ' + - 'FROM educationalmaterial AS em ' + - 'WHERE counterupdatedat > $1 AND em.publishedat IS NOT NULL'; + 'em.agerangemin, em.agerangemax, em.obsoleted, em.originalpublishedat, em.expires, ' + + 'em.suitsallearlychildhoodsubjects, em.suitsallpreprimarysubjects, em.suitsallbasicstudysubjects, ' + + 'em.suitsalluppersecondarysubjects, em.suitsalluppersecondarysubjectsnew, ' + + 'em.suitsallvocationaldegrees, em.suitsallselfmotivatedsubjects, em.suitsallbranches ' + + 'FROM educationalmaterial AS em ' + + 'WHERE counterupdatedat > $1 AND em.publishedat IS NOT NULL'; } else { params.push(Es.ESupdated.value); query = 'SELECT em.id, em.createdat, em.publishedat, em.updatedat, em.archivedat, em.timerequired, ' + - 'em.agerangemin, em.agerangemax, em.obsoleted, em.originalpublishedat, em.expires, ' + - 'em.suitsallearlychildhoodsubjects, em.suitsallpreprimarysubjects, em.suitsallbasicstudysubjects, ' + - 'em.suitsalluppersecondarysubjects, em.suitsalluppersecondarysubjectsnew, ' + - 'em.suitsallvocationaldegrees, em.suitsallselfmotivatedsubjects, em.suitsallbranches ' + - 'FROM educationalmaterial AS em ' + - 'WHERE updatedat > $1 AND em.publishedat IS NOT NULL'; + 'em.agerangemin, em.agerangemax, em.obsoleted, em.originalpublishedat, em.expires, ' + + 
'em.suitsallearlychildhoodsubjects, em.suitsallpreprimarysubjects, em.suitsallbasicstudysubjects, ' + + 'em.suitsalluppersecondarysubjects, em.suitsalluppersecondarysubjectsnew, ' + + 'em.suitsallvocationaldegrees, em.suitsallselfmotivatedsubjects, em.suitsallbranches ' + + 'FROM educationalmaterial AS em ' + + 'WHERE updatedat > $1 AND em.publishedat IS NOT NULL'; } return t.map(query, params, async (q: any) => { // #2 async start const m: any = []; t.map('SELECT m.id, m.materiallanguagekey AS language, link, version.priority, filepath, ' + - 'originalfilename, filesize, mimetype, filekey, filebucket, obsoleted ' + - 'FROM (select materialid, publishedat, priority ' + - 'FROM versioncomposition ' + - 'WHERE publishedat = ' + - '(SELECT MAX(publishedat) FROM versioncomposition WHERE educationalmaterialid = $1)) AS version ' + - 'LEFT JOIN material m ON m.id = version.materialid ' + - 'LEFT JOIN record r ON m.id = r.materialid ' + - 'WHERE m.educationalmaterialid = $1', [q.id], (q2: any) => { + 'originalfilename, filesize, mimetype, filekey, filebucket, obsoleted ' + + 'FROM (select materialid, publishedat, priority ' + + 'FROM versioncomposition ' + + 'WHERE publishedat = ' + + '(SELECT MAX(publishedat) FROM versioncomposition WHERE educationalmaterialid = $1)) AS version ' + + 'LEFT JOIN material m ON m.id = version.materialid ' + + 'LEFT JOIN record r ON m.id = r.materialid ' + + 'WHERE m.educationalmaterialid = $1', [q.id], (q2: any) => { t.any('select * from materialdisplayname where materialid = $1;', q2.id).then((data: any) => { q2.materialdisplayname = data; m.push(q2); @@ -383,21 +557,16 @@ export const updateEsDocument = (updateCounters?: boolean): Promise => { return q2; }); - // response = await t.any(query, [q.id]); q.isbasedon = response; - // query = "select * from inlanguage where educationalmaterialid = $1;"; - // response = await t.any(query, [q.id]); - // q.inlanguage = response; - query = 'SELECT * FROM alignmentobject WHERE educationalmaterialid 
= $1'; response = await t.any(query, [q.id]); q.alignmentobject = response; query = 'SELECT users.firstname, users.lastname ' + - 'FROM educationalmaterial ' + - 'INNER JOIN users ON educationalmaterial.usersusername = users.username ' + - 'WHERE educationalmaterial.id = $1'; + 'FROM educationalmaterial ' + + 'INNER JOIN users ON educationalmaterial.usersusername = users.username ' + + 'WHERE educationalmaterial.id = $1'; response = await t.any(query, [q.id]); q.owner = response; @@ -413,9 +582,9 @@ export const updateEsDocument = (updateCounters?: boolean): Promise => { q.thumbnail = response; query = 'SELECT licensecode AS key, license AS value ' + - 'FROM educationalmaterial AS m ' + - 'LEFT JOIN licensecode AS l ON m.licensecode = l.code ' + - 'WHERE m.id = $1'; + 'FROM educationalmaterial AS m ' + + 'LEFT JOIN licensecode AS l ON m.licensecode = l.code ' + + 'WHERE m.id = $1'; q.license = await t.oneOrNone(query, [q.id]); response = await t.oneOrNone(getPopularityQuery, [q.id]); @@ -433,7 +602,7 @@ export const updateEsDocument = (updateCounters?: boolean): Promise => { .then(async (data: any) => { // #1 then start if (data.length > 0) { const body = data.flatMap(doc => [{ index: { _index: index, _id: doc.id } }, doc]); - const { body: bulkResponse } = await client.bulk({ refresh: true, body }); + const { body: bulkResponse } = await client.bulk({ refresh: false, body }); if (bulkResponse.errors) { winstonLogger.error('Bulk response error: %o', bulkResponse.errors); } else { @@ -450,42 +619,12 @@ export const updateEsDocument = (updateCounters?: boolean): Promise => { } ) // #1 then end .catch((error) => { // #1 catch start - winstonLogger.debug('Search index update faild in updateEsDocument(): ' + error); + winstonLogger.error('Search index update failed in updateEsDocument(): ' + error); reject(error); }); // #1 catch end }); }; -export async function createEsCollectionIndex() { - try { - const collectionIndex = process.env.ES_COLLECTION_INDEX; - const 
result: boolean = await indexExists(collectionIndex); - // winstonLogger.debug("COLLECTION INDEX RESULT: " + result); - if (result) { - await deleteIndex(collectionIndex); - // winstonLogger.debug("COLLECTION DELETE INDEX RESULT: " + JSON.stringify(deleteResult)); - } - const createIndexResult = await createIndex(collectionIndex); - // winstonLogger.debug("createIndexResult: " + JSON.stringify(createIndexResult)); - if (createIndexResult) { - await addMapping(collectionIndex, process.env.ES_COLLECTION_MAPPING_FILE); - // winstonLogger.debug("mappingResult: " + JSON.stringify(mappingResult)); - let i = 0; - let dataToEs; - do { - dataToEs = await getCollectionDataToEs(i, 1000); - i++; - await collectionDataToEs(collectionIndex, dataToEs.collections); - } while (dataToEs.collections && dataToEs.collections.length > 0); - // set new date CollectionEsUpdated - Es.CollectionEsUpdated.value = new Date(); - } - } catch (err) { - winstonLogger.debug('Error creating collection index'); - winstonLogger.error(err); - } -} - export async function getCollectionEsData(req: Request, res: Response, next: NextFunction) { try { const responseBody: AoeBody = await collectionFromEs(req.body); @@ -493,7 +632,7 @@ export async function getCollectionEsData(req: Request, res: Response, next: Nex } catch (err) { winstonLogger.debug('elasticSearchQuery error'); winstonLogger.error(err); - next(new ErrorHandler(500, 'There was an issue prosessing your request')); + next(new ErrorHandler(500, 'There was an issue processing your request')); } } @@ -502,7 +641,7 @@ export const updateEsCollectionIndex = async (): Promise => { const collectionIndex = process.env.ES_COLLECTION_INDEX; const newDate = new Date(); const dataToEs = await getCollectionDataToUpdate(Es.CollectionEsUpdated.value); - await collectionDataToEs(collectionIndex, dataToEs.collections); + await collectionDataToEs(collectionIndex, dataToEs.collections, 'index'); Es.CollectionEsUpdated.value = newDate; } catch (error) { @@ 
-514,13 +653,20 @@ export const updateEsCollectionIndex = async (): Promise => { /** * START POINT OF SEARCH ENGINE INDEXING */ -if (process.env.CREATE_ES_INDEX) { - createEsIndex().then(); - createEsCollectionIndex().then(); +async function initializeIndices(): Promise { + const recreateIndex = (process.env.CREATE_ES_INDEX === '1') as boolean + + try { + await updateIndex(process.env.ES_INDEX, process.env.ES_MAPPING_FILE, recreateIndex, updateAoeIndexData); + await updateIndex(process.env.ES_COLLECTION_INDEX, process.env.ES_COLLECTION_MAPPING_FILE, recreateIndex, updateCollectionIndexData); + } catch (error) { + winstonLogger.error(`Error ${recreateIndex ? 'creating' : 'updating'} OpenSearch indices: ` , error); + } } +initializeIndices(); + export default { - createEsIndex, getCollectionEsData, updateEsCollectionIndex, updateEsDocument, diff --git a/aoe-web-backend/src/search/esCollection.ts b/aoe-web-backend/src/search/esCollection.ts index 8b815109e..b4be6ef71 100644 --- a/aoe-web-backend/src/search/esCollection.ts +++ b/aoe-web-backend/src/search/esCollection.ts @@ -1,15 +1,38 @@ -import elasticsearch, { ApiResponse, Client, ClientOptions } from '@elastic/elasticsearch'; import { db } from '@resource/postgresClient'; import { aoeCollectionThumbnailDownloadUrl } from '@services/urlService'; import winstonLogger from '@util/winstonLogger'; import { createMatchAllObject } from './esQueries'; import { AoeBody, AoeCollectionResult, MultiMatchSeachBody, SearchResponse } from './esTypes'; +import { AwsSigv4Signer } from "@opensearch-project/opensearch/aws"; +import AWS from "aws-sdk"; +import { Client, ApiResponse } from "@opensearch-project/opensearch"; +import { performBulkOperation } from "@search/es"; -const client: Client = new elasticsearch.Client({ - node: process.env.ES_NODE, - log: 'trace', - keepAlive: true -} as ClientOptions); +const isProd = process.env.NODE_ENV == 'production'; + +const client = new Client( + isProd + ? 
{ + ...AwsSigv4Signer({ + region: process.env.AWS_REGION || 'eu-west-1', + service: 'aoss', + getCredentials: () => + new Promise((resolve, reject) => { + AWS.config.getCredentials((err, credentials) => { + if (err) { + reject(err); + } else { + resolve(credentials); + } + }); + }), + }), + node: process.env.ES_NODE, + } + : { + node: process.env.ES_NODE, + } +); /** * create es collection query @@ -29,10 +52,7 @@ export async function collectionFromEs(obj: any) { const mustList = []; // match all if keywords null mustList.push(createMatchAllObject()); - // if (req.body.filters) { - // const filters = filterMapper(req.body.filters); - // filters.map(filter => mustList.push(filter)); - // } + const body: MultiMatchSeachBody = { 'query': { 'bool': { @@ -46,7 +66,8 @@ export async function collectionFromEs(obj: any) { 'size': size, 'body': body }; - const result: ApiResponse> = await client.search(query); + + const result: ApiResponse> = await client.search>(query); return await aoeCollectionResponseMapper(result); } catch (error) { throw new Error(error); @@ -124,13 +145,19 @@ export const getCollectionDataToEs = async (offset: number, limit: number) => { } }; -export async function collectionDataToEs(index: string, data: any) { +export async function collectionDataToEs(index: string, data: any, operation : 'create' | 'index') { try { if (data.length > 0) { - const body = data.flatMap(doc => [{ index: { _index: index, _id: doc.id } }, doc]); - // winstonLogger.debug("THIS IS BODY:"); - // winstonLogger.debug(JSON.stringify(body)); - const { body: bulkResponse } = await client.bulk({ refresh: true, body }); + const body = data.flatMap(doc => [{ [operation]: { _index: index, _id: doc.id } }, doc]); + winstonLogger.info(`Adding ${data.length} documents to OpenSearch index ${index}`) + + const { statusCode, body: bulkResponse } = await performBulkOperation(client, index, body) + winstonLogger.info(`OpenSearch index ${index} bulk completed with status code: ${statusCode} 
, took: ${bulkResponse.took}, errors: ${bulkResponse.errors}`); + + if (winstonLogger.isDebugEnabled()) { + winstonLogger.debug(`OpenSearch index ${index} bulk completed with response body ${JSON.stringify(bulkResponse)}`); + } + if (bulkResponse.errors) { const erroredDocuments = []; // The items array has the same order of the dataset we just indexed. @@ -154,6 +181,7 @@ export async function collectionDataToEs(index: string, data: any) { } } } catch (err) { + winstonLogger.error(`Failed to add documents to OpenSearch index ${index} due to ${JSON.stringify(err)}`) throw new Error(err); } } diff --git a/aoe-web-backend/src/search/esQueries.ts b/aoe-web-backend/src/search/esQueries.ts index 5f39b0fe2..26ded671e 100644 --- a/aoe-web-backend/src/search/esQueries.ts +++ b/aoe-web-backend/src/search/esQueries.ts @@ -1,6 +1,5 @@ // import { ErrorHandler } from '@/helpers/errorHandler'; -import { ApiResponse, Client } from '@elastic/elasticsearch'; import winstonLogger from '@util/winstonLogger'; import { NextFunction, Request, Response } from 'express'; import { @@ -14,9 +13,36 @@ import { SearchResponse, Source } from './esTypes'; +import { AwsSigv4Signer } from "@opensearch-project/opensearch/aws"; +import AWS from "aws-sdk"; +import { ApiResponse, Client } from "@opensearch-project/opensearch"; const index: string = process.env.ES_INDEX; -const client: Client = new Client({ node: process.env.ES_NODE }); +const isProd = process.env.NODE_ENV == 'production'; + +const client = new Client( + isProd + ? 
{ + ...AwsSigv4Signer({ + region: process.env.AWS_REGION || 'eu-west-1', + service: 'aoss', + getCredentials: () => + new Promise((resolve, reject) => { + AWS.config.getCredentials((err, credentials) => { + if (err) { + reject(err); + } else { + resolve(credentials); + } + }); + }), + }), + node: process.env.ES_NODE, + } + : { + node: process.env.ES_NODE, + } +); export async function aoeResponseMapper(response: ApiResponse>) { try { @@ -209,7 +235,7 @@ export const elasticSearchQuery = async (req: Request, res: Response, next: Next 'size': size, 'body': body }; - const result: ApiResponse> = await client.search(query); + const result: ApiResponse> = await client.search>(query); const responseBody: AoeBody = await aoeResponseMapper(result); res.status(200).json(responseBody); } catch (error) { diff --git a/aoe-web-backend/src/search/esTypes.ts b/aoe-web-backend/src/search/esTypes.ts index 601972b8b..bbb05634b 100644 --- a/aoe-web-backend/src/search/esTypes.ts +++ b/aoe-web-backend/src/search/esTypes.ts @@ -88,21 +88,6 @@ export interface SearchResponse { aggregations?: any; } -// export interface CollectionSource { -// id: number; -// createdat: Date; -// publishedat: Date; -// updatedat: Date; -// name: string; -// description: string; -// keywords: Array<{ -// value: string; -// key: string; -// }>; -// languages: Array; -// alignmentObjects: -// } - export interface Source { id: number; createdat: Date; @@ -229,12 +214,6 @@ export interface Source { popularity: number; } -// interface AoeBody { -// hits: number; -// results?: Array< -// { -// _source: T; - export interface AoeRequestFilter { educationalLevels: Array; learningResourceTypes: Array; diff --git a/aoe-web-backend/src/server.ts b/aoe-web-backend/src/server.ts index 396ff98b2..3ae161c68 100755 --- a/aoe-web-backend/src/server.ts +++ b/aoe-web-backend/src/server.ts @@ -5,49 +5,24 @@ dotenv.config(); import app from './app'; import errorHandler from 'errorhandler'; -import fs from 'fs'; -import https 
from 'https'; -import { Server, Socket } from 'net'; +import { Socket } from 'net'; import winstonLogger from '@util/winstonLogger'; app.use(errorHandler()); -// Start HTTP or HTTPS server depending on NODE_ENV variable ('localhost' | 'development' | 'production'). -let server: Server; -if (process.env.NODE_ENV === 'localhost') { - const options = { - key: fs.readFileSync('./cert/cert.key') as Buffer, - cert: fs.readFileSync('./cert/cert.crt') as Buffer, - }; - server = https - .createServer(options, app) - .listen(parseInt(process.env.PORT_LISTEN as string, 10) || 3000, '127.0.0.1', () => { - winstonLogger.info('App is running at https://localhost:%d in %s mode', app.get('port'), app.get('env')); - }); -} else { - server = app.listen(parseInt(process.env.PORT_LISTEN as string, 10) || 3000, '0.0.0.0', () => { - winstonLogger.info('App is running at http://localhost:%d in %s mode', app.get('port'), app.get('env')); - }); -} +const server = app.listen(parseInt(process.env.PORT_LISTEN as string, 10) || 3000, '0.0.0.0', () => { + winstonLogger.info('App is running at http://0.0.0.0:%d in %s mode', process.env.PORT_LISTEN, app.get('env')); +}); // Socket event handlers for the debugging purposes. 
server.on('connection', (socket: Socket) => { socket.setTimeout(600 * 60 * 1000); - // winstonLogger.debug('SOCKET OPENED: %s', socket.address()); - // socket.on('end', () => winstonLogger.debug('SOCKET END: other end of the socket sends a FIN packet')); socket.on('timeout', () => { - // winstonLogger.debug("SOCKET TIMEOUT"); - // socket.destroy(); socket.end(); }); socket.on('error', () => { - // winstonLogger.error('SOCKET ERROR: %o', error); - // socket.destroy(); socket.end(); }); - // socket.on('close', (isError: boolean) => { - // winstonLogger.debug('SOCKET CLOSED FOR ERROR: %s', isError)); - // }); }); export default server; diff --git a/aoe-web-backend/src/services/authService.ts b/aoe-web-backend/src/services/authService.ts index f1fc25cb0..1cd0e83db 100644 --- a/aoe-web-backend/src/services/authService.ts +++ b/aoe-web-backend/src/services/authService.ts @@ -204,14 +204,11 @@ export const hasAoeAccess = async (username: string): Promise => { export default { getUserData, - // hasAccesstoPublication, checkAuthenticated, insertUserToDatabase, hasAccessToPublicatication, - // logout, hasAccessToMaterial, hasAccessToAttachmentFile, hasAccessToCollection, - // hasAccessToCollectionParams, userInfo, }; diff --git a/aoe-web-backend/src/services/h5pService.ts b/aoe-web-backend/src/services/h5pService.ts index 6730b3315..28f0aadfd 100644 --- a/aoe-web-backend/src/services/h5pService.ts +++ b/aoe-web-backend/src/services/h5pService.ts @@ -66,7 +66,6 @@ export const downloadAndRenderH5P = async (req: Request, res: Response): Promise const options: { onlyInstallLibraries?: boolean } = { onlyInstallLibraries: false, }; - let page: string | any; try { await directoryDownloadFromStorage(paramsS3, targetPath); const buffer: Buffer = await promises.readFile(targetPath); diff --git a/aoe-web-backend/src/services/mailService.ts b/aoe-web-backend/src/services/mailService.ts index f84cd9bb8..9b276ae68 100644 --- a/aoe-web-backend/src/services/mailService.ts +++ 
b/aoe-web-backend/src/services/mailService.ts @@ -61,12 +61,10 @@ export async function sendExpirationMail() { if (!(process.env.SEND_EMAIL === '1')) { winstonLogger.debug('Email sending disabled'); } else { - const materials = await getExpiredMaterials(); for (const element of emailArray) { mailOptions.to = element; const info = await transporter.sendMail(mailOptions); winstonLogger.debug('Message sent: %s', info.messageId); - // winstonLogger.debug("Message sent: %s", info.response); } } } catch (err) { @@ -100,7 +98,6 @@ export async function sendRatingNotificationMail() { try { const info = await transporter.sendMail(mailOptions); winstonLogger.debug('Message sent: %s', info.messageId); - // winstonLogger.debug("Message sent: %s", info.response); } catch (error) { winstonLogger.error(error); } @@ -154,7 +151,6 @@ export async function sendVerificationEmail(user: string, email: string) { if (process.env.SEND_EMAIL === '1') { const info = await transporter.sendMail(mailOptions); winstonLogger.debug('Message sent: %s', info.messageId); - // winstonLogger.debug("Message sent: %s", info.response); } return url; } diff --git a/aoe-web-backend/src/services/pidResolutionService.ts b/aoe-web-backend/src/services/pidResolutionService.ts index 4d5719a14..bff6c8294 100644 --- a/aoe-web-backend/src/services/pidResolutionService.ts +++ b/aoe-web-backend/src/services/pidResolutionService.ts @@ -1,35 +1,35 @@ -import axios, { AxiosRequestConfig } from 'axios'; import { updateEduMaterialVersionURN } from '@query/apiQueries'; import { getEdumaterialVersionsWithoutURN } from '@query/pidQueries'; import { getEduMaterialVersionURL } from './urlService'; import winstonLogger from '@util/winstonLogger'; -import { IRegisterPID } from '@aoe/services/pidResolutionService'; -import config from '@/config'; +import { Urn } from '@domain/aoeModels'; /** * Request for PID registration using URN type. * @param url string Resource URL for PID registration. 
*/ -export const registerPID = async (url: string): Promise => { - try { - const pidRegistrationParams: IRegisterPID = { - url: url as string, - type: 'URN', - persist: '0', - }; - const requestHeaders: Record = { - 'Content-Type': 'application/json', - apikey: config.SERVER_CONFIG_OPTIONS.pidApiKey, - }; - const response: Record = await axios.post( - config.SERVER_CONFIG_OPTIONS.pidServiceURL, - pidRegistrationParams as IRegisterPID, - { headers: requestHeaders } as AxiosRequestConfig, - ); - return response.data; - } catch (error) { - winstonLogger.error('PID registration failed in registerPID(): ' + error); +export const registerPID = async (url: string): Promise => { + const record = await Urn.findOne({ + where: { material_url: url }, + }); + + if (record) { + winstonLogger.error(`URL ${url} already has urn generated`); + return null; } + + const newId = await Urn.create({ material_url: url }); + const internalId = newId.id; + const now = new Date(); + const year = now.getFullYear(); + const month = (now.getMonth() + 1).toString().padStart(2, '0'); + + // Previous URN generator generated formattedInternalId with 8 digits. 
To prevent possible duplicates increase it to 9 + const formattedInternalId = internalId.toString().padStart(9, '0'); + const formattedString = `${year}${month}${formattedInternalId}`; + + const luhnChecksum = calculateLuhn(formattedString); + return `urn:nbn:fi:oerfi-${year}${month}${formattedInternalId}_${luhnChecksum}`; }; /** @@ -82,6 +82,27 @@ export const processEntriesWithoutPID = async (): Promise => { } }; +const calculateLuhn = (number: string): number => { + let sum = 0; + let alternate = false; + + for (let i = number.length - 1; i >= 0; i--) { + let n = parseInt(number[i], 10); + + if (alternate) { + n *= 2; + if (n > 9) { + n -= 9; + } + } + + sum += n; + alternate = !alternate; + } + + return (10 - (sum % 10)) % 10; +}; + export default { processEntriesWithoutPID, registerPID, diff --git a/aoe-web-backend/src/services/streamingService.ts b/aoe-web-backend/src/services/streamingService.ts index ae73cb717..8f7b44bbb 100644 --- a/aoe-web-backend/src/services/streamingService.ts +++ b/aoe-web-backend/src/services/streamingService.ts @@ -27,11 +27,12 @@ export const requestRedirected = async ( * @return {Promise} Streaming service operable: true | false */ export const streamingStatusCheck = (fileStorageId: string): Promise => { - return httpsClient({ + return httpsClient(config.STREAM_STATUS_REQUEST.httpsEnabled, { headers: { 'Cache-Control': 'no-cache', }, host: config.STREAM_STATUS_REQUEST.host as string, + ...(config.STREAM_STATUS_REQUEST.httpsEnabled ? 
{} : { port: config.STREAM_STATUS_REQUEST.port }), method: 'HEAD', path: (config.STREAM_STATUS_REQUEST.path as string) + fileStorageId, timeout: 1000, diff --git a/aoe-web-backend/src/services/workers/workerActivity.ts b/aoe-web-backend/src/services/workers/workerActivity.ts index 52bf2db0c..d79c82672 100644 --- a/aoe-web-backend/src/services/workers/workerActivity.ts +++ b/aoe-web-backend/src/services/workers/workerActivity.ts @@ -4,10 +4,8 @@ import { kafkaProducer } from '@resource/kafkaClient'; import winstonLogger from '@util/winstonLogger'; import moment from 'moment'; import { parentPort, workerData } from 'worker_threads'; -// import { createHash } from 'crypto'; const message: TypeMaterialActivity = { - // sessionId: createHash('md5').update(workerData.headers['cookie']).digest('hex') as string, timestamp: moment.utc().toISOString() as string, eduMaterialId: null, interaction: workerData.query.interaction, diff --git a/aoe-web-backend/src/services/workers/workerSearch.ts b/aoe-web-backend/src/services/workers/workerSearch.ts index 52af1e0f0..4eb7e9333 100644 --- a/aoe-web-backend/src/services/workers/workerSearch.ts +++ b/aoe-web-backend/src/services/workers/workerSearch.ts @@ -4,10 +4,8 @@ import { kafkaProducer } from '@resource/kafkaClient'; import winstonLogger from '@util/winstonLogger'; import moment from 'moment'; import { parentPort, workerData } from 'worker_threads'; -// import { createHash } from 'crypto'; const message: TypeSearchRequest = { - // sessionId: createHash('md5').update(workerData.headers['cookie']).digest('hex') as string, timestamp: workerData.body.timestamp ? 
workerData.body.timestamp : (moment.utc().toISOString() as string), keywords: workerData.body.keywords, filters: workerData.body.filters, diff --git a/aoe-web-backend/src/util/metadataModifer.ts b/aoe-web-backend/src/util/metadataModifer.ts deleted file mode 100644 index d3f85ccf1..000000000 --- a/aoe-web-backend/src/util/metadataModifer.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { AlignmentObject } from '@aoe/util/metadataModifier'; - -/** - * AlignmentObject model transformations for the target URLs of educational subjects and objectives. - * @param alignmentObject - */ -export const transformAlignmentObject = async (alignmentObject: AlignmentObject): Promise => { - // TODO: Development in progress -}; diff --git a/aoe-web-backend/src/util/requestErrorHandler.ts b/aoe-web-backend/src/util/requestErrorHandler.ts index a8c43341d..4ddff9d06 100644 --- a/aoe-web-backend/src/util/requestErrorHandler.ts +++ b/aoe-web-backend/src/util/requestErrorHandler.ts @@ -2,7 +2,7 @@ import { NextFunction, Request, Response } from 'express'; import { ValidationError, validationResult } from 'express-validator'; export default async (req: Request, res: Response, next: NextFunction): Promise => { - const errorFormatter = ({ location, msg, param /*value, nestedErrors*/ }: ValidationError) => { + const errorFormatter = ({ location, msg, param }: ValidationError) => { return `${location}[${param}]: ${msg}`; }; const result = validationResult(req).formatWith(errorFormatter); diff --git a/aoe-web-backend/src/util/requestValidator.ts b/aoe-web-backend/src/util/requestValidator.ts index b3f8925d9..084051299 100644 --- a/aoe-web-backend/src/util/requestValidator.ts +++ b/aoe-web-backend/src/util/requestValidator.ts @@ -16,7 +16,6 @@ export const addCollectionValidationRules = (): ValidationChain[] => { body('emId', 'emId expected').exists(), body('emId', 'Array emId expected').isArray(), body('emId.*').isInt(), - // body("emId").custom(arr => arr.every((e) => { return 
Number.isInteger(parseInt(e)); })), ]; }; diff --git a/aoe-web-backend/types/aoe/index.d.ts b/aoe-web-backend/types/aoe/index.d.ts index 34eac8178..a9598a3fb 100644 --- a/aoe-web-backend/types/aoe/index.d.ts +++ b/aoe-web-backend/types/aoe/index.d.ts @@ -1,4 +1,4 @@ -import { BuildOptions, Model } from 'sequelize'; +import { BuildOptions, CreationOptional, InferAttributes, InferCreationAttributes, Model } from 'sequelize'; /** * Global interface and type declarations for the data persistence with Sequelize. @@ -68,7 +68,6 @@ declare global { // Reference Information materialDisplayNames?: MaterialDisplayName[]; temporaryRecords?: TemporaryRecord[]; - // records: Record[]; } type MaterialType = typeof Model & { @@ -142,4 +141,9 @@ declare global { type TemporaryRecordType = typeof Model & { new (values?: Record, options?: BuildOptions): TemporaryRecord; }; + + interface UrnModel extends Model, InferCreationAttributes> { + id: CreationOptional; + material_url: string; + } } diff --git a/aoe-web-backend/types/aoe/search/es.d.ts b/aoe-web-backend/types/aoe/search/es.d.ts index 30ca9824c..5e71ecfb6 100644 --- a/aoe-web-backend/types/aoe/search/es.d.ts +++ b/aoe-web-backend/types/aoe/search/es.d.ts @@ -1,3 +1,3 @@ export interface ISearchIndexMap { - mappings: unknown; + mappings: Record; } diff --git a/aoe-web-frontend/.gitlab-ci.yml b/aoe-web-frontend/.gitlab-ci.yml deleted file mode 100644 index c3cb4ca14..000000000 --- a/aoe-web-frontend/.gitlab-ci.yml +++ /dev/null @@ -1,71 +0,0 @@ -variables: - DOCKER_HOST: unix:///var/run/docker.sock - DOCKER_DRIVER: overlay2 - -image: - name: docker:24.0.6 - -stages: - - build - - deploy - - cleanup - -build_demo: - stage: build - script: - - echo "DEMO build" - - docker compose -f docker-compose.demo.yml build - only: - - test - tags: - - test-apps - -deploy_demo: - stage: deploy - script: - - echo "DEMO deploy" - - docker compose -f docker-compose.demo.yml up -d - only: - - test - tags: - - test-apps - -cleanup_demo: - stage: 
cleanup - script: - - echo "DEMO system clean up" - - docker system prune -a -f --volumes - only: - - test - tags: - - test-apps - -build_prod: - stage: build - script: - - echo "PROD build" - - docker compose -f docker-compose.prod.yml build - only: - - main - tags: - - prod - -deploy_prod: - stage: deploy - script: - - echo "PROD deploy" - - docker compose -f docker-compose.prod.yml up -d - only: - - main - tags: - - prod - -cleanup_prod: - stage: cleanup - script: - - echo "PROD system clean up" - - docker system prune -a --volumes -f - only: - - main - tags: - - prod diff --git a/aoe-web-frontend/README.md b/aoe-web-frontend/README.md index 86f682a72..b8cc832aa 100644 --- a/aoe-web-frontend/README.md +++ b/aoe-web-frontend/README.md @@ -1,13 +1,4 @@ -# [AOE - Library of Open Educational Resources](https://github.com/CSCfi/aoe) - -## Service Component Links to GitHub Repositories - -- [aoe-data-analytics](https://github.com/CSCfi/aoe-data-analytics) -- [aoe-data-services](https://github.com/CSCfi/aoe-data-services) -- [aoe-semantic-apis](https://github.com/CSCfi/aoe-semantic-apis) -- [aoe-streaming-app](https://github.com/CSCfi/aoe-streaming-app) -- [aoe-web-backend](https://github.com/CSCfi/aoe-web-backend) -- aoe-web-frontend +# AOE Web frontend Project is based on [CoreUI Angular 2+ Admin template.](https://github.com/coreui/coreui-free-angular-admin-template) @@ -15,15 +6,7 @@ Main tools are Angular, Bootstrap 4 and SASS ## Installation -### Clone repo - ``` bash -# clone the repo -$ git clone https://github.com/CSCfi/aoe-web-frontend.git - -# go into app's directory -$ cd aoe-frontend - # install app's dependencies $ npm install ``` diff --git a/aoe-web-frontend/angular.json b/aoe-web-frontend/angular.json index e112f466c..58211c3aa 100644 --- a/aoe-web-frontend/angular.json +++ b/aoe-web-frontend/angular.json @@ -44,7 +44,7 @@ "namedChunks": true }, "configurations": { - "production": { + "prod": { "budgets": [ { "type": "anyComponentStyle", @@ -85,6 
+85,48 @@ "with": "src/environments/environment.demo.ts" } ] + }, + "dev": { + "budgets": [ + { + "type": "anyComponentStyle", + "maximumWarning": "6kb" + } + ], + "optimization": true, + "outputHashing": "all", + "sourceMap": false, + "namedChunks": false, + "extractLicenses": true, + "vendorChunk": false, + "buildOptimizer": true, + "fileReplacements": [ + { + "replace": "src/environments/environment.ts", + "with": "src/environments/environment.dev.ts" + } + ] + }, + "qa": { + "budgets": [ + { + "type": "anyComponentStyle", + "maximumWarning": "6kb" + } + ], + "optimization": true, + "outputHashing": "all", + "sourceMap": false, + "namedChunks": false, + "extractLicenses": true, + "vendorChunk": false, + "buildOptimizer": true, + "fileReplacements": [ + { + "replace": "src/environments/environment.ts", + "with": "src/environments/environment.qa.ts" + } + ] } }, "defaultConfiguration": "" @@ -95,11 +137,17 @@ "browserTarget": "ng:build" }, "configurations": { - "production": { - "browserTarget": "ng:build:production" + "prod": { + "browserTarget": "ng:build:prod" }, "demo": { "browserTarget": "ng:build:demo" + }, + "qa": { + "browserTarget": "ng:build:qa" + }, + "dev": { + "browserTarget": "ng:build:dev" } } }, @@ -150,6 +198,7 @@ }, "cli": { "packageManager": "npm", - "defaultCollection": "@angular-eslint/schematics" + "defaultCollection": "@angular-eslint/schematics", + "analytics": false } } diff --git a/aoe-web-frontend/deploy-scripts/01-build.sh b/aoe-web-frontend/deploy-scripts/01-build.sh new file mode 100755 index 000000000..2892c1719 --- /dev/null +++ b/aoe-web-frontend/deploy-scripts/01-build.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +build_command="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/scripts/build-configuration.sh" + +function main { + ${build_command} dev + ${build_command} qa + ${build_command} prod +} + +main + + diff --git a/aoe-web-frontend/deploy-scripts/02-push-image.sh 
b/aoe-web-frontend/deploy-scripts/02-push-image.sh new file mode 100755 index 000000000..e989dad2c --- /dev/null +++ b/aoe-web-frontend/deploy-scripts/02-push-image.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +push_command="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/scripts/push-image.sh" + +function main { + ${push_command} dev + ${push_command} qa + ${push_command} prod +} + +main + + diff --git a/aoe-web-frontend/deploy-scripts/scripts/build-configuration.sh b/aoe-web-frontend/deploy-scripts/scripts/build-configuration.sh new file mode 100755 index 000000000..958243793 --- /dev/null +++ b/aoe-web-frontend/deploy-scripts/scripts/build-configuration.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +# shellcheck source=../scripts/common-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../../scripts/common-functions.sh" + +# shellcheck source=./deploy-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../../deploy-scripts/deploy-functions.sh" +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../../scripts/build-functions.sh" + +function main { + local aoe_service_name="aoe-web-frontend" + local service_image_tag="AOE_WEB_FRONTEND_TAG" + + configuration=${1:-none} + case $configuration in + "dev"|"qa"|"prod") + echo "Building for configuration ${configuration}" + ;; + *) + echo "Configuration parameter is not correct (got configuration=$configuration)" + exit 1 + ;; + esac + + cd "$repo" + + buildService "$aoe_service_name-${configuration}" "$service_image_tag" +} + +main "$1" + + diff --git a/aoe-web-frontend/deploy-scripts/scripts/push-image.sh b/aoe-web-frontend/deploy-scripts/scripts/push-image.sh new file mode 100755 index 000000000..d6c99e189 --- /dev/null +++ b/aoe-web-frontend/deploy-scripts/scripts/push-image.sh @@ -0,0 +1,40 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +# shellcheck 
source=../scripts/common-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../../scripts/common-functions.sh" + +# shellcheck source=./deploy-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../../deploy-scripts/deploy-functions.sh" + +function main() { + setup + + configuration=${1:-none} + case $configuration in + "dev"|"qa"|"prod") + echo "Building for configuration ${configuration}" + ;; + *) + echo "Configuration parameter is not correct (got configuration=$configuration)" + exit 1 + ;; + esac + + local aoe_service_name="aoe-web-frontend" + local github_image_tag="$github_registry${aoe_service_name}-${configuration}:${IMAGE_TAG}" + + local ecr_registry="${REGISTRY}/$aoe_service_name" + local ecr_image_tag="${ecr_registry}:${IMAGE_TAG}" + upload_image_to_ecr "$github_image_tag" "$ecr_image_tag-${configuration}" +} + +function setup() { + cd "${repo}" + require_command docker + require_docker_compose + configure_aws_credentials + get_ecr_login_credentials +} + +main "$1" diff --git a/aoe-web-frontend/docker-compose.demo.yml b/aoe-web-frontend/docker-compose.demo.yml deleted file mode 100644 index efbbe2f29..000000000 --- a/aoe-web-frontend/docker-compose.demo.yml +++ /dev/null @@ -1,19 +0,0 @@ -services: - aoe-web-frontend: - build: - context: . - dockerfile: ./docker/demo.Dockerfile - image: aoe-web-frontend-demo:latest - container_name: aoe-web-frontend-demo - restart: unless-stopped - ports: - - "8282:80" - volumes: - - /webdata:/usr/share/nginx/html/webdata - user: "root:5606" - networks: - - network-frontend-demo - -networks: - network-frontend-demo: - driver: bridge diff --git a/aoe-web-frontend/docker-compose.prod.yml b/aoe-web-frontend/docker-compose.prod.yml deleted file mode 100644 index 16acc2a75..000000000 --- a/aoe-web-frontend/docker-compose.prod.yml +++ /dev/null @@ -1,19 +0,0 @@ -services: - aoe-web-frontend: - build: - context: . 
- dockerfile: ./docker/prod.Dockerfile - image: aoe-web-frontend-prod:latest - container_name: aoe-web-frontend-prod - restart: unless-stopped - ports: - - "8282:80" - volumes: - - /webdata:/usr/share/nginx/html/webdata - user: "root:5606" - networks: - - network-frontend-prod - -networks: - network-frontend-prod: - driver: bridge diff --git a/aoe-web-frontend/docker/Dockerfile b/aoe-web-frontend/docker/Dockerfile new file mode 100644 index 000000000..845fa37ab --- /dev/null +++ b/aoe-web-frontend/docker/Dockerfile @@ -0,0 +1,16 @@ +ARG FRONTEND_CONFIGURATION +FROM node:14.21.3-bullseye AS front-builder + +WORKDIR /app +ADD ./ /app/ + +ARG FRONTEND_CONFIGURATION + +RUN echo "The frontend configuration is: $FRONTEND_CONFIGURATION" + +RUN npm ci && \ + npm run build --no-cache -- --configuration "$FRONTEND_CONFIGURATION" + +FROM openresty/openresty:1.19.9.1-4-bullseye +COPY --from=front-builder /app/dist/ /usr/share/nginx/html/ +COPY ./docker/server.conf /etc/nginx/conf.d/default.conf diff --git a/aoe-web-frontend/docker/demo.Dockerfile b/aoe-web-frontend/docker/demo.Dockerfile deleted file mode 100644 index c575af11b..000000000 --- a/aoe-web-frontend/docker/demo.Dockerfile +++ /dev/null @@ -1,9 +0,0 @@ -FROM node:14.21.3-bullseye as front-builder -WORKDIR /app -ADD ./ /app/ -RUN npm ci && \ - npm run build-demo --no-cache - -FROM openresty/openresty:1.19.9.1-4-bullseye -COPY --from=front-builder /app/dist/ /usr/share/nginx/html/ -COPY ./docker/server.conf /etc/nginx/conf.d/default.conf diff --git a/aoe-web-frontend/docker/prod.Dockerfile b/aoe-web-frontend/docker/prod.Dockerfile deleted file mode 100644 index 585b83c0a..000000000 --- a/aoe-web-frontend/docker/prod.Dockerfile +++ /dev/null @@ -1,9 +0,0 @@ -FROM node:14.21.3-bullseye as front-builder -WORKDIR /app -ADD ./ /app/ -RUN npm ci && \ - npm run build-prod --no-cache - -FROM openresty/openresty:1.19.9.1-4-bullseye -COPY --from=front-builder /app/dist/ /usr/share/nginx/html/ -COPY ./docker/server.conf 
/etc/nginx/conf.d/default.conf diff --git a/aoe-web-frontend/docker/server.conf b/aoe-web-frontend/docker/server.conf index 56cec3470..484fa2d4f 100644 --- a/aoe-web-frontend/docker/server.conf +++ b/aoe-web-frontend/docker/server.conf @@ -1,5 +1,5 @@ server { - listen 80; + listen 8080; server_name localhost; server_tokens off; client_max_body_size 1G; @@ -7,4 +7,9 @@ server { root /usr/share/nginx/html; index index.html; } + location /health { + access_log off; + return 200; + add_header Content-Type text/plain; + } } diff --git a/aoe-web-frontend/package.json b/aoe-web-frontend/package.json index b53769c96..c96e6346b 100644 --- a/aoe-web-frontend/package.json +++ b/aoe-web-frontend/package.json @@ -24,8 +24,6 @@ "ng": "ng", "start": "ng serve", "build": "ng build", - "build-prod": "ng build --configuration production", - "build-demo": "ng build --configuration demo", "test": "ng test", "lint": "ng lint", "e2e": "ng e2e", diff --git a/aoe-web-frontend/src/app/providers/credential.interceptor.ts b/aoe-web-frontend/src/app/providers/credential.interceptor.ts index 8b3833d92..cc3406fa3 100644 --- a/aoe-web-frontend/src/app/providers/credential.interceptor.ts +++ b/aoe-web-frontend/src/app/providers/credential.interceptor.ts @@ -13,7 +13,7 @@ export class CredentialInterceptor implements HttpInterceptor { if (environment.production === false) { if ( (req.url.includes('userdata') || this.authService.hasUserData()) && - req.url.includes(environment.backendUrl) + (req.url.includes(environment.backendUrl) || req.url.includes(environment.backendUrlV2)) ) { req = req.clone({ withCredentials: true, diff --git a/aoe-web-frontend/src/environments/environment.dev.ts b/aoe-web-frontend/src/environments/environment.dev.ts new file mode 100644 index 000000000..ae1e548be --- /dev/null +++ b/aoe-web-frontend/src/environments/environment.dev.ts @@ -0,0 +1,28 @@ +/** + * @ignore + */ +export const environment = { + production: true, + loginUrl: 'https://dev.aoe.fi/api', + backendUrl: 
'https://dev.aoe.fi/api/v1', + backendUrlV2: 'https://dev.aoe.fi/api/v2', + embedBackendUrl: 'https://dev.aoe.fi/embed', + statisticsBackendUrl: 'https://dev.aoe.fi/api/v2/statistics/prod', + frontendUrl: 'https://dev.aoe.fi', + newERLSKey: 'aoe.new-educational-resource', + koodistoUrl: 'https://dev.aoe.fi/ref/api/v1', + sessionCookie: 'connect.sid', + userdataKey: 'aoe.userdata', + cookieSettingsCookie: 'aoe.cookies', + searchParams: 'aoe.searchParams', + searchResults: 'aoe.searchResults', + usedFilters: 'aoe.usedFilters', + editMaterial: 'aoe.editMaterial', + title: '- Avointen oppimateriaalien kirjasto (dev.aoe.fi)', + collection: 'aoe.collection', + collectionSearchParams: 'aoe.collectionSearchParams', + collectionSearchResults: 'aoe.collectionSearchResults', + sessionMaxAge: 60 * 60 * 8 * 1000, // 8 hours + disableForms: 'aoe.disableForms', + disableLogin: 'aoe.disableLogin', +}; diff --git a/aoe-web-frontend/src/environments/environment.prod.ts b/aoe-web-frontend/src/environments/environment.prod.ts index a1534ce6b..130e22fdf 100644 --- a/aoe-web-frontend/src/environments/environment.prod.ts +++ b/aoe-web-frontend/src/environments/environment.prod.ts @@ -3,14 +3,14 @@ */ export const environment = { production: true, - loginUrl: 'https://aoe.fi/api', - backendUrl: 'https://aoe.fi/api/v1', - backendUrlV2: 'https://aoe.fi/api/v2', - embedBackendUrl: 'https://lessons.aoe.fi/embed', - statisticsBackendUrl: 'https://aoe.fi/api/v2/statistics/prod', // demo - frontendUrl: 'https://aoe.fi', + loginUrl: 'https://aws.aoe.fi/api', + backendUrl: 'https://aws.aoe.fi/api/v1', + backendUrlV2: 'https://aws.aoe.fi/api/v2', + embedBackendUrl: 'https://aws.aoe.fi/embed', + statisticsBackendUrl: 'https://aws.aoe.fi/api/v2/statistics/prod', + frontendUrl: 'https://aws.aoe.fi', newERLSKey: 'aoe.new-educational-resource', - koodistoUrl: 'https://aoe.fi/ref/api/v1', + koodistoUrl: 'https://aws.aoe.fi/ref/api/v1', sessionCookie: 'connect.sid', userdataKey: 'aoe.userdata', 
cookieSettingsCookie: 'aoe.cookies', @@ -18,7 +18,7 @@ export const environment = { searchResults: 'aoe.searchResults', usedFilters: 'aoe.usedFilters', editMaterial: 'aoe.editMaterial', - title: '- Avointen oppimateriaalien kirjasto (aoe.fi)', + title: '- Avointen oppimateriaalien kirjasto (aws.aoe.fi)', collection: 'aoe.collection', collectionSearchParams: 'aoe.collectionSearchParams', collectionSearchResults: 'aoe.collectionSearchResults', diff --git a/aoe-web-frontend/src/environments/environment.qa.ts b/aoe-web-frontend/src/environments/environment.qa.ts new file mode 100644 index 000000000..a06fd4fd6 --- /dev/null +++ b/aoe-web-frontend/src/environments/environment.qa.ts @@ -0,0 +1,28 @@ +/** + * @ignore + */ +export const environment = { + production: true, + loginUrl: 'https://qa.aoe.fi/api', + backendUrl: 'https://qa.aoe.fi/api/v1', + backendUrlV2: 'https://qa.aoe.fi/api/v2', + embedBackendUrl: 'https://qa.aoe.fi/embed', + statisticsBackendUrl: 'https://qa.aoe.fi/api/v2/statistics/prod', + frontendUrl: 'https://qa.aoe.fi', + newERLSKey: 'aoe.new-educational-resource', + koodistoUrl: 'https://qa.aoe.fi/ref/api/v1', + sessionCookie: 'connect.sid', + userdataKey: 'aoe.userdata', + cookieSettingsCookie: 'aoe.cookies', + searchParams: 'aoe.searchParams', + searchResults: 'aoe.searchResults', + usedFilters: 'aoe.usedFilters', + editMaterial: 'aoe.editMaterial', + title: '- Avointen oppimateriaalien kirjasto (qa.aoe.fi)', + collection: 'aoe.collection', + collectionSearchParams: 'aoe.collectionSearchParams', + collectionSearchResults: 'aoe.collectionSearchResults', + sessionMaxAge: 60 * 60 * 8 * 1000, // 8 hours + disableForms: 'aoe.disableForms', + disableLogin: 'aoe.disableLogin', +}; diff --git a/deploy-scripts/01-build.sh b/deploy-scripts/01-build.sh new file mode 100755 index 000000000..d73fea627 --- /dev/null +++ b/deploy-scripts/01-build.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +# Figure out where we are currently 
+CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +readonly CURRENT_DIR + +# Run cdk buildscript +"${CURRENT_DIR}/../aoe-infra/01-build.sh" + +# Run data analytics buildscript +"${CURRENT_DIR}/../aoe-data-analytics/deploy-scripts/01-build.sh" + +# Run semantic-api buildscript +"${CURRENT_DIR}/../aoe-semantic-apis/deploy-scripts/01-build.sh" + +# Run streaming app buildscript +"${CURRENT_DIR}/../aoe-streaming-app/deploy-scripts/01-build.sh" + +# Run web backend buildscript +"${CURRENT_DIR}/../aoe-web-backend/deploy-scripts/01-build.sh" + +# Run data analytics buildscript +"${CURRENT_DIR}/../aoe-data-analytics/deploy-scripts/01-build.sh" + +# Run web frontend buildscript +"${CURRENT_DIR}/../aoe-web-frontend/deploy-scripts/01-build.sh" + +# Run data services buildscript +"${CURRENT_DIR}/../aoe-data-services/deploy-scripts/01-build.sh" diff --git a/deploy-scripts/02-lint.sh b/deploy-scripts/02-lint.sh new file mode 100755 index 000000000..9a609ea1c --- /dev/null +++ b/deploy-scripts/02-lint.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +# shellcheck source=../scripts/common-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../scripts/common-functions.sh" + +# shellcheck source=./deploy-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/deploy-functions.sh" + +function main { + echo "$repo" + cd "$repo" + use_correct_node_version + + runLint "aoe-semantic-apis" + runLint "aoe-web-frontend" + runLint "aoe-web-backend" + runLint "aoe-streaming-app" + runLint "aoe-infra" +} + +function runLint { + local repository=$1 + start_gh_actions_group "$repository" + pushd "$repository" + npm_ci_if_package_lock_has_changed + npm run lint + popd + end_gh_actions_group +} + +main "$@" diff --git a/deploy-scripts/03-push-image.sh b/deploy-scripts/03-push-image.sh new file mode 100755 index 000000000..011a70fbb --- /dev/null +++ b/deploy-scripts/03-push-image.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env 
bash +set -o errexit -o nounset -o pipefail + +# Figure out where we are currently +CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +readonly CURRENT_DIR + +# Run semantic-api push image script +"${CURRENT_DIR}/../aoe-semantic-apis/deploy-scripts/02-push-image.sh" + +# Run streaming app push image script +"${CURRENT_DIR}/../aoe-streaming-app/deploy-scripts/02-push-image.sh" + +# Run web backend push image script +"${CURRENT_DIR}/../aoe-web-backend/deploy-scripts/02-push-image.sh" + +# Run web frontend push image script +"${CURRENT_DIR}/../aoe-web-frontend/deploy-scripts/02-push-image.sh" + +# Run data analytics push image script +"${CURRENT_DIR}/../aoe-data-analytics/deploy-scripts/02-push-image.sh" + +# Run data analytics push image script +"${CURRENT_DIR}/../aoe-data-services/deploy-scripts/02-push-image.sh" diff --git a/deploy-scripts/04-deploy-dev.sh b/deploy-scripts/04-deploy-dev.sh new file mode 120000 index 000000000..91c113752 --- /dev/null +++ b/deploy-scripts/04-deploy-dev.sh @@ -0,0 +1 @@ +deploy.sh \ No newline at end of file diff --git a/deploy-scripts/05-deploy-qa.sh b/deploy-scripts/05-deploy-qa.sh new file mode 120000 index 000000000..91c113752 --- /dev/null +++ b/deploy-scripts/05-deploy-qa.sh @@ -0,0 +1 @@ +deploy.sh \ No newline at end of file diff --git a/deploy-scripts/06-deploy-prod.sh b/deploy-scripts/06-deploy-prod.sh new file mode 120000 index 000000000..91c113752 --- /dev/null +++ b/deploy-scripts/06-deploy-prod.sh @@ -0,0 +1 @@ +deploy.sh \ No newline at end of file diff --git a/deploy-scripts/deploy-functions.sh b/deploy-scripts/deploy-functions.sh new file mode 100644 index 000000000..0b72ba930 --- /dev/null +++ b/deploy-scripts/deploy-functions.sh @@ -0,0 +1,51 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +# allow sourcing this file multiple times from different scripts +if [ -n "${DEPLOY_FUNCTIONS_SOURCED:-}" ]; then + return +fi +readonly DEPLOY_FUNCTIONS_SOURCED="true" + +# shellcheck 
source=./common-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../scripts/common-functions.sh" + +readonly github_registry="ghcr.io/opetushallitus/" + +readonly deploy_dist_dir="$repo/deploy-scripts/dist/" +mkdir -p "$deploy_dist_dir" + +function image_exists_locally { + local tag="$1" + docker image inspect "$tag" &> /dev/null +} + +function require_built_image { + local tag="$1" + if image_exists_locally "${tag}"; then + info "${tag} already exists locally" + else + info "Pulling ${tag} because it does not exist locally" + docker pull "${tag}" + fi +} + +function upload_image_to_ecr { + local github_image_tag="$1" + local ecr_image_tag="$2" + + start_gh_actions_group "Uploading image to util account" + + require_built_image "$github_image_tag" + docker tag "${github_image_tag}" "${ecr_image_tag}" + docker push "${ecr_image_tag}" + + end_gh_actions_group +} + + +function get_ecr_login_credentials() { + if [[ "${CI:-}" = "true" ]]; then + aws ecr get-login-password --region "$AWS_REGION" | docker login --username AWS --password-stdin "$REGISTRY" + fi +} diff --git a/deploy-scripts/deploy.sh b/deploy-scripts/deploy.sh new file mode 100755 index 000000000..3cfa757b8 --- /dev/null +++ b/deploy-scripts/deploy.sh @@ -0,0 +1,38 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +# shellcheck source=../scripts/common-functions.sh +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../scripts/common-functions.sh" + +ACTION=$1 + +function main { + start_gh_actions_group "Setup" + parse_env_from_script_name "..-deploy" + use_correct_node_version + end_gh_actions_group + + if [[ "$ACTION" == "diff" ]]; then + start_gh_actions_group "Diff $ENV" + diff + end_gh_actions_group + elif [[ "$ACTION" == "deploy" ]]; then + start_gh_actions_group "Deploy $ENV" + deploy + end_gh_actions_group + fi +} + +function deploy { + pushd "$repo"/aoe-infra + "./cdk.sh" deploy --all + popd +} + +function diff { + pushd "$repo"/aoe-infra + "./cdk.sh" 
diff + popd +} + +main diff --git a/docker-compose.local-dev.yml b/docker-compose.local-dev.yml new file mode 100644 index 000000000..5612d044d --- /dev/null +++ b/docker-compose.local-dev.yml @@ -0,0 +1,73 @@ +name: aoe-local +services: + + aoe-web-frontend: + ports: + - 8282:80 + + aoe-streaming-app: + environment: + - NODE_ENV=development + - PORT=3001 + ports: + - 3001:3001 + env_file: + - path: ./aoe-streaming-app/.env + + aoe-data-services: + ports: + - 8002:8002 + env_file: + - path: ./aoe-data-services/.env + + aoe-semantic-apis: + environment: + - NODE_ENV=development + - LOG_LEVEL=info + - PORT_LISTEN=3002 + ports: + - 3002:3002 + env_file: + - path: ./aoe-semantic-apis/.env + required: true + command: sh -c "yarn serve" + + aoe-data-analytics: + entrypoint: ["java", "-Xms512m", "-Xmx512m", "-Djava.security.egd=file:/dev/./urandom", "-jar", "service-etl-processor.jar"] + ports: + - 8080:8080 + env_file: + - path: ./aoe-data-analytics/.env + logging: + options: + max-size: '10m' + max-file: '3' + + aoe-web-backend: + environment: + - NODE_ENV=development + + ports: + - 3000:3000 + env_file: + - ./aoe-web-backend/.env + redis: + ports: + - 6379:6379 + + postgres: + ports: + - 5432:5432 + + opensearch: + ports: + - 9200:9200 + - 9600:9600 + + mongo: + ports: + - 27017:27017 + +networks: + default: + name: aoe_local-dev diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 000000000..473156d24 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,407 @@ +name: aoe + +services: + + aoe-web-frontend: + build: + context: aoe-web-frontend + dockerfile: ./docker/Dockerfile + args: + FRONTEND_CONFIGURATION: ${FRONTEND_CONFIGURATION:-demo} + image: ${AOE_WEB_FRONTEND_TAG-aoe-web-frontend:latest} + container_name: aoe-web-frontend + restart: unless-stopped + volumes: + - ./docker/dev/webdata:/usr/share/nginx/html/webdata + user: "root:5606" + depends_on: + aoe-web-backend: + condition: service_started + + # Alias for build system + 
aoe-web-frontend-dev: + extends: aoe-web-frontend + build: + args: + FRONTEND_CONFIGURATION: dev + profiles: ["build-only"] + + # Alias for build system + aoe-web-frontend-qa: + extends: aoe-web-frontend + build: + args: + FRONTEND_CONFIGURATION: qa + profiles: ["build-only"] + + # Alias for build system + aoe-web-frontend-prod: + extends: aoe-web-frontend + build: + args: + FRONTEND_CONFIGURATION: prod + profiles: ["build-only"] + + nginx: + image: nginx:latest + hostname: nginx + container_name: nginx_proxy + ports: + - 443:443 + - 80:80 + volumes: + - ./docker/nginx/nginx.conf:/etc/nginx/nginx.conf:ro + - ./docker/dev/nginx-selfsigned.crt:/etc/nginx/certs/nginx-selfsigned.crt:ro + - ./docker/dev/nginx-selfsigned.key:/etc/nginx/certs/nginx-selfsigned.key:ro + depends_on: + aoe-web-backend: + condition: service_started + aoe-web-frontend: + condition: service_started + aoe-semantic-apis: + condition: service_started + + aoe-web-backend: + build: + context: aoe-web-backend + dockerfile: ./docker/Dockerfile + image: ${AOE_WEB_BACKEND_TAG-aoe-web-backend:latest} + container_name: aoe-web-backend + healthcheck: + test: [ "CMD", "wget", "--spider", "-q", "http://aoe-web-backend:3000" ] + interval: 10s + timeout: 5s + retries: 5 + restart: unless-stopped + volumes: + - ./docker/dev/web/thumbnail/:/app/thumbnail:rw + - ./docker/dev/web/uploads:/app/uploads:rw + - ./docker/dev/web/webdata:/webdata:rw + - ./docker/dev/web/webdata/htmlfolder:/webdata/htmlfolder:rw + depends_on: + aoe-oidc-server: + condition: service_healthy + postgres: + condition: service_started + kafka: + condition: service_healthy + kafka2: + condition: service_healthy + redis: + condition: service_started + opensearch: + condition: service_healthy + + aoe-data-analytics: + hostname: aoe-data-analytics + build: + context: aoe-data-analytics + dockerfile: ./service-etl-processor/Dockerfile + secrets: + - trust_store_password + image: ${AOE_DATA_ANALYTICS_TAG-aoe-data-analytics:latest} + container_name: 
aoe-data-analytics + restart: unless-stopped + depends_on: + kafka: + condition: service_healthy + kafka2: + condition: service_healthy + mongo: + condition: service_started + postgres: + condition: service_started + + aoe-streaming-app: + hostname: aoe-streaming-app + build: + context: aoe-streaming-app + dockerfile: ./docker/Dockerfile + image: ${AOE_STREAMING_APP_TAG-aoe-streaming-app:latest} + container_name: aoe-streaming-app + restart: unless-stopped + + depends_on: + localstack: + condition: service_started + + aoe-data-services: + hostname: aoe-data-services + build: + context: aoe-data-services + dockerfile: ./oaipmh-provider/Dockerfile + image: ${AOE_DATA_SERVICES_TAG-aoe-data-services:latest} + container_name: aoe-data-services + restart: unless-stopped + depends_on: + aoe-web-backend: + condition: service_started + + aoe-semantic-apis: + hostname: aoe-semantic-apis + build: + context: aoe-semantic-apis + dockerfile: ./docker/Dockerfile + image: ${AOE_SEMANTIC_APIS_TAG-aoe-semantic-apis:latest} + container_name: aoe-semantic-apis + restart: unless-stopped + depends_on: + redis: + condition: service_healthy + + + postgres: + image: postgres:15-alpine + container_name: aoe-postgres + environment: + POSTGRES_USER: aoeuser + POSTGRES_PASSWORD: aoepassword + POSTGRES_DB: aoe + volumes: + - ./docker/init-scripts/aoe-init.sql:/docker-entrypoint-initdb.d/init.sql + - ./docker/dev/postgres_data:/var/lib/postgresql/data + + opensearch: + image: opensearchproject/opensearch:2.10.0 + container_name: opensearch + environment: + - cluster.name=opensearch-cluster + - node.name=opensearch-node + - discovery.type=single-node + - bootstrap.memory_lock=true + - plugins.security.disabled=true + - "OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m" + volumes: + - ./docker/dev/os_data:/usr/share/opensearch/data + ulimits: + memlock: + soft: -1 + hard: -1 + healthcheck: + test: [ "CMD", "curl", "-f", "http://localhost:9200/_cluster/health" ] + interval: 10s + timeout: 5s + retries: 5 + + 
mongo: + image: mongo:6.0 + container_name: aoe-mongodb + environment: + MONGO_INITDB_ROOT_USERNAME: aoerootuser + MONGO_INITDB_ROOT_PASSWORD: aoerootpassword + MONGO_INITDB_DATABASE: aoe + volumes: + - ./docker/dev/mongo_data:/data/db + - ./docker/init-scripts/init-mongo.js:/docker-entrypoint-initdb.d/init-mongo.js + + zookeeper: + image: confluentinc/cp-zookeeper:7.0.1 + container_name: zookeeper + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + ZOOKEEPER_TICK_TIME: 2000 + healthcheck: + test: [ "CMD", "nc", "-z", "localhost", "2181" ] + interval: 10s + timeout: 5s + retries: 5 + start_period: 20s + ports: + - 2181:2181 + + kafka: + image: confluentinc/cp-kafka:7.0.1 + container_name: kafka + depends_on: + zookeeper: + condition: service_healthy + environment: + KAFKA_BROKER_ID: 1 + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka:9092,OUTSIDE://kafka:29092 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT,OUTSIDE:PLAINTEXT + KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 2 + ports: + - 29092:29092 + healthcheck: + test: [ "CMD", "nc", "-z", "localhost", "9092" ] + interval: 10s + timeout: 5s + retries: 5 + start_period: 30s + + kafka2: + image: confluentinc/cp-kafka:7.0.1 + container_name: kafka2 + depends_on: + zookeeper: + condition: service_healthy + environment: + KAFKA_BROKER_ID: 2 + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka2:9092,OUTSIDE://kafka2:39092 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT,OUTSIDE:PLAINTEXT + KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 2 + ports: + - 39092:39092 + healthcheck: + test: [ "CMD", "nc", "-z", "localhost", "9092" ] + interval: 10s + timeout: 5s + retries: 5 + start_period: 30s + + redis: + container_name: redis-reference + image: redis:6.2.4-alpine + privileged: true + command: ["redis-server", "--requirepass", "dev_password", "--user", 
"devuser", "on", ">devuser_password", "~*", "+@all", "--user", "default", "off", "nopass", "nocommands"] + restart: unless-stopped + environment: + REDIS_REPLICATION_MODE: master + volumes: + - ./docker/dev/redis_data:/data + healthcheck: + test: ["CMD-SHELL", "redis-cli -u redis://devuser:devuser_password@localhost:6379 ping | grep PONG"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 10s + + localstack: + image: localstack/localstack:latest + networks: + default: + aliases: + - aoe.s3.localhost.localstack.cloud + - aoepdf.s3.localhost.localstack.cloud + - aoethumbnail.s3.localhost.localstack.cloud + environment: + - SERVICES=s3 + - AWS_ACCESS_KEY_ID=test + - AWS_SECRET_ACCESS_KEY=test + - DEBUG=1 + - HOSTNAME_EXTERNAL=localhost + ports: + - 4566:4566 + volumes: + - ./docker/init-scripts/aws-shutdown.sh:/etc/localstack/init/shutdown.d/aws-shutdown.sh + - ./docker/init-scripts/init-aws.sh:/etc/localstack/init/ready.d/init-aws.sh + - ./docker/dev/localstack/S3:/host-directory + + aoe-oidc-server: + hostname: aoe-oidc-server + container_name: oidc-server-mock + image: ghcr.io/soluto/oidc-server-mock:latest + restart: on-failure:2 + expose: + - 80 + healthcheck: + test: [ "CMD", "curl", "-f", "http://localhost:80/.well-known/openid-configuration" ] + interval: 30s + timeout: 10s + retries: 3 + start_period: 5s + environment: + ASPNETCORE_ENVIRONMENT: Development + SERVER_OPTIONS_INLINE: | + { + "AccessTokenJwtType": "JWT", + "Discovery": { + "ShowKeySet": true + }, + "Authentication": { + "CookieSameSiteMode": "Lax", + "CheckSessionCookieSameSiteMode": "Lax" + } + } + OVERRIDE_STANDARD_IDENTITY_RESOURCES: true + IDENTITY_RESOURCES_INLINE: | + [ + { + "Name": "profile", + "ClaimTypes": [ "name", "given_name", "family_name", "uid", "id" ] + }, + { + "Name": "openid", + "ClaimTypes": [ "sub" ] + } + ] + USERS_CONFIGURATION_INLINE: | + [ + { + "SubjectId": "aoeuser-1", + "Username": "aoeuser", + "Password": "password123", + "Claims": [ + { + "Type": "sub", 
+ "Value": "aoeuser-1", + "ValueType": "string" + }, + { + "Type": "locale", + "Value": "de", + "ValueType": "string" + }, + { + "Type": "uid", + "Value": "c37ccf17-c8b8-4d5f-b2be-a751f8a4f46e", + "ValueType": "string" + }, + { + "Type": "id", + "Value": "1", + "ValueType": "string" + }, + { + "Type": "name", + "Value": "AOE User", + "ValueType": "string" + }, + { + "Type": "given_name", + "Value": "AOE_first", + "ValueType": "string" + }, + { 'Type': 'some-custom-identity-user-claim', 'Value': "Jack's Custom User Claim", 'ValueType': 'string' }, + + { + "Type": "family_name", + "Value": "AOE_last", + "ValueType": "string" + }, + { + "Type": "email", + "Value": "aoeuser@aoe.fi", + "ValueType": "string" + } + ] + } + ] + CLIENTS_CONFIGURATION_INLINE: | + [ + { + "ClientId": "aoe-client", + "ClientSecrets": ["aoe-secret"], + "RedirectUris": ["https://demo.aoe.fi/api/secure/redirect"], + "AllowedScopes": ["openid", "offline_access", "profile"], + "AllowOfflineAccess" : "true", + "AllowedGrantTypes": ["authorization_code"] + } + ] + ASPNETCORE_URLS: http://+:80 + ASPNET_SERVICES_OPTIONS_INLINE: | + { + "ForwardedHeadersOptions": { + "ForwardedHeaders" : "All" + } + } +volumes: + postgres_data: + +secrets: + trust_store_password: + environment: TRUST_STORE_PASSWORD diff --git a/docker/init-scripts/aoe-init.sql b/docker/init-scripts/aoe-init.sql new file mode 100644 index 000000000..798ca1604 --- /dev/null +++ b/docker/init-scripts/aoe-init.sql @@ -0,0 +1,699 @@ +CREATE TYPE lang AS ENUM ('fi', 'en', 'sv'); + + +CREATE TABLE Users +( + Id BIGSERIAL NOT NULL, + FirstName text NOT NULL, + LastName text NOT NULL, + UserName text NOT NULL, + PreferredLanguage lang DEFAULT 'fi' NOT NULL, + PreferredTargetName text NOT NULL, + PreferredAlignmentType text NOT NULL, + TermsOfUsage bool DEFAULT '0' NOT NULL, + email text, + verifiedemail BOOLEAN DEFAULT false, + newratings BOOLEAN DEFAULT false, + almostexpired BOOLEAN DEFAULT false, + termsupdated BOOLEAN DEFAULT false, + 
allowtransfer BOOLEAN DEFAULT false, + PRIMARY KEY (UserName) +); + +CREATE TABLE EducationalMaterial +( + Id BIGSERIAL NOT NULL, + CreatedAt timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, + PublishedAt timestamp with time zone, + UpdatedAt timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, + ArchivedAt timestamp with time zone, + TimeRequired text DEFAULT '' NOT NULL, + AgeRangeMin INTEGER, + AgeRangeMax INTEGER, + LicenseCode text DEFAULT '' NOT NULL, + Obsoleted INTEGER DEFAULT 0 NOT NULL, + OriginalPublishedAt timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, + UsersUserName text NOT NULL, + Expires timestamp with time zone, + SuitsAllEarlyChildhoodSubjects bool DEFAULT 'false' NOT NULL, + SuitsAllPrePrimarySubjects bool DEFAULT 'false' NOT NULL, + SuitsAllBasicStudySubjects bool DEFAULT 'false' NOT NULL, + SuitsAllUpperSecondarySubjects bool DEFAULT 'false' NOT NULL, + SuitsAllVocationalDegrees bool DEFAULT 'false' NOT NULL, + SuitsAllSelfMotivatedSubjects bool DEFAULT 'false' NOT NULL, + SuitsAllBranches bool DEFAULT 'false' NOT NULL, + SuitsAllUpperSecondarySubjectsNew bool DEFAULT 'false' NOT NULL, + RatingContentAverage NUMERIC(2, 1), + RatingVisualAverage NUMERIC(2, 1), + viewcounter BIGINT DEFAULT 0, + downloadcounter BIGINT DEFAULT 0, + counterupdatedat timestamp with time zone, + PRIMARY KEY (Id) +); + + +CREATE TABLE Material +( + Id BIGSERIAL NOT NULL, + Link text NOT NULL, + EducationalMaterialId BIGINT NOT NULL, + Obsoleted INTEGER DEFAULT 0 NOT NULL, + Priority INTEGER DEFAULT 0 NOT NULL, + MaterialLanguageKey TEXT DEFAULT 'fi'::lang NOT NULL, + PRIMARY KEY (Id) +); + + +CREATE TABLE EducationalAudience +( + Id BIGSERIAL NOT NULL, + EducationalRole text NOT NULL, + EducationalMaterialId BIGINT NOT NULL, + EducationalRoleKey text NOT NULL, + PRIMARY KEY (Id) +); + +CREATE TABLE InLanguage +( + Id BIGSERIAL NOT NULL, + InLanguage text NOT NULL, + Url text NOT NULL, + EducationalMaterialId BIGINT NOT NULL, + PRIMARY 
KEY (Id) +); +CREATE TABLE AlignmentObject +( + Id BIGSERIAL NOT NULL, + EducationalMaterialId BIGINT NOT NULL, + AlignmentType text NOT NULL, + TargetName text NOT NULL, + Source text NOT NULL, + EducationalFramework text DEFAULT '' NOT NULL, + ObjectKey text NOT NULL, + TargetUrl text, + PRIMARY KEY (Id) +); + +CREATE TABLE LearningResourceType +( + Id BIGSERIAL NOT NULL, + Value text NOT NULL, + EducationalMaterialId BIGINT NOT NULL, + LearningResourceTypeKey text, + PRIMARY KEY (Id) +); + +CREATE TABLE AccessibilityHazard +( + Id BIGSERIAL NOT NULL, + Value text NOT NULL, + EducationalMaterialId BIGINT NOT NULL, + AccessibilityHazardKey text NOT NULL, + PRIMARY KEY (Id) +); + +CREATE TABLE AccessibilityFeature +( + Id BIGSERIAL NOT NULL, + Value text NOT NULL, + EducationalMaterialId BIGINT NOT NULL, + AccessibilityFeatureKey text NOT NULL, + PRIMARY KEY (Id) +); + +CREATE TABLE EducationalLevel +( + Id BIGSERIAL NOT NULL, + Value text NOT NULL, + EducationalMaterialId BIGINT NOT NULL, + EducationalLevelKey text NOT NULL, + PRIMARY KEY (Id) +); + +CREATE TABLE KeyWord +( + Id BIGSERIAL NOT NULL, + Value text NOT NULL, + EducationalMaterialId BIGINT NOT NULL, + KeywordKey text NOT NULL, + PRIMARY KEY (Id) +); + +CREATE TABLE Record +( + Id BIGSERIAL NOT NULL, + FilePath text, + OriginalFileName text NOT NULL, + FileSize BIGINT NOT NULL, + MimeType text NOT NULL, + Format text, -- NOT IN SEQUALIZE MODEL + MaterialId BIGINT NOT NULL, + FileKey text, + FileBucket text, + pdfkey text, + PRIMARY KEY (Id) +); + +CREATE TABLE EducationalUse +( + Id BIGSERIAL NOT NULL, + Value text NOT NULL, + EducationalMaterialId BIGINT NOT NULL, + EducationalUseKey text NOT NULL, + PRIMARY KEY (Id) +); + +CREATE TABLE IsBasedOn +( + Id BIGSERIAL NOT NULL, + Url text NOT NULL, + MaterialName text NOT NULL, + EducationalMaterialId BIGINT NOT NULL, + PRIMARY KEY (Id) +); + +CREATE TABLE Publisher +( + Id BIGSERIAL NOT NULL, + Name text NOT NULL, + EducationalMaterialId BIGINT NOT NULL, 
+ PublisherKey text NOT NULL, + PRIMARY KEY (Id) +); + +CREATE TABLE MaterialDescription +( + Id BIGSERIAL NOT NULL, + Description text NOT NULL, + Language lang NOT NULL, + EducationalMaterialId BIGINT NOT NULL, + PRIMARY KEY (Id) +); +CREATE TABLE MaterialName +( + Id BIGSERIAL NOT NULL, + MaterialName text DEFAULT '' NOT NULL, + Language lang NOT NULL, + Slug text DEFAULT '' NOT NULL, + EducationalMaterialId BIGINT NOT NULL, + PRIMARY KEY (Id) +); + +CREATE TABLE Author +( + Id BIGSERIAL NOT NULL, + authorname text NOT NULL, + organization text NOT NULL, + EducationalMaterialId BIGINT NOT NULL, + organizationkey text NOT NULL, + PRIMARY KEY (Id) +); + +CREATE TABLE MaterialDisplayName +( + Id BIGSERIAL NOT NULL, + DisplayName text NOT NULL, + Language lang NOT NULL, + MaterialId BIGINT NOT NULL, + PRIMARY KEY (Id) +); + +CREATE TABLE temporaryrecord +( + Id BIGSERIAL NOT NULL, + FilePath text NOT NULL, + OriginalFileName text NOT NULL, + Filesize INTEGER, + Mimetype text NOT NULL, + Format text NOT NULL, + FileName text NOT NULL, + MaterialId BIGINT NOT NULL, + CreatedAt timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, + PRIMARY KEY (Id) +); + +CREATE TABLE Thumbnail +( + Id BIGSERIAL NOT NULL, + FilePath text NOT NULL, + MimeType text NOT NULL, + EducationalMaterialId BIGINT NOT NULL, + FileName text NOT NULL, + Obsoleted INTEGER DEFAULT 0 NOT NULL, + FileKey text NOT NULL, + FileBucket text NOT NULL, + PRIMARY KEY (Id) +); + +CREATE TABLE Attachment +( + Id BIGSERIAL NOT NULL, + FilePath text, + OriginalFileName text NOT NULL, + FileSize INTEGER NOT NULL, + MimeType text NOT NULL, + Format text NOT NULL, + FileKey text, + FileBucket text, + DefaultFile bool NOT NULL, + Kind text NOT NULL, + Label text NOT NULL, + Srclang text NOT NULL, + MaterialId BIGINT NOT NULL, + Obsoleted INTEGER DEFAULT 0 NOT NULL, + PRIMARY KEY (Id) +); + +CREATE TABLE TemporaryAttachment +( + Id BIGSERIAL NOT NULL, + FilePath text NOT NULL, + OriginalFileName text NOT NULL, 
+ Filesize INTEGER NOT NULL, + Mimetype text NOT NULL, + Format text NOT NULL, + FileName text NOT NULL, + CreatedAt timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, + DefaultFile bool NOT NULL, + Kind text NOT NULL, + Label text NOT NULL, + Srclang text NOT NULL, + AttachmentId BIGINT NOT NULL, + PRIMARY KEY (Id) +); + + +CREATE TABLE IsBasedOnAuthor +( + Id BIGSERIAL NOT NULL, + AuthorName text NOT NULL, + IsBasedOnId BIGINT NOT NULL, + PRIMARY KEY (Id) +); +ALTER TABLE IsBasedOnAuthor + ADD CONSTRAINT FKIsBasedOnAuthor FOREIGN KEY (IsBasedOnId) REFERENCES IsBasedOn (Id); + + +CREATE TABLE Rating +( + Id BIGSERIAL NOT NULL, + RatingContent INTEGER, + RatingVisual INTEGER, + FeedbackPositive varchar(1000), + FeedbackSuggest varchar(1000), + FeedbackPurpose varchar(1000), + EducationalMaterialId BIGINT NOT NULL, + UsersUserName text NOT NULL, + UpdatedAt timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL, + PRIMARY KEY (Id) +); + + +CREATE TABLE VersionComposition +( + EducationalMaterialId int8 NOT NULL, + MaterialId int8 NOT NULL, + PublishedAt timestamp NOT NULL, + Priority int4, + PRIMARY KEY (EducationalMaterialId, + MaterialId, + PublishedAt) +); +ALTER TABLE VersionComposition + ADD CONSTRAINT FKMaterialVersion FOREIGN KEY (MaterialId) REFERENCES Material (Id); +ALTER TABLE VersionComposition + ADD CONSTRAINT FKEducationalMaterialVersion FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); + +CREATE TABLE AttachmentVersionComposition +( + VersionEducationalMaterialId int8 NOT NULL, + VersionMaterialId int8 NOT NULL, + VersionPublishedAt timestamp NOT NULL, + AttachmentId int8 NOT NULL, + PRIMARY KEY (VersionEducationalMaterialId, + VersionMaterialId, + VersionPublishedAt, + AttachmentId) +); +ALTER TABLE AttachmentVersionComposition + ADD CONSTRAINT FKVersionCompositionAttachment FOREIGN KEY (VersionEducationalMaterialId, VersionMaterialId, VersionPublishedAt) REFERENCES VersionComposition (EducationalMaterialId, MaterialId, 
PublishedAt); +ALTER TABLE AttachmentVersionComposition + ADD CONSTRAINT FKAttachmentVersion FOREIGN KEY (AttachmentId) REFERENCES Attachment (Id); + +CREATE TABLE LicenseCode +( + Code text NOT NULL, + License text NOT NULL, + PRIMARY KEY (Code) +); + +INSERT INTO LicenseCode (Code, License) +VALUES ('CCBY4.0', 'CC BY 4.0'); +INSERT INTO LicenseCode (Code, License) +VALUES ('CCBYNC4.0', 'CC BY-NC 4.0'); +INSERT INTO LicenseCode (Code, License) +VALUES ('CCBYNCND4.0', 'CC BY-NC-ND 4.0'); +INSERT INTO LicenseCode (Code, License) +VALUES ('CCBYNCSA4.0', 'CC BY-NC-SA 4.0'); +INSERT INTO LicenseCode (Code, License) +VALUES ('CCBYND4.0', 'CC BY-ND 4.0'); +INSERT INTO LicenseCode (Code, License) +VALUES ('CCBYSA4.0', 'CC BY-SA 4.0'); + +ALTER TABLE TemporaryAttachment + ADD CONSTRAINT FKTempAttachment FOREIGN KEY (AttachmentId) REFERENCES Attachment (Id); + +CREATE TABLE CollectionEducationalMaterial +( + CollectionId int8 NOT NULL, + EducationalMaterialId int8 NOT NULL, + Priority int4 DEFAULT 999 NOT NULL, + PRIMARY KEY (CollectionId, + EducationalMaterialId) +); +CREATE TABLE Collection +( + Id BIGSERIAL NOT NULL, + CreatedAt timestamp with time zone NOT NULL, + UpdatedAt timestamp with time zone, + PublishedAt timestamp with time zone, + CreatedBy text NOT NULL, + AgeRangeMin int4, + AgeRangeMax int4, + CollectionName varchar(255) NOT NULL, + Description varchar(2000), + PRIMARY KEY (Id) +); +CREATE TABLE UsersCollection +( + CollectionId int8 NOT NULL, + UsersUserName text NOT NULL, + PRIMARY KEY (CollectionId, + UsersUserName) +); +ALTER TABLE CollectionEducationalMaterial + ADD CONSTRAINT FKMaterialCollection FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Restrict; +ALTER TABLE UsersCollection + ADD CONSTRAINT FKUsersCollection FOREIGN KEY (UsersUserName) REFERENCES Users (UserName); +ALTER TABLE CollectionEducationalMaterial + ADD CONSTRAINT FKCollectionMaterial FOREIGN KEY (CollectionId) REFERENCES Collection (Id) ON DELETE 
Cascade; +ALTER TABLE UsersCollection + ADD CONSTRAINT FKCollectionUsers FOREIGN KEY (CollectionId) REFERENCES Collection (Id) ON DELETE Cascade; + +CREATE TABLE CollectionKeyWord +( + Id BIGSERIAL NOT NULL, + Value text NOT NULL, + CollectionId int8 NOT NULL, + KeywordKey text NOT NULL, + PRIMARY KEY (Id) +); +CREATE TABLE CollectionAlignmentObject +( + Id BIGSERIAL NOT NULL, + AlignmentType text NOT NULL, + CollectionId int8 NOT NULL, + TargetName text NOT NULL, + Source text NOT NULL, + EducationalFramework text, + ObjectKey text NOT NULL, + TargetUrl text, + PRIMARY KEY (Id) +); +CREATE TABLE CollectionEducationalUse +( + Id BIGSERIAL NOT NULL, + EducationalUseKey text NOT NULL, + CollectionId int8 NOT NULL, + Value text NOT NULL, + PRIMARY KEY (Id) +); +CREATE TABLE CollectionLanguage +( + Id BIGSERIAL NOT NULL, + Language text NOT NULL, + CollectionId int8 NOT NULL, + PRIMARY KEY (Id) +); +CREATE TABLE CollectionEducationalAudience +( + Id BIGSERIAL NOT NULL, + EducationalRole text NOT NULL, + CollectionId int8 NOT NULL, + EducationalRoleKey text NOT NULL, + PRIMARY KEY (Id) +); +CREATE TABLE CollectionAccessibilityHazard +( + Id BIGSERIAL NOT NULL, + Value text NOT NULL, + AccessibilityHazardKey text NOT NULL, + CollectionId int8 NOT NULL, + PRIMARY KEY (Id) +); +CREATE TABLE CollectionAccessibilityFeature +( + Id SERIAL NOT NULL, + Value text NOT NULL, + AccessibilityFeatureKey text NOT NULL, + CollectionId int8 NOT NULL, + PRIMARY KEY (Id) +); + +ALTER TABLE CollectionAccessibilityHazard + ADD CONSTRAINT FKCollectionAccessibilityHazard FOREIGN KEY (CollectionId) REFERENCES Collection (Id); +ALTER TABLE CollectionAccessibilityFeature + ADD CONSTRAINT FKCollectionAccessibilityFeature FOREIGN KEY (CollectionId) REFERENCES Collection (Id); +ALTER TABLE CollectionEducationalAudience + ADD CONSTRAINT FKCollectionEducationalAudience FOREIGN KEY (CollectionId) REFERENCES Collection (Id) ON DELETE Cascade; +ALTER TABLE CollectionEducationalUse + ADD CONSTRAINT 
FKCollectionEducationalUse FOREIGN KEY (CollectionId) REFERENCES Collection (Id) ON DELETE Cascade; +ALTER TABLE CollectionAlignmentObject + ADD CONSTRAINT FKCollectionAligmentObject FOREIGN KEY (CollectionId) REFERENCES Collection (Id) ON DELETE Cascade; +ALTER TABLE CollectionLanguage + ADD CONSTRAINT FKCollectionLanguage FOREIGN KEY (CollectionId) REFERENCES Collection (Id) ON DELETE Cascade; +ALTER TABLE CollectionKeyWord + ADD CONSTRAINT FKCollectionKeyWords FOREIGN KEY (CollectionId) REFERENCES Collection (Id) ON DELETE Cascade; + +CREATE TABLE CollectionHeading +( + Id BIGSERIAL NOT NULL, + Heading varchar(255) NOT NULL, + Description varchar(2000), + Priority int4 DEFAULT 999 NOT NULL, + CollectionId int8 NOT NULL, + PRIMARY KEY (Id) +); +ALTER TABLE CollectionHeading + ADD CONSTRAINT FKCollectionHeading FOREIGN KEY (CollectionId) REFERENCES Collection (Id); + +CREATE TABLE CollectionEducationalLevel +( + Id BIGSERIAL NOT NULL, + EducationalLevelKey text NOT NULL, + CollectionId int8 NOT NULL, + Value text NOT NULL, + PRIMARY KEY (Id) +); +ALTER TABLE CollectionEducationalLevel + ADD CONSTRAINT FKCollectionEducationalLevel FOREIGN KEY (CollectionId) REFERENCES Collection (Id) ON DELETE Cascade; + +CREATE TABLE collectionthumbnail +( + id BIGSERIAL NOT NULL, + filepath text NOT NULL, + mimetype text NOT NULL, + filename text NOT NULL, + obsoleted int4 DEFAULT 0 NOT NULL, + filekey text NOT NULL, + filebucket text NOT NULL, + collectionid int8 NOT NULL, + PRIMARY KEY (id) +); +ALTER TABLE collectionthumbnail + ADD CONSTRAINT FKCollectionThumbnail FOREIGN KEY (collectionid) REFERENCES Collection (Id); + +CREATE TABLE AccessibilityFeatureExtension +( + Id BIGSERIAL NOT NULL, + Value text NOT NULL, + AccessibilityFeatureKey text NOT NULL, + EducationalMaterialId int8 NOT NULL, + UsersUserName text NOT NULL, + PRIMARY KEY (Id) +); +CREATE TABLE AccessibilityHazardExtension +( + Id BIGSERIAL NOT NULL, + Value text NOT NULL, + AccessibilityHazardKey text NOT NULL, 
+ EducationalMaterialId int8 NOT NULL, + UsersUserName text NOT NULL, + PRIMARY KEY (Id) +); +CREATE TABLE EducationalLevelExtension +( + Id BIGSERIAL NOT NULL, + Value text NOT NULL, + EducationalLevelKey text NOT NULL, + EducationalMaterialId int8 NOT NULL, + UsersUserName text NOT NULL, + PRIMARY KEY (Id) +); +CREATE TABLE KeyWordExtension +( + Id BIGSERIAL NOT NULL, + Value text NOT NULL, + EducationalMaterialId int8 NOT NULL, + KeywordKey text NOT NULL, + UsersUserName text NOT NULL, + PRIMARY KEY (Id) +); +ALTER TABLE AccessibilityFeatureExtension + ADD CONSTRAINT fkAccessibilityFeatureExtension FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); +ALTER TABLE AccessibilityHazardExtension + ADD CONSTRAINT fkAccessibilityHazardExtension FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); +ALTER TABLE EducationalLevelExtension + ADD CONSTRAINT fkEducationalLevelExtension FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); +ALTER TABLE KeyWordExtension + ADD CONSTRAINT fkKeyWordExtension FOREIGN KEY (UsersUserName) REFERENCES Users (UserName); +ALTER TABLE AccessibilityFeatureExtension + ADD CONSTRAINT fkUserAccessibilityFeatureExtension FOREIGN KEY (UsersUserName) REFERENCES Users (UserName); +ALTER TABLE AccessibilityHazardExtension + ADD CONSTRAINT fkUsersAccessibiltyHazardExtension FOREIGN KEY (UsersUserName) REFERENCES Users (UserName); +ALTER TABLE EducationalLevelExtension + ADD CONSTRAINT fkUsersEducationalLevelExtension FOREIGN KEY (UsersUserName) REFERENCES Users (UserName); +ALTER TABLE KeyWordExtension + ADD CONSTRAINT fkUsersKeyWordExtension FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); + +ALTER TABLE AccessibilityFeatureExtension + ADD CONSTRAINT constraint_AccessibilityFeatureExtension UNIQUE (accessibilityfeaturekey, educationalmaterialid); +ALTER TABLE AccessibilityHazardExtension + ADD CONSTRAINT constraint_AccessibilityHazardExtension UNIQUE 
(accessibilityhazardkey, educationalmaterialid); +ALTER TABLE EducationalLevelExtension + ADD CONSTRAINT constraint_EducationalLevelExtension UNIQUE (educationallevelkey, educationalmaterialid); +ALTER TABLE KeyWordExtension + ADD CONSTRAINT constraint_KeyWordExtension UNIQUE (keywordkey, educationalmaterialid); + + +CREATE TABLE aoeuser +( + username varchar(255) NOT NULL, + PRIMARY KEY (username) +); + +CREATE TABLE educationalMaterialVersion +( + educationalMaterialId int8 NOT NULL, + publishedAt timestamp NOT NULL, + urn text, + PRIMARY KEY (educationalMaterialId, + publishedAt) +); + +CREATE TABLE notification +( + nf_id BIGSERIAL PRIMARY KEY, + nf_text VARCHAR(1500) NOT NULL, + nf_type VARCHAR(255) NOT NULL, + nf_created_at TIMESTAMP with time zone, + nf_show_since TIMESTAMP with time zone, + nf_show_until TIMESTAMP with time zone, + nf_disabled BOOLEAN, + nf_username VARCHAR(255) NOT NULL + REFERENCES aoeuser + ON UPDATE CASCADE +); + + + +ALTER TABLE AlignmentObject + ADD CONSTRAINT FKAlignmentObject FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; +ALTER TABLE EducationalMaterial + ADD CONSTRAINT FKEducationalMaterial FOREIGN KEY (UsersUserName) REFERENCES Users (UserName) ON DELETE Restrict; +ALTER TABLE EducationalAudience + ADD CONSTRAINT FKEducationalAudience FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; +ALTER TABLE LearningResourceType + ADD CONSTRAINT FKLearningResourceType FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; +ALTER TABLE KeyWord + ADD CONSTRAINT FKKeyWord FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; +ALTER TABLE EducationalUse + ADD CONSTRAINT FKEducationalUse FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; +ALTER TABLE IsBasedOn + ADD CONSTRAINT FKIsBasedOn FOREIGN KEY (EducationalMaterialId) REFERENCES 
EducationalMaterial (Id) ON DELETE Cascade; +ALTER TABLE InLanguage + ADD CONSTRAINT FKInLanguage FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; +ALTER TABLE Material + ADD CONSTRAINT FKMaterial FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Restrict; +ALTER TABLE Record + ADD CONSTRAINT FKRecord FOREIGN KEY (MaterialId) REFERENCES Material (Id) ON DELETE Restrict; +ALTER TABLE EducationalLevel + ADD CONSTRAINT fk_EducationalLevel FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; +ALTER TABLE Publisher + ADD CONSTRAINT fk_Publisher FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; +ALTER TABLE MaterialDescription + ADD CONSTRAINT fk_Description FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; +ALTER TABLE MaterialName + ADD CONSTRAINT fk_MaterialName FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; +ALTER TABLE AccessibilityFeature + ADD CONSTRAINT fk_AccessibilityFeature FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; +ALTER TABLE AccessibilityHazard + ADD CONSTRAINT fk_AccessibilityHazard FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; +ALTER TABLE Author + ADD CONSTRAINT fk_author FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Cascade; +ALTER TABLE MaterialDisplayName + ADD CONSTRAINT fk_MaterialDisplayName FOREIGN KEY (MaterialId) REFERENCES Material (Id) ON DELETE Cascade; +ALTER TABLE temporaryrecord + ADD CONSTRAINT fk_temporaryrecord FOREIGN KEY (MaterialId) REFERENCES Material (Id) ON DELETE Restrict; +ALTER TABLE Thumbnail + ADD CONSTRAINT fk_thumbnail FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id) ON DELETE Restrict; + +ALTER TABLE materialname + ADD CONSTRAINT 
constraint_lang_id UNIQUE (language, educationalmaterialid); +ALTER TABLE materialdescription + ADD CONSTRAINT constraint_materialdescription_lang_id UNIQUE (language, educationalmaterialid); +ALTER TABLE educationalaudience + ADD CONSTRAINT constraint_educationalaudience UNIQUE (educationalrolekey, educationalmaterialid); +ALTER TABLE educationaluse + ADD CONSTRAINT constraint_educationaluse UNIQUE (educationalusekey, educationalmaterialid); +ALTER TABLE learningresourcetype + ADD CONSTRAINT constraint_learningresourcetype UNIQUE (learningresourcetypekey, educationalmaterialid); +ALTER TABLE inlanguage + ADD CONSTRAINT constraint_inlanguage UNIQUE (inlanguage, educationalmaterialid); +ALTER TABLE keyword + ADD CONSTRAINT constraint_keyword UNIQUE (keywordkey, educationalmaterialid); +ALTER TABLE publisher + ADD CONSTRAINT constraint_publisher UNIQUE (PublisherKey, educationalmaterialid); +ALTER TABLE isbasedon + ADD CONSTRAINT constraint_isbasedon UNIQUE (materialname, educationalmaterialid); +ALTER TABLE alignmentobject + ADD CONSTRAINT constraint_alignmentobject UNIQUE (alignmentType, targetName, source, educationalmaterialid); +ALTER TABLE materialdisplayname + ADD CONSTRAINT constraint_materialdisplayname UNIQUE (language, materialid); +ALTER TABLE accessibilityfeature + ADD CONSTRAINT constraint_accessibilityfeature UNIQUE (accessibilityfeaturekey, educationalmaterialid); +ALTER TABLE accessibilityhazard + ADD CONSTRAINT constraint_accessibilityhazard UNIQUE (accessibilityhazardkey, educationalmaterialid); +ALTER TABLE EducationalLevel + ADD CONSTRAINT constraint_EducationalLevel UNIQUE (EducationalLevelKey, educationalmaterialid); + +ALTER TABLE Rating + ADD CONSTRAINT FKRatingUsers FOREIGN KEY (UsersUserName) REFERENCES Users (UserName); +ALTER TABLE Rating + ADD CONSTRAINT FKRatingEducationalMaterial FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); +ALTER TABLE Rating + ADD CONSTRAINT constraint_Rating UNIQUE (UsersUserName, 
educationalmaterialid); + + + +ALTER TABLE alignmentobject + DROP CONSTRAINT constraint_alignmentobject; +ALTER TABLE alignmentobject + ADD CONSTRAINT constraint_alignmentobject UNIQUE (alignmentType, objectkey, source, educationalmaterialid); + + +ALTER TABLE VersionComposition + ADD CONSTRAINT fkversioncomposition FOREIGN KEY (EducationalMaterialId, PublishedAt) REFERENCES EducationalMaterialVersion (EducationalMaterialId, PublishedAt); + +ALTER TABLE EducationalMaterialVersion + ADD CONSTRAINT FKemversion FOREIGN KEY (EducationalMaterialId) REFERENCES EducationalMaterial (Id); diff --git a/docker/init-scripts/aws-shutdown.sh b/docker/init-scripts/aws-shutdown.sh new file mode 100755 index 000000000..e1be412a5 --- /dev/null +++ b/docker/init-scripts/aws-shutdown.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +copy_s3_bucket_to_directory() { + local bucket_name=$1 + local target_directory=$2 + + echo "Copying files from S3 bucket ${bucket_name} to ${target_directory}..." + + # Use awslocal to copy files from the specified bucket to the target directory + awslocal s3 cp s3://${bucket_name} ${target_directory} --recursive + + echo "Files copied successfully from ${bucket_name} to ${target_directory}." +} + +# Call the function for each bucket-directory pair +copy_s3_bucket_to_directory "aoe" "/host-directory/aoe" +copy_s3_bucket_to_directory "aoepdf" "/host-directory/aoepdf" +copy_s3_bucket_to_directory "aoethumbnail" "/host-directory/aoethumbnail" diff --git a/docker/init-scripts/init-aws.sh b/docker/init-scripts/init-aws.sh new file mode 100755 index 000000000..a8e83d016 --- /dev/null +++ b/docker/init-scripts/init-aws.sh @@ -0,0 +1,34 @@ +#!/bin/sh + +awslocal s3 mb s3://aoe +awslocal s3 mb s3://aoepdf +awslocal s3 mb s3://aoethumbnail + +echo "S3 bucket 'aoe' created." + +UPLOAD_DIR="/host-directory" + +if [ -d "$UPLOAD_DIR" ] && [ "$(ls -A "$UPLOAD_DIR")" ]; then + echo "Uploading files from $UPLOAD_DIR to corresponding S3 buckets..." 
+ + for subfolder in "$UPLOAD_DIR"/*; do + if [ -d "$subfolder" ]; then + bucket_name=$(basename "$subfolder") + echo "Processing folder '$subfolder' for S3 bucket '$bucket_name'..." + + if [ "$(ls -A "$subfolder")" ]; then + for file in "$subfolder"/*; do + if [ -f "$file" ]; then + echo "Uploading $file to S3 bucket '$bucket_name'..." + awslocal s3 cp "$file" "s3://$bucket_name/$(basename "$file")" + echo "$file uploaded to S3 bucket '$bucket_name'." + fi + done + else + echo "No files to upload in folder '$subfolder'." + fi + fi + done +else + echo "No files or folders to upload in $UPLOAD_DIR." +fi \ No newline at end of file diff --git a/docker/init-scripts/init-mongo.js b/docker/init-scripts/init-mongo.js new file mode 100644 index 000000000..6cccfe31a --- /dev/null +++ b/docker/init-scripts/init-mongo.js @@ -0,0 +1,8 @@ +db = db.getSiblingDB('aoe'); + +// Create a new user with readWrite access on the appdb database +db.createUser({ + user: 'aoeuser', + pwd: 'aoepassword', // Replace with your desired password + roles: [{ role: 'readWrite', db: 'aoe' }] +}); \ No newline at end of file diff --git a/docker/nginx/nginx.conf b/docker/nginx/nginx.conf new file mode 100644 index 000000000..371edab7f --- /dev/null +++ b/docker/nginx/nginx.conf @@ -0,0 +1,135 @@ +events { } + +http { + upstream api_service { + server aoe-web-backend:3000; + } + + upstream reference_service { + server aoe-semantic-apis:3002; + } + + upstream frontend { + server aoe-web-frontend:8080; + } + + upstream oidc { + server aoe-oidc-server:80; + } + + upstream streaming_service { + server aoe-streaming-app:3001; + } + + server { + listen 80; + server_name aoe-oidc-server; + + location / { + proxy_pass http://oidc; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + } + + server { + listen 443 ssl; + server_name aoe-streaming-app; + ssl_certificate 
/etc/nginx/certs/nginx-selfsigned.crt; + ssl_certificate_key /etc/nginx/certs/nginx-selfsigned.key; + + ssl_protocols TLSv1.2 TLSv1.3; + ssl_prefer_server_ciphers on; + + location / { + proxy_pass http://streaming_service; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + } + + server { + listen 443 ssl; + server_name lessons.demo.aoe.fi; + ssl_certificate /etc/nginx/certs/nginx-selfsigned.crt; + ssl_certificate_key /etc/nginx/certs/nginx-selfsigned.key; + + ssl_protocols TLSv1.2 TLSv1.3; + ssl_prefer_server_ciphers on; + + location /embed/ { + proxy_pass http://api_service; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + } + + server { + listen 443 ssl; + # SSL certificate and key + ssl_certificate /etc/nginx/certs/nginx-selfsigned.crt; + ssl_certificate_key /etc/nginx/certs/nginx-selfsigned.key; + + ssl_protocols TLSv1.2 TLSv1.3; + ssl_prefer_server_ciphers on; + client_max_body_size 1000M; + location / { + proxy_pass http://streaming_service; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + } + + server { + listen 443 ssl; + server_name demo.aoe.fi; + # SSL certificate and key + ssl_certificate /etc/nginx/certs/nginx-selfsigned.crt; + ssl_certificate_key /etc/nginx/certs/nginx-selfsigned.key; + + ssl_protocols TLSv1.2 TLSv1.3; + ssl_prefer_server_ciphers on; + client_max_body_size 1000M; + location / { + proxy_pass http://frontend; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + location /api/ { + 
proxy_pass http://api_service; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + location /stream/ { + proxy_pass http://streaming_service; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + location /ref/api/v1 { + proxy_pass http://reference_service; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + } +} \ No newline at end of file diff --git a/docker/nginx/san.cnf b/docker/nginx/san.cnf new file mode 100644 index 000000000..7f343c1a6 --- /dev/null +++ b/docker/nginx/san.cnf @@ -0,0 +1,22 @@ +[req] +default_bits = 2048 +distinguished_name = req_distinguished_name +req_extensions = req_ext +x509_extensions = v3_ca +prompt = no + +[req_distinguished_name] +countryName = FI +stateOrProvinceName = North Savo +localityName = Kuopio +organizationName = OPH +commonName = demo.aoe.fi + +[req_ext] +subjectAltName = @alt_names + +[v3_ca] +subjectAltName = @alt_names + +[alt_names] +DNS.1 = demo.aoe.fi diff --git a/scripts/build-functions.sh b/scripts/build-functions.sh new file mode 100644 index 000000000..ca0a0b882 --- /dev/null +++ b/scripts/build-functions.sh @@ -0,0 +1,60 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +# allow sourcing this file multiple times from different scripts +if [ -n "${BUILD_FUNCTIONS_SOURCED:-}" ]; then + return +fi +readonly BUILD_FUNCTIONS_SOURCED="true" + +function buildService { + cd "$repo" + + require_command docker + + local service=$1 + local compose_tag=$2 + local tag_value + + local img_tag="$github_registry${service}:${IMAGE_TAG}" + + local tags_to_push=() + + start_gh_actions_group "Building service 
$service with tag $img_tag" + + if ! running_on_gh_actions; then + tag_value="${service}:latest" + else + tag_value="$img_tag" + fi + + eval "${compose_tag}='${tag_value}'" + export "${compose_tag}" + docker compose build "$service" + tags_to_push+=("$img_tag") + + end_gh_actions_group + + if [ -n "${GITHUB_REF_NAME:-}" ]; then + # Github refs often have slashes, which are not allowed in tag names + # https://github.com/opencontainers/distribution-spec/blob/main/spec.md#pulling-manifests + readonly clean_ref_name="${GITHUB_REF_NAME//[!a-zA-Z0-9._-]/-}" + readonly ref_tag="$github_registry${service}:$clean_ref_name" + info "Tagging as $ref_tag" + docker tag "$img_tag" "$ref_tag" + tags_to_push+=("$ref_tag") + fi + + if running_on_gh_actions; then + start_gh_actions_group "Pushing tags" + for tag in "${tags_to_push[@]}" + do + info "docker push $tag" + docker push "$tag" + done + end_gh_actions_group + else + info "Not pushing tags when running locally" + fi + +} \ No newline at end of file diff --git a/scripts/common-functions.sh b/scripts/common-functions.sh new file mode 100755 index 000000000..02f4bd610 --- /dev/null +++ b/scripts/common-functions.sh @@ -0,0 +1,234 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail + +# allow sourcing this file multiple times from different scripts +if [ -n "${COMMON_FUNCTIONS_SOURCED:-}" ]; then + return +fi +readonly COMMON_FUNCTIONS_SOURCED="true" + +readonly revision="${GITHUB_SHA:-$(git rev-parse HEAD)}" +readonly repo="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )" +NODE_VERSION="$(cat "$repo/.nvmrc")" && readonly NODE_VERSION + +read -r -d '' AWS_CLI_DOCKERFILE < "$aws_cli_dockerfile_temp" + + require_command shasum + local -r checksum_file="$aws_cli_dockerfile_temp.checksum" + + function build_aws_cli { + echo "$AWS_CLI_DOCKERFILE" | docker build --tag "amazon/aws-cli:local" - + shasum "$aws_cli_dockerfile_temp" > "$checksum_file" + } + + if [ ! 
$(docker images -q amazon/aws-cli:local) ]; then + echo "no amazon/aws-cli:local image found; running docker build" + build_aws_cli + elif [ ! -f "$checksum_file" ]; then + echo "no checksum for aws cli dockerfile; running docker build" + build_aws_cli + elif ! shasum --check "$checksum_file"; then + info "aws cli dockerfile seems to have changed, running docker build" + build_aws_cli + else + info "aws cli dockerfile doesn't seem to have changed, skipping docker build" + fi +} + +function require_dev_aws_session { + info "Verifying that AWS session has not expired" + ## SSO Login does not work in container + aws sts get-caller-identity --profile=aoe-dev 1>/dev/null || { + info "Session is expired" + aws --profile aoe-dev sso login + } +} + +function configure_aws_credentials { + if [[ "${CI:-}" = "true" ]]; then + export AWS_REGION=${AWS_REGION:-"eu-west-1"} + ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) || { + fatal "Could not check that AWS credentials are working." + } + + export REGISTRY="$ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com" + fi + + export AWS_DEFAULT_REGION="$AWS_REGION" + echo "Constructed registry: $REGISTRY" + +} + +function use_correct_node_version { + export NVM_DIR="${NVM_DIR:-$HOME/.cache/nvm}" + set +o errexit + source "$repo/scripts/nvm.sh" + set -o errexit + nvm use "$NODE_VERSION" || nvm install -b "$NODE_VERSION" +} + +function npm_ci_if_package_lock_has_changed { + info "Checking if npm ci needs to be run" + require_command shasum + local -r checksum_file=".package-lock.json.checksum" + + function run_npm_ci { + npm ci + shasum package-lock.json > "$checksum_file" + } + + if [ ! -f "$checksum_file" ]; then + echo "new package-lock.json; running npm ci" + run_npm_ci + elif ! 
shasum --check "$checksum_file"; then + info "package-lock.json seems to have changed, running npm ci" + run_npm_ci + else + info "package-lock.json doesn't seem to have changed, skipping npm ci" + fi +} + +function require_command { + if ! command -v "$1" > /dev/null; then + fatal "I require $1 but it's not installed. Aborting." + fi +} + +function require_docker { + require_command docker + docker ps > /dev/null 2>&1 || fatal "Running 'docker ps' failed. Is docker daemon running? Aborting." +} + +function require_docker_compose { + docker compose > /dev/null || fatal "docker compose missing" +} + +function parse_env_from_script_name { + local BASE_FILENAME="$1" + FILE_NAME=$(basename "$0") + if echo "${FILE_NAME}" | grep -E -q "$BASE_FILENAME-.([^-]+)\.sh"; then + ENV=$(echo "${FILE_NAME}" | sed -E -e "s|$BASE_FILENAME-([^-]+)\.sh|\1|g") + export ENV + echo "Targeting environment [${ENV}]" + else + echo >&2 "Don't call this script directly" + exit 1 + fi +} + +CURRENT_GROUP="" +GROUP_START_TIME=0 +function start_gh_actions_group { + local group_title="$1" + GROUP_START_TIME=$(date +%s) + CURRENT_GROUP="$group_title" + + if [ "${GITHUB_ACTIONS:-}" == "true" ]; then + echo "::group::$group_title" + fi +} + +function end_gh_actions_group { + if [ "${GITHUB_ACTIONS:-}" == "true" ]; then + echo "::endgroup::" + fi + END_TIME=$(date +%s) + info "$CURRENT_GROUP took $(( END_TIME - GROUP_START_TIME )) seconds" +} + +function running_on_gh_actions { + [ "${GITHUB_ACTIONS:-}" == "true" ] +} + +function info { + log "INFO" "$1" +} + +function fatal { + log "ERROR" "$1" + exit 1 +} + +function log { + local -r level="$1" + local -r message="$2" + local -r timestamp=$(date +"%Y-%m-%d %H:%M:%S") + + >&2 echo -e "${timestamp} ${level} ${message}" +} + +function get_playwright_version { + cd "$repo/playwright" + npm list --package-lock-only --json "@playwright/test" | jq --raw-output '.dependencies."@playwright/test".version' +} + +function require_aws_session_for_untuva { + 
info "Verifying that AWS session has not expired" + ## SSO Login does not work in container + aws sts get-caller-identity --profile=oph-ludos-dev 1>/dev/null || { + info "Session is expired" + aws --profile oph-ludos-dev sso login + } + export AWS_PROFILE="oph-ludos-dev" + export AWS_REGION="eu-west-1" + export AWS_DEFAULT_REGION="$AWS_REGION" + info "Using AWS profile $AWS_PROFILE" +} +function require_aws_session_for_qa { + info "Verifying that AWS session has not expired" + ## SSO Login does not work in container + aws sts get-caller-identity --profile=oph-ludos-qa 1>/dev/null || { + info "Session is expired" + aws --profile oph-ludos-qa sso login + } + export AWS_PROFILE="oph-ludos-qa" + export AWS_REGION="eu-west-1" + export AWS_DEFAULT_REGION="$AWS_REGION" + info "Using AWS profile $AWS_PROFILE" +} +function require_aws_session_for_prod { + info "Verifying that AWS session has not expired" + ## SSO Login does not work in container + aws sts get-caller-identity --profile=oph-ludos-prod 1>/dev/null || { + info "Session is expired" + aws --profile oph-ludos-prod sso login + } + export AWS_PROFILE="oph-ludos-prod" + export AWS_REGION="eu-west-1" + export AWS_DEFAULT_REGION="$AWS_REGION" + info "Using AWS profile $AWS_PROFILE" +} + +function require_util_aws_session { + info "Verifying that AWS session has not expired" + ## SSO Login does not work in container + aws sts get-caller-identity --profile=oph-ludos-utility 1>/dev/null || { + info "Session is expired" + aws --profile oph-ludos-utility sso login + } + export AWS_REGION="eu-west-1" + export AWS_DEFAULT_REGION="$AWS_REGION" + export AWS_PROFILE="oph-ludos-utility" + info "Using AWS profile $AWS_PROFILE" +} + +function get_secret { + local name="$1" + aws secretsmanager get-secret-value --secret-id "$name" --query "SecretString" --output text +} + +function wait_for_container_to_be_healthy { + require_command docker + local -r container_name="$1" + + info "Waiting for docker container $container_name to be 
healthy" + until [ "$(docker inspect -f {{.State.Health.Status}} "$container_name" 2>/dev/null || echo "not-running")" == "healthy" ]; do + sleep 2; + done; +} \ No newline at end of file diff --git a/scripts/fetch_secrets.sh b/scripts/fetch_secrets.sh new file mode 100755 index 000000000..ef2cd7e6d --- /dev/null +++ b/scripts/fetch_secrets.sh @@ -0,0 +1,13 @@ +#!/bin/bash +set -euo pipefail + +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/common-functions.sh" + +require_dev_aws_session + +echo "Fetching secret from AWS Secrets Manager..." +aws s3 cp s3://aoe-local-dev/semantic-api/.env "$(dirname "$0")/../aoe-semantic-apis/.env" --profile aoe-dev +aws s3 cp s3://aoe-local-dev/data-services/.env "$(dirname "$0")/../aoe-data-services/.env" --profile aoe-dev +aws s3 cp s3://aoe-local-dev/data-analytics/.env "$(dirname "$0")/../aoe-data-analytics/.env" --profile aoe-dev +aws s3 cp s3://aoe-local-dev/web-backend/.env "$(dirname "$0")/../aoe-web-backend/.env" --profile aoe-dev +aws s3 cp s3://aoe-local-dev/streaming-app/.env "$(dirname "$0")/../aoe-streaming-app/.env" --profile aoe-dev diff --git a/scripts/nvm.sh b/scripts/nvm.sh new file mode 100644 index 000000000..72842299e --- /dev/null +++ b/scripts/nvm.sh @@ -0,0 +1,4539 @@ +# Node Version Manager +# Implemented as a POSIX-compliant function +# Should work on sh, dash, bash, ksh, zsh +# To use source this file from your bash profile +# +# Implemented by Tim Caswell +# with much bash help from Matthew Ranney + +# "local" warning, quote expansion warning, sed warning, `local` warning +# shellcheck disable=SC2039,SC2016,SC2001,SC3043 +{ # this ensures the entire script is downloaded # + +# shellcheck disable=SC3028 +NVM_SCRIPT_SOURCE="$_" + +nvm_is_zsh() { + [ -n "${ZSH_VERSION-}" ] +} + +nvm_stdout_is_terminal() { + [ -t 1 ] +} + +nvm_echo() { + command printf %s\\n "$*" 2>/dev/null +} + +nvm_echo_with_colors() { + command printf %b\\n "$*" 2>/dev/null +} + +nvm_cd() { + \cd "$@" +} + +nvm_err() { + >&2 
nvm_echo "$@" +} + +nvm_err_with_colors() { + >&2 nvm_echo_with_colors "$@" +} + +nvm_grep() { + GREP_OPTIONS='' command grep "$@" +} + +nvm_has() { + type "${1-}" >/dev/null 2>&1 +} + +nvm_has_non_aliased() { + nvm_has "${1-}" && ! nvm_is_alias "${1-}" +} + +nvm_is_alias() { + # this is intentionally not "command alias" so it works in zsh. + \alias "${1-}" >/dev/null 2>&1 +} + +nvm_command_info() { + local COMMAND + local INFO + COMMAND="${1}" + if type "${COMMAND}" | nvm_grep -q hashed; then + INFO="$(type "${COMMAND}" | command sed -E 's/\(|\)//g' | command awk '{print $4}')" + elif type "${COMMAND}" | nvm_grep -q aliased; then + # shellcheck disable=SC2230 + INFO="$(which "${COMMAND}") ($(type "${COMMAND}" | command awk '{ $1=$2=$3=$4="" ;print }' | command sed -e 's/^\ *//g' -Ee "s/\`|'//g"))" + elif type "${COMMAND}" | nvm_grep -q "^${COMMAND} is an alias for"; then + # shellcheck disable=SC2230 + INFO="$(which "${COMMAND}") ($(type "${COMMAND}" | command awk '{ $1=$2=$3=$4=$5="" ;print }' | command sed 's/^\ *//g'))" + elif type "${COMMAND}" | nvm_grep -q "^${COMMAND} is /"; then + INFO="$(type "${COMMAND}" | command awk '{print $3}')" + else + INFO="$(type "${COMMAND}")" + fi + nvm_echo "${INFO}" +} + +nvm_has_colors() { + local NVM_NUM_COLORS + if nvm_has tput; then + NVM_NUM_COLORS="$(tput -T "${TERM:-vt100}" colors)" + fi + [ "${NVM_NUM_COLORS:--1}" -ge 8 ] +} + +nvm_curl_libz_support() { + curl -V 2>/dev/null | nvm_grep "^Features:" | nvm_grep -q "libz" +} + +nvm_curl_use_compression() { + nvm_curl_libz_support && nvm_version_greater_than_or_equal_to "$(nvm_curl_version)" 7.21.0 +} + +nvm_get_latest() { + local NVM_LATEST_URL + local CURL_COMPRESSED_FLAG + if nvm_has "curl"; then + if nvm_curl_use_compression; then + CURL_COMPRESSED_FLAG="--compressed" + fi + NVM_LATEST_URL="$(curl ${CURL_COMPRESSED_FLAG:-} -q -w "%{url_effective}\\n" -L -s -S https://latest.nvm.sh -o /dev/null)" + elif nvm_has "wget"; then + NVM_LATEST_URL="$(wget -q 
https://latest.nvm.sh --server-response -O /dev/null 2>&1 | command awk '/^ Location: /{DEST=$2} END{ print DEST }')" + else + nvm_err 'nvm needs curl or wget to proceed.' + return 1 + fi + if [ -z "${NVM_LATEST_URL}" ]; then + nvm_err "https://latest.nvm.sh did not redirect to the latest release on GitHub" + return 2 + fi + nvm_echo "${NVM_LATEST_URL##*/}" +} + +nvm_download() { + local CURL_COMPRESSED_FLAG + if nvm_has "curl"; then + if nvm_curl_use_compression; then + CURL_COMPRESSED_FLAG="--compressed" + fi + curl --fail ${CURL_COMPRESSED_FLAG:-} -q "$@" + elif nvm_has "wget"; then + # Emulate curl with wget + ARGS=$(nvm_echo "$@" | command sed -e 's/--progress-bar /--progress=bar /' \ + -e 's/--compressed //' \ + -e 's/--fail //' \ + -e 's/-L //' \ + -e 's/-I /--server-response /' \ + -e 's/-s /-q /' \ + -e 's/-sS /-nv /' \ + -e 's/-o /-O /' \ + -e 's/-C - /-c /') + # shellcheck disable=SC2086 + eval wget $ARGS + fi +} + +nvm_has_system_node() { + [ "$(nvm deactivate >/dev/null 2>&1 && command -v node)" != '' ] +} + +nvm_has_system_iojs() { + [ "$(nvm deactivate >/dev/null 2>&1 && command -v iojs)" != '' ] +} + +nvm_is_version_installed() { + if [ -z "${1-}" ]; then + return 1 + fi + local NVM_NODE_BINARY + NVM_NODE_BINARY='node' + if [ "_$(nvm_get_os)" = '_win' ]; then + NVM_NODE_BINARY='node.exe' + fi + if [ -x "$(nvm_version_path "$1" 2>/dev/null)/bin/${NVM_NODE_BINARY}" ]; then + return 0 + fi + return 1 +} + +nvm_print_npm_version() { + if nvm_has "npm"; then + local NPM_VERSION + NPM_VERSION="$(npm --version 2>/dev/null)" + if [ -n "${NPM_VERSION}" ]; then + command printf " (npm v${NPM_VERSION})" + fi + fi +} + +nvm_install_latest_npm() { + nvm_echo 'Attempting to upgrade to the latest working version of npm...' 
+ local NODE_VERSION + NODE_VERSION="$(nvm_strip_iojs_prefix "$(nvm_ls_current)")" + if [ "${NODE_VERSION}" = 'system' ]; then + NODE_VERSION="$(node --version)" + elif [ "${NODE_VERSION}" = 'none' ]; then + nvm_echo "Detected node version ${NODE_VERSION}, npm version v${NPM_VERSION}" + NODE_VERSION='' + fi + if [ -z "${NODE_VERSION}" ]; then + nvm_err 'Unable to obtain node version.' + return 1 + fi + local NPM_VERSION + NPM_VERSION="$(npm --version 2>/dev/null)" + if [ -z "${NPM_VERSION}" ]; then + nvm_err 'Unable to obtain npm version.' + return 2 + fi + + local NVM_NPM_CMD + NVM_NPM_CMD='npm' + if [ "${NVM_DEBUG-}" = 1 ]; then + nvm_echo "Detected node version ${NODE_VERSION}, npm version v${NPM_VERSION}" + NVM_NPM_CMD='nvm_echo npm' + fi + + local NVM_IS_0_6 + NVM_IS_0_6=0 + if nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 0.6.0 && nvm_version_greater 0.7.0 "${NODE_VERSION}"; then + NVM_IS_0_6=1 + fi + local NVM_IS_0_9 + NVM_IS_0_9=0 + if nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 0.9.0 && nvm_version_greater 0.10.0 "${NODE_VERSION}"; then + NVM_IS_0_9=1 + fi + + if [ $NVM_IS_0_6 -eq 1 ]; then + nvm_echo '* `node` v0.6.x can only upgrade to `npm` v1.3.x' + $NVM_NPM_CMD install -g npm@1.3 + elif [ $NVM_IS_0_9 -eq 0 ]; then + # node 0.9 breaks here, for some reason + if nvm_version_greater_than_or_equal_to "${NPM_VERSION}" 1.0.0 && nvm_version_greater 2.0.0 "${NPM_VERSION}"; then + nvm_echo '* `npm` v1.x needs to first jump to `npm` v1.4.28 to be able to upgrade further' + $NVM_NPM_CMD install -g npm@1.4.28 + elif nvm_version_greater_than_or_equal_to "${NPM_VERSION}" 2.0.0 && nvm_version_greater 3.0.0 "${NPM_VERSION}"; then + nvm_echo '* `npm` v2.x needs to first jump to the latest v2 to be able to upgrade further' + $NVM_NPM_CMD install -g npm@2 + fi + fi + + if [ $NVM_IS_0_9 -eq 1 ] || [ $NVM_IS_0_6 -eq 1 ]; then + nvm_echo '* node v0.6 and v0.9 are unable to upgrade further' + elif nvm_version_greater 1.1.0 "${NODE_VERSION}"; then + 
nvm_echo '* `npm` v4.5.x is the last version that works on `node` versions < v1.1.0' + $NVM_NPM_CMD install -g npm@4.5 + elif nvm_version_greater 4.0.0 "${NODE_VERSION}"; then + nvm_echo '* `npm` v5 and higher do not work on `node` versions below v4.0.0' + $NVM_NPM_CMD install -g npm@4 + elif [ $NVM_IS_0_9 -eq 0 ] && [ $NVM_IS_0_6 -eq 0 ]; then + local NVM_IS_4_4_OR_BELOW + NVM_IS_4_4_OR_BELOW=0 + if nvm_version_greater 4.5.0 "${NODE_VERSION}"; then + NVM_IS_4_4_OR_BELOW=1 + fi + + local NVM_IS_5_OR_ABOVE + NVM_IS_5_OR_ABOVE=0 + if [ $NVM_IS_4_4_OR_BELOW -eq 0 ] && nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 5.0.0; then + NVM_IS_5_OR_ABOVE=1 + fi + + local NVM_IS_6_OR_ABOVE + NVM_IS_6_OR_ABOVE=0 + local NVM_IS_6_2_OR_ABOVE + NVM_IS_6_2_OR_ABOVE=0 + if [ $NVM_IS_5_OR_ABOVE -eq 1 ] && nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 6.0.0; then + NVM_IS_6_OR_ABOVE=1 + if nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 6.2.0; then + NVM_IS_6_2_OR_ABOVE=1 + fi + fi + + local NVM_IS_9_OR_ABOVE + NVM_IS_9_OR_ABOVE=0 + local NVM_IS_9_3_OR_ABOVE + NVM_IS_9_3_OR_ABOVE=0 + if [ $NVM_IS_6_2_OR_ABOVE -eq 1 ] && nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 9.0.0; then + NVM_IS_9_OR_ABOVE=1 + if nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 9.3.0; then + NVM_IS_9_3_OR_ABOVE=1 + fi + fi + + local NVM_IS_10_OR_ABOVE + NVM_IS_10_OR_ABOVE=0 + if [ $NVM_IS_9_3_OR_ABOVE -eq 1 ] && nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 10.0.0; then + NVM_IS_10_OR_ABOVE=1 + fi + local NVM_IS_12_LTS_OR_ABOVE + NVM_IS_12_LTS_OR_ABOVE=0 + if [ $NVM_IS_10_OR_ABOVE -eq 1 ] && nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 12.13.0; then + NVM_IS_12_LTS_OR_ABOVE=1 + fi + local NVM_IS_13_OR_ABOVE + NVM_IS_13_OR_ABOVE=0 + if [ $NVM_IS_12_LTS_OR_ABOVE -eq 1 ] && nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 13.0.0; then + NVM_IS_13_OR_ABOVE=1 + fi + local NVM_IS_14_LTS_OR_ABOVE + NVM_IS_14_LTS_OR_ABOVE=0 + if [ $NVM_IS_13_OR_ABOVE -eq 1 
] && nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 14.15.0; then + NVM_IS_14_LTS_OR_ABOVE=1 + fi + local NVM_IS_14_17_OR_ABOVE + NVM_IS_14_17_OR_ABOVE=0 + if [ $NVM_IS_14_LTS_OR_ABOVE -eq 1 ] && nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 14.17.0; then + NVM_IS_14_17_OR_ABOVE=1 + fi + local NVM_IS_15_OR_ABOVE + NVM_IS_15_OR_ABOVE=0 + if [ $NVM_IS_14_LTS_OR_ABOVE -eq 1 ] && nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 15.0.0; then + NVM_IS_15_OR_ABOVE=1 + fi + local NVM_IS_16_OR_ABOVE + NVM_IS_16_OR_ABOVE=0 + if [ $NVM_IS_15_OR_ABOVE -eq 1 ] && nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 16.0.0; then + NVM_IS_16_OR_ABOVE=1 + fi + local NVM_IS_16_LTS_OR_ABOVE + NVM_IS_16_LTS_OR_ABOVE=0 + if [ $NVM_IS_16_OR_ABOVE -eq 1 ] && nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 16.13.0; then + NVM_IS_16_LTS_OR_ABOVE=1 + fi + local NVM_IS_17_OR_ABOVE + NVM_IS_17_OR_ABOVE=0 + if [ $NVM_IS_16_LTS_OR_ABOVE -eq 1 ] && nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 17.0.0; then + NVM_IS_17_OR_ABOVE=1 + fi + local NVM_IS_18_OR_ABOVE + NVM_IS_18_OR_ABOVE=0 + if [ $NVM_IS_17_OR_ABOVE -eq 1 ] && nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 18.0.0; then + NVM_IS_18_OR_ABOVE=1 + fi + local NVM_IS_18_17_OR_ABOVE + NVM_IS_18_17_OR_ABOVE=0 + if [ $NVM_IS_18_OR_ABOVE -eq 1 ] && nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 18.17.0; then + NVM_IS_18_17_OR_ABOVE=1 + fi + local NVM_IS_19_OR_ABOVE + NVM_IS_19_OR_ABOVE=0 + if [ $NVM_IS_18_17_OR_ABOVE -eq 1 ] && nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 19.0.0; then + NVM_IS_19_OR_ABOVE=1 + fi + local NVM_IS_20_5_OR_ABOVE + NVM_IS_20_5_OR_ABOVE=0 + if [ $NVM_IS_19_OR_ABOVE -eq 1 ] && nvm_version_greater_than_or_equal_to "${NODE_VERSION}" 20.5.0; then + NVM_IS_20_5_OR_ABOVE=1 + fi + + if [ $NVM_IS_4_4_OR_BELOW -eq 1 ] || { + [ $NVM_IS_5_OR_ABOVE -eq 1 ] && nvm_version_greater 5.10.0 "${NODE_VERSION}"; \ + }; then + nvm_echo '* `npm` `v5.3.x` is the last version 
that works on `node` 4.x versions below v4.4, or 5.x versions below v5.10, due to `Buffer.alloc`' + $NVM_NPM_CMD install -g npm@5.3 + elif [ $NVM_IS_4_4_OR_BELOW -eq 0 ] && nvm_version_greater 4.7.0 "${NODE_VERSION}"; then + nvm_echo '* `npm` `v5.4.1` is the last version that works on `node` `v4.5` and `v4.6`' + $NVM_NPM_CMD install -g npm@5.4.1 + elif [ $NVM_IS_6_OR_ABOVE -eq 0 ]; then + nvm_echo '* `npm` `v5.x` is the last version that works on `node` below `v6.0.0`' + $NVM_NPM_CMD install -g npm@5 + elif \ + { [ $NVM_IS_6_OR_ABOVE -eq 1 ] && [ $NVM_IS_6_2_OR_ABOVE -eq 0 ]; } \ + || { [ $NVM_IS_9_OR_ABOVE -eq 1 ] && [ $NVM_IS_9_3_OR_ABOVE -eq 0 ]; } \ + ; then + nvm_echo '* `npm` `v6.9` is the last version that works on `node` `v6.0.x`, `v6.1.x`, `v9.0.x`, `v9.1.x`, or `v9.2.x`' + $NVM_NPM_CMD install -g npm@6.9 + elif [ $NVM_IS_10_OR_ABOVE -eq 0 ]; then + if nvm_version_greater 4.4.4 "${NPM_VERSION}"; then + nvm_echo '* `npm` `v4.4.4` or later is required to install npm v6.14.18' + $NVM_NPM_CMD install -g npm@4 + fi + nvm_echo '* `npm` `v6.x` is the last version that works on `node` below `v10.0.0`' + $NVM_NPM_CMD install -g npm@6 + elif \ + [ $NVM_IS_12_LTS_OR_ABOVE -eq 0 ] \ + || { [ $NVM_IS_13_OR_ABOVE -eq 1 ] && [ $NVM_IS_14_LTS_OR_ABOVE -eq 0 ]; } \ + || { [ $NVM_IS_15_OR_ABOVE -eq 1 ] && [ $NVM_IS_16_OR_ABOVE -eq 0 ]; } \ + ; then + nvm_echo '* `npm` `v7.x` is the last version that works on `node` `v13`, `v15`, below `v12.13`, or `v14.0` - `v14.15`' + $NVM_NPM_CMD install -g npm@7 + elif \ + { [ $NVM_IS_12_LTS_OR_ABOVE -eq 1 ] && [ $NVM_IS_13_OR_ABOVE -eq 0 ]; } \ + || { [ $NVM_IS_14_LTS_OR_ABOVE -eq 1 ] && [ $NVM_IS_14_17_OR_ABOVE -eq 0 ]; } \ + || { [ $NVM_IS_16_OR_ABOVE -eq 1 ] && [ $NVM_IS_16_LTS_OR_ABOVE -eq 0 ]; } \ + || { [ $NVM_IS_17_OR_ABOVE -eq 1 ] && [ $NVM_IS_18_OR_ABOVE -eq 0 ]; } \ + ; then + nvm_echo '* `npm` `v8.x` is the last version that works on `node` `v12`, `v14.13` - `v14.16`, or `v16.0` - `v16.12`' + $NVM_NPM_CMD install -g npm@8 + 
elif \ + [ $NVM_IS_18_17_OR_ABOVE -eq 0 ] \ + || { [ $NVM_IS_19_OR_ABOVE -eq 1 ] && [ $NVM_IS_20_5_OR_ABOVE -eq 0 ]; } \ + ; then + nvm_echo '* `npm` `v9.x` is the last version that works on `node` `< v18.17`, `v19`, or `v20.0` - `v20.4`' + $NVM_NPM_CMD install -g npm@9 + else + nvm_echo '* Installing latest `npm`; if this does not work on your node version, please report a bug!' + $NVM_NPM_CMD install -g npm + fi + fi + nvm_echo "* npm upgraded to: v$(npm --version 2>/dev/null)" +} + +# Make zsh glob matching behave same as bash +# This fixes the "zsh: no matches found" errors +if [ -z "${NVM_CD_FLAGS-}" ]; then + export NVM_CD_FLAGS='' +fi +if nvm_is_zsh; then + NVM_CD_FLAGS="-q" +fi + +# Auto detect the NVM_DIR when not set +if [ -z "${NVM_DIR-}" ]; then + # shellcheck disable=SC2128 + if [ -n "${BASH_SOURCE-}" ]; then + # shellcheck disable=SC2169,SC3054 + NVM_SCRIPT_SOURCE="${BASH_SOURCE[0]}" + fi + # shellcheck disable=SC2086 + NVM_DIR="$(nvm_cd ${NVM_CD_FLAGS} "$(dirname "${NVM_SCRIPT_SOURCE:-$0}")" >/dev/null && \pwd)" + export NVM_DIR +else + # https://unix.stackexchange.com/a/198289 + case $NVM_DIR in + *[!/]*/) + NVM_DIR="${NVM_DIR%"${NVM_DIR##*[!/]}"}" + export NVM_DIR + nvm_err "Warning: \$NVM_DIR should not have trailing slashes" + ;; + esac +fi +unset NVM_SCRIPT_SOURCE 2>/dev/null + +nvm_tree_contains_path() { + local tree + tree="${1-}" + local node_path + node_path="${2-}" + + if [ "@${tree}@" = "@@" ] || [ "@${node_path}@" = "@@" ]; then + nvm_err "both the tree and the node path are required" + return 2 + fi + + local previous_pathdir + previous_pathdir="${node_path}" + local pathdir + pathdir=$(dirname "${previous_pathdir}") + while [ "${pathdir}" != '' ] && [ "${pathdir}" != '.' 
] && [ "${pathdir}" != '/' ] && + [ "${pathdir}" != "${tree}" ] && [ "${pathdir}" != "${previous_pathdir}" ]; do + previous_pathdir="${pathdir}" + pathdir=$(dirname "${previous_pathdir}") + done + [ "${pathdir}" = "${tree}" ] +} + +nvm_find_project_dir() { + local path_ + path_="${PWD}" + while [ "${path_}" != "" ] && [ "${path_}" != '.' ] && [ ! -f "${path_}/package.json" ] && [ ! -d "${path_}/node_modules" ]; do + path_=${path_%/*} + done + nvm_echo "${path_}" +} + +# Traverse up in directory tree to find containing folder +nvm_find_up() { + local path_ + path_="${PWD}" + while [ "${path_}" != "" ] && [ "${path_}" != '.' ] && [ ! -f "${path_}/${1-}" ]; do + path_=${path_%/*} + done + nvm_echo "${path_}" +} + +nvm_find_nvmrc() { + local dir + dir="$(nvm_find_up '.nvmrc')" + if [ -e "${dir}/.nvmrc" ]; then + nvm_echo "${dir}/.nvmrc" + fi +} + +nvm_nvmrc_invalid_msg() { + local error_text + error_text="invalid .nvmrc! +all non-commented content (anything after # is a comment) must be either: + - a single bare nvm-recognized version-ish + - or, multiple distinct key-value pairs, each key/value separated by a single equals sign (=) + +additionally, a single bare nvm-recognized version-ish must be present (after stripping comments)." 
+ + local warn_text + warn_text="non-commented content parsed: +${1}" + + nvm_err "$(nvm_wrap_with_color_code r "${error_text}") + +$(nvm_wrap_with_color_code y "${warn_text}")" +} + +nvm_process_nvmrc() { + local NVMRC_PATH="$1" + local lines + local unpaired_line + + lines=$(command sed 's/#.*//' "$NVMRC_PATH" | command sed 's/^[[:space:]]*//;s/[[:space:]]*$//' | nvm_grep -v '^$') + + if [ -z "$lines" ]; then + nvm_nvmrc_invalid_msg "${lines}" + return 1 + fi + + # Initialize key-value storage + local keys='' + local values='' + + while IFS= read -r line; do + if [ -z "${line}" ]; then + continue + elif [ -z "${line%%=*}" ]; then + if [ -n "${unpaired_line}" ]; then + nvm_nvmrc_invalid_msg "${lines}" + return 1 + fi + unpaired_line="${line}" + elif case "$line" in *'='*) true;; *) false;; esac; then + key="${line%%=*}" + value="${line#*=}" + + # Trim whitespace around key and value + key=$(nvm_echo "${key}" | command sed 's/^[[:space:]]*//;s/[[:space:]]*$//') + value=$(nvm_echo "${value}" | command sed 's/^[[:space:]]*//;s/[[:space:]]*$//') + + # Check for invalid key "node" + if [ "${key}" = 'node' ]; then + nvm_nvmrc_invalid_msg "${lines}" + return 1 + fi + + # Check for duplicate keys + if nvm_echo "${keys}" | nvm_grep -q -E "(^| )${key}( |$)"; then + nvm_nvmrc_invalid_msg "${lines}" + return 1 + fi + keys="${keys} ${key}" + values="${values} ${value}" + else + if [ -n "${unpaired_line}" ]; then + nvm_nvmrc_invalid_msg "${lines}" + return 1 + fi + unpaired_line="${line}" + fi + done <" + fi +} + +nvm_clang_version() { + clang --version | command awk '{ if ($2 == "version") print $3; else if ($3 == "version") print $4 }' | command sed 's/-.*$//g' +} + +nvm_curl_version() { + curl -V | command awk '{ if ($1 == "curl") print $2 }' | command sed 's/-.*$//g' +} + +nvm_version_greater() { + command awk 'BEGIN { + if (ARGV[1] == "" || ARGV[2] == "") exit(1) + split(ARGV[1], a, /\./); + split(ARGV[2], b, /\./); + for (i=1; i<=3; i++) { + if (a[i] && a[i] !~ 
/^[0-9]+$/) exit(2); + if (b[i] && b[i] !~ /^[0-9]+$/) { exit(0); } + if (a[i] < b[i]) exit(3); + else if (a[i] > b[i]) exit(0); + } + exit(4) + }' "${1#v}" "${2#v}" +} + +nvm_version_greater_than_or_equal_to() { + command awk 'BEGIN { + if (ARGV[1] == "" || ARGV[2] == "") exit(1) + split(ARGV[1], a, /\./); + split(ARGV[2], b, /\./); + for (i=1; i<=3; i++) { + if (a[i] && a[i] !~ /^[0-9]+$/) exit(2); + if (a[i] < b[i]) exit(3); + else if (a[i] > b[i]) exit(0); + } + exit(0) + }' "${1#v}" "${2#v}" +} + +nvm_version_dir() { + local NVM_WHICH_DIR + NVM_WHICH_DIR="${1-}" + if [ -z "${NVM_WHICH_DIR}" ] || [ "${NVM_WHICH_DIR}" = "new" ]; then + nvm_echo "${NVM_DIR}/versions/node" + elif [ "_${NVM_WHICH_DIR}" = "_iojs" ]; then + nvm_echo "${NVM_DIR}/versions/io.js" + elif [ "_${NVM_WHICH_DIR}" = "_old" ]; then + nvm_echo "${NVM_DIR}" + else + nvm_err 'unknown version dir' + return 3 + fi +} + +nvm_alias_path() { + nvm_echo "$(nvm_version_dir old)/alias" +} + +nvm_version_path() { + local VERSION + VERSION="${1-}" + if [ -z "${VERSION}" ]; then + nvm_err 'version is required' + return 3 + elif nvm_is_iojs_version "${VERSION}"; then + nvm_echo "$(nvm_version_dir iojs)/$(nvm_strip_iojs_prefix "${VERSION}")" + elif nvm_version_greater 0.12.0 "${VERSION}"; then + nvm_echo "$(nvm_version_dir old)/${VERSION}" + else + nvm_echo "$(nvm_version_dir new)/${VERSION}" + fi +} + +nvm_ensure_version_installed() { + local PROVIDED_VERSION + PROVIDED_VERSION="${1-}" + local IS_VERSION_FROM_NVMRC + IS_VERSION_FROM_NVMRC="${2-}" + if [ "${PROVIDED_VERSION}" = 'system' ]; then + if nvm_has_system_iojs || nvm_has_system_node; then + return 0 + fi + nvm_err "N/A: no system version of node/io.js is installed." + return 1 + fi + local LOCAL_VERSION + local EXIT_CODE + LOCAL_VERSION="$(nvm_version "${PROVIDED_VERSION}")" + EXIT_CODE="$?" + local NVM_VERSION_DIR + if [ "${EXIT_CODE}" != "0" ] || ! 
nvm_is_version_installed "${LOCAL_VERSION}"; then + if VERSION="$(nvm_resolve_alias "${PROVIDED_VERSION}")"; then + nvm_err "N/A: version \"${PROVIDED_VERSION} -> ${VERSION}\" is not yet installed." + else + local PREFIXED_VERSION + PREFIXED_VERSION="$(nvm_ensure_version_prefix "${PROVIDED_VERSION}")" + nvm_err "N/A: version \"${PREFIXED_VERSION:-$PROVIDED_VERSION}\" is not yet installed." + fi + nvm_err "" + if [ "${IS_VERSION_FROM_NVMRC}" != '1' ]; then + nvm_err "You need to run \`nvm install ${PROVIDED_VERSION}\` to install and use it." + else + nvm_err 'You need to run `nvm install` to install and use the node version specified in `.nvmrc`.' + fi + return 1 + fi +} + +# Expand a version using the version cache +nvm_version() { + local PATTERN + PATTERN="${1-}" + local VERSION + # The default version is the current one + if [ -z "${PATTERN}" ]; then + PATTERN='current' + fi + + if [ "${PATTERN}" = "current" ]; then + nvm_ls_current + return $? + fi + + local NVM_NODE_PREFIX + NVM_NODE_PREFIX="$(nvm_node_prefix)" + case "_${PATTERN}" in + "_${NVM_NODE_PREFIX}" | "_${NVM_NODE_PREFIX}-") + PATTERN="stable" + ;; + esac + VERSION="$(nvm_ls "${PATTERN}" | command tail -1)" + if [ -z "${VERSION}" ] || [ "_${VERSION}" = "_N/A" ]; then + nvm_echo "N/A" + return 3 + fi + nvm_echo "${VERSION}" +} + +nvm_remote_version() { + local PATTERN + PATTERN="${1-}" + local VERSION + if nvm_validate_implicit_alias "${PATTERN}" 2>/dev/null; then + case "${PATTERN}" in + "$(nvm_iojs_prefix)") + VERSION="$(NVM_LTS="${NVM_LTS-}" nvm_ls_remote_iojs | command tail -1)" &&: + ;; + *) + VERSION="$(NVM_LTS="${NVM_LTS-}" nvm_ls_remote "${PATTERN}")" &&: + ;; + esac + else + VERSION="$(NVM_LTS="${NVM_LTS-}" nvm_remote_versions "${PATTERN}" | command tail -1)" + fi + if [ -n "${NVM_VERSION_ONLY-}" ]; then + command awk 'BEGIN { + n = split(ARGV[1], a); + print a[1] + }' "${VERSION}" + else + nvm_echo "${VERSION}" + fi + if [ "${VERSION}" = 'N/A' ]; then + return 3 + fi +} + 
+nvm_remote_versions() { + local NVM_IOJS_PREFIX + NVM_IOJS_PREFIX="$(nvm_iojs_prefix)" + local NVM_NODE_PREFIX + NVM_NODE_PREFIX="$(nvm_node_prefix)" + + local PATTERN + PATTERN="${1-}" + + local NVM_FLAVOR + if [ -n "${NVM_LTS-}" ]; then + NVM_FLAVOR="${NVM_NODE_PREFIX}" + fi + + case "${PATTERN}" in + "${NVM_IOJS_PREFIX}" | "io.js") + NVM_FLAVOR="${NVM_IOJS_PREFIX}" + unset PATTERN + ;; + "${NVM_NODE_PREFIX}") + NVM_FLAVOR="${NVM_NODE_PREFIX}" + unset PATTERN + ;; + esac + + if nvm_validate_implicit_alias "${PATTERN-}" 2>/dev/null; then + nvm_err 'Implicit aliases are not supported in nvm_remote_versions.' + return 1 + fi + + local NVM_LS_REMOTE_EXIT_CODE + NVM_LS_REMOTE_EXIT_CODE=0 + local NVM_LS_REMOTE_PRE_MERGED_OUTPUT + NVM_LS_REMOTE_PRE_MERGED_OUTPUT='' + local NVM_LS_REMOTE_POST_MERGED_OUTPUT + NVM_LS_REMOTE_POST_MERGED_OUTPUT='' + if [ -z "${NVM_FLAVOR-}" ] || [ "${NVM_FLAVOR-}" = "${NVM_NODE_PREFIX}" ]; then + local NVM_LS_REMOTE_OUTPUT + # extra space is needed here to avoid weird behavior when `nvm_ls_remote` ends in a `*` + NVM_LS_REMOTE_OUTPUT="$(NVM_LTS="${NVM_LTS-}" nvm_ls_remote "${PATTERN-}") " &&: + NVM_LS_REMOTE_EXIT_CODE=$? + # split output into two + NVM_LS_REMOTE_PRE_MERGED_OUTPUT="${NVM_LS_REMOTE_OUTPUT%%v4\.0\.0*}" + NVM_LS_REMOTE_POST_MERGED_OUTPUT="${NVM_LS_REMOTE_OUTPUT#"$NVM_LS_REMOTE_PRE_MERGED_OUTPUT"}" + fi + + local NVM_LS_REMOTE_IOJS_EXIT_CODE + NVM_LS_REMOTE_IOJS_EXIT_CODE=0 + local NVM_LS_REMOTE_IOJS_OUTPUT + NVM_LS_REMOTE_IOJS_OUTPUT='' + if [ -z "${NVM_LTS-}" ] && { + [ -z "${NVM_FLAVOR-}" ] || [ "${NVM_FLAVOR-}" = "${NVM_IOJS_PREFIX}" ]; + }; then + NVM_LS_REMOTE_IOJS_OUTPUT=$(nvm_ls_remote_iojs "${PATTERN-}") &&: + NVM_LS_REMOTE_IOJS_EXIT_CODE=$? 
+ fi + + # the `sed` removes both blank lines, and only-whitespace lines (see "weird behavior" ~19 lines up) + VERSIONS="$(nvm_echo "${NVM_LS_REMOTE_PRE_MERGED_OUTPUT} +${NVM_LS_REMOTE_IOJS_OUTPUT} +${NVM_LS_REMOTE_POST_MERGED_OUTPUT}" | nvm_grep -v "N/A" | command sed '/^ *$/d')" + + if [ -z "${VERSIONS}" ]; then + nvm_echo 'N/A' + return 3 + fi + # the `sed` is to remove trailing whitespaces (see "weird behavior" ~25 lines up) + nvm_echo "${VERSIONS}" | command sed 's/ *$//g' + # shellcheck disable=SC2317 + return $NVM_LS_REMOTE_EXIT_CODE || $NVM_LS_REMOTE_IOJS_EXIT_CODE +} + +nvm_is_valid_version() { + if nvm_validate_implicit_alias "${1-}" 2>/dev/null; then + return 0 + fi + case "${1-}" in + "$(nvm_iojs_prefix)" | \ + "$(nvm_node_prefix)") + return 0 + ;; + *) + local VERSION + VERSION="$(nvm_strip_iojs_prefix "${1-}")" + nvm_version_greater_than_or_equal_to "${VERSION}" 0 + ;; + esac +} + +nvm_normalize_version() { + command awk 'BEGIN { + split(ARGV[1], a, /\./); + printf "%d%06d%06d\n", a[1], a[2], a[3]; + exit; + }' "${1#v}" +} + +nvm_normalize_lts() { + local LTS + LTS="${1-}" + + case "${LTS}" in + lts/-[123456789] | lts/-[123456789][0123456789]*) + local N + N="$(echo "${LTS}" | cut -d '-' -f 2)" + N=$((N+1)) + # shellcheck disable=SC2181 + if [ $? -ne 0 ]; then + nvm_echo "${LTS}" + return 0 + fi + local NVM_ALIAS_DIR + NVM_ALIAS_DIR="$(nvm_alias_path)" + local RESULT + RESULT="$(command ls "${NVM_ALIAS_DIR}/lts" | command tail -n "${N}" | command head -n 1)" + if [ "${RESULT}" != '*' ]; then + nvm_echo "lts/${RESULT}" + else + nvm_err 'That many LTS releases do not exist yet.' 
+ return 2 + fi + ;; + *) + nvm_echo "${LTS}" + ;; + esac +} + +nvm_ensure_version_prefix() { + local NVM_VERSION + NVM_VERSION="$(nvm_strip_iojs_prefix "${1-}" | command sed -e 's/^\([0-9]\)/v\1/g')" + if nvm_is_iojs_version "${1-}"; then + nvm_add_iojs_prefix "${NVM_VERSION}" + else + nvm_echo "${NVM_VERSION}" + fi +} + +nvm_format_version() { + local VERSION + VERSION="$(nvm_ensure_version_prefix "${1-}")" + local NUM_GROUPS + NUM_GROUPS="$(nvm_num_version_groups "${VERSION}")" + if [ "${NUM_GROUPS}" -lt 3 ]; then + nvm_format_version "${VERSION%.}.0" + else + nvm_echo "${VERSION}" | command cut -f1-3 -d. + fi +} + +nvm_num_version_groups() { + local VERSION + VERSION="${1-}" + VERSION="${VERSION#v}" + VERSION="${VERSION%.}" + if [ -z "${VERSION}" ]; then + nvm_echo "0" + return + fi + local NVM_NUM_DOTS + NVM_NUM_DOTS=$(nvm_echo "${VERSION}" | command sed -e 's/[^\.]//g') + local NVM_NUM_GROUPS + NVM_NUM_GROUPS=".${NVM_NUM_DOTS}" # add extra dot, since it's (n - 1) dots at this point + nvm_echo "${#NVM_NUM_GROUPS}" +} + +nvm_strip_path() { + if [ -z "${NVM_DIR-}" ]; then + nvm_err '${NVM_DIR} not set!' + return 1 + fi + command printf %s "${1-}" | command awk -v NVM_DIR="${NVM_DIR}" -v RS=: ' + index($0, NVM_DIR) == 1 { + path = substr($0, length(NVM_DIR) + 1) + if (path ~ "^(/versions/[^/]*)?/[^/]*'"${2-}"'.*$") { next } + } + # The final RT will contain a colon if the input has a trailing colon, or a null string otherwise + { printf "%s%s", sep, $0; sep=RS } END { printf "%s", RT }' +} + +nvm_change_path() { + # if there’s no initial path, just return the supplementary path + if [ -z "${1-}" ]; then + nvm_echo "${3-}${2-}" + # if the initial path doesn’t contain an nvm path, prepend the supplementary + # path + elif ! nvm_echo "${1-}" | nvm_grep -q "${NVM_DIR}/[^/]*${2-}" \ + && ! 
nvm_echo "${1-}" | nvm_grep -q "${NVM_DIR}/versions/[^/]*/[^/]*${2-}"; then + nvm_echo "${3-}${2-}:${1-}" + # if the initial path contains BOTH an nvm path (checked for above) and + # that nvm path is preceded by a system binary path, just prepend the + # supplementary path instead of replacing it. + # https://github.com/nvm-sh/nvm/issues/1652#issuecomment-342571223 + elif nvm_echo "${1-}" | nvm_grep -Eq "(^|:)(/usr(/local)?)?${2-}:.*${NVM_DIR}/[^/]*${2-}" \ + || nvm_echo "${1-}" | nvm_grep -Eq "(^|:)(/usr(/local)?)?${2-}:.*${NVM_DIR}/versions/[^/]*/[^/]*${2-}"; then + nvm_echo "${3-}${2-}:${1-}" + # use sed to replace the existing nvm path with the supplementary path. This + # preserves the order of the path. + else + nvm_echo "${1-}" | command sed \ + -e "s#${NVM_DIR}/[^/]*${2-}[^:]*#${3-}${2-}#" \ + -e "s#${NVM_DIR}/versions/[^/]*/[^/]*${2-}[^:]*#${3-}${2-}#" + fi +} + +nvm_binary_available() { + # binaries started with node 0.8.6 + nvm_version_greater_than_or_equal_to "$(nvm_strip_iojs_prefix "${1-}")" v0.8.6 +} + +nvm_set_colors() { + if [ "${#1}" -eq 5 ] && nvm_echo "$1" | nvm_grep -E "^[rRgGbBcCyYmMkKeW]{1,}$" 1>/dev/null; then + local INSTALLED_COLOR + local LTS_AND_SYSTEM_COLOR + local CURRENT_COLOR + local NOT_INSTALLED_COLOR + local DEFAULT_COLOR + + INSTALLED_COLOR="$(echo "$1" | awk '{ print substr($0, 1, 1); }')" + LTS_AND_SYSTEM_COLOR="$(echo "$1" | awk '{ print substr($0, 2, 1); }')" + CURRENT_COLOR="$(echo "$1" | awk '{ print substr($0, 3, 1); }')" + NOT_INSTALLED_COLOR="$(echo "$1" | awk '{ print substr($0, 4, 1); }')" + DEFAULT_COLOR="$(echo "$1" | awk '{ print substr($0, 5, 1); }')" + if ! nvm_has_colors; then + nvm_echo "Setting colors to: ${INSTALLED_COLOR} ${LTS_AND_SYSTEM_COLOR} ${CURRENT_COLOR} ${NOT_INSTALLED_COLOR} ${DEFAULT_COLOR}" + nvm_echo "WARNING: Colors may not display because they are not supported in this shell." 
+ else + nvm_echo_with_colors "Setting colors to: $(nvm_wrap_with_color_code "${INSTALLED_COLOR}" "${INSTALLED_COLOR}")$(nvm_wrap_with_color_code "${LTS_AND_SYSTEM_COLOR}" "${LTS_AND_SYSTEM_COLOR}")$(nvm_wrap_with_color_code "${CURRENT_COLOR}" "${CURRENT_COLOR}")$(nvm_wrap_with_color_code "${NOT_INSTALLED_COLOR}" "${NOT_INSTALLED_COLOR}")$(nvm_wrap_with_color_code "${DEFAULT_COLOR}" "${DEFAULT_COLOR}")" + fi + export NVM_COLORS="$1" + else + return 17 + fi +} + +nvm_get_colors() { + local COLOR + local SYS_COLOR + local COLORS + COLORS="${NVM_COLORS:-bygre}" + case $1 in + 1) COLOR=$(nvm_print_color_code "$(echo "$COLORS" | awk '{ print substr($0, 1, 1); }')");; + 2) COLOR=$(nvm_print_color_code "$(echo "$COLORS" | awk '{ print substr($0, 2, 1); }')");; + 3) COLOR=$(nvm_print_color_code "$(echo "$COLORS" | awk '{ print substr($0, 3, 1); }')");; + 4) COLOR=$(nvm_print_color_code "$(echo "$COLORS" | awk '{ print substr($0, 4, 1); }')");; + 5) COLOR=$(nvm_print_color_code "$(echo "$COLORS" | awk '{ print substr($0, 5, 1); }')");; + 6) + SYS_COLOR=$(nvm_print_color_code "$(echo "$COLORS" | awk '{ print substr($0, 2, 1); }')") + COLOR=$(nvm_echo "$SYS_COLOR" | command tr '0;' '1;') + ;; + *) + nvm_err "Invalid color index, ${1-}" + return 1 + ;; + esac + + nvm_echo "$COLOR" +} + +nvm_wrap_with_color_code() { + local CODE + CODE="$(nvm_print_color_code "${1}" 2>/dev/null ||:)" + local TEXT + TEXT="${2-}" + if nvm_has_colors && [ -n "${CODE}" ]; then + nvm_echo_with_colors "\033[${CODE}${TEXT}\033[0m" + else + nvm_echo "${TEXT}" + fi +} + +nvm_print_color_code() { + case "${1-}" in + '0') return 0 ;; + 'r') nvm_echo '0;31m' ;; + 'R') nvm_echo '1;31m' ;; + 'g') nvm_echo '0;32m' ;; + 'G') nvm_echo '1;32m' ;; + 'b') nvm_echo '0;34m' ;; + 'B') nvm_echo '1;34m' ;; + 'c') nvm_echo '0;36m' ;; + 'C') nvm_echo '1;36m' ;; + 'm') nvm_echo '0;35m' ;; + 'M') nvm_echo '1;35m' ;; + 'y') nvm_echo '0;33m' ;; + 'Y') nvm_echo '1;33m' ;; + 'k') nvm_echo '0;30m' ;; + 'K') nvm_echo '1;30m' ;; 
+ 'e') nvm_echo '0;37m' ;; + 'W') nvm_echo '1;37m' ;; + *) + nvm_err "Invalid color code: ${1-}"; + return 1 + ;; + esac +} + +nvm_print_formatted_alias() { + local ALIAS + ALIAS="${1-}" + local DEST + DEST="${2-}" + local VERSION + VERSION="${3-}" + if [ -z "${VERSION}" ]; then + VERSION="$(nvm_version "${DEST}")" ||: + fi + local VERSION_FORMAT + local ALIAS_FORMAT + local DEST_FORMAT + + local INSTALLED_COLOR + local SYSTEM_COLOR + local CURRENT_COLOR + local NOT_INSTALLED_COLOR + local DEFAULT_COLOR + local LTS_COLOR + + INSTALLED_COLOR=$(nvm_get_colors 1) + SYSTEM_COLOR=$(nvm_get_colors 2) + CURRENT_COLOR=$(nvm_get_colors 3) + NOT_INSTALLED_COLOR=$(nvm_get_colors 4) + DEFAULT_COLOR=$(nvm_get_colors 5) + LTS_COLOR=$(nvm_get_colors 6) + + ALIAS_FORMAT='%s' + DEST_FORMAT='%s' + VERSION_FORMAT='%s' + local NEWLINE + NEWLINE='\n' + if [ "_${DEFAULT}" = '_true' ]; then + NEWLINE=' (default)\n' + fi + local ARROW + ARROW='->' + if [ -z "${NVM_NO_COLORS}" ] && nvm_has_colors; then + ARROW='\033[0;90m->\033[0m' + if [ "_${DEFAULT}" = '_true' ]; then + NEWLINE=" \033[${DEFAULT_COLOR}(default)\033[0m\n" + fi + if [ "_${VERSION}" = "_${NVM_CURRENT-}" ]; then + ALIAS_FORMAT="\033[${CURRENT_COLOR}%s\033[0m" + DEST_FORMAT="\033[${CURRENT_COLOR}%s\033[0m" + VERSION_FORMAT="\033[${CURRENT_COLOR}%s\033[0m" + elif nvm_is_version_installed "${VERSION}"; then + ALIAS_FORMAT="\033[${INSTALLED_COLOR}%s\033[0m" + DEST_FORMAT="\033[${INSTALLED_COLOR}%s\033[0m" + VERSION_FORMAT="\033[${INSTALLED_COLOR}%s\033[0m" + elif [ "${VERSION}" = '∞' ] || [ "${VERSION}" = 'N/A' ]; then + ALIAS_FORMAT="\033[${NOT_INSTALLED_COLOR}%s\033[0m" + DEST_FORMAT="\033[${NOT_INSTALLED_COLOR}%s\033[0m" + VERSION_FORMAT="\033[${NOT_INSTALLED_COLOR}%s\033[0m" + fi + if [ "_${NVM_LTS-}" = '_true' ]; then + ALIAS_FORMAT="\033[${LTS_COLOR}%s\033[0m" + fi + if [ "_${DEST%/*}" = "_lts" ]; then + DEST_FORMAT="\033[${LTS_COLOR}%s\033[0m" + fi + elif [ "_${VERSION}" != '_∞' ] && [ "_${VERSION}" != '_N/A' ]; then + 
VERSION_FORMAT='%s *' + fi + if [ "${DEST}" = "${VERSION}" ]; then + command printf -- "${ALIAS_FORMAT} ${ARROW} ${VERSION_FORMAT}${NEWLINE}" "${ALIAS}" "${DEST}" + else + command printf -- "${ALIAS_FORMAT} ${ARROW} ${DEST_FORMAT} (${ARROW} ${VERSION_FORMAT})${NEWLINE}" "${ALIAS}" "${DEST}" "${VERSION}" + fi +} + +nvm_print_alias_path() { + local NVM_ALIAS_DIR + NVM_ALIAS_DIR="${1-}" + if [ -z "${NVM_ALIAS_DIR}" ]; then + nvm_err 'An alias dir is required.' + return 1 + fi + local ALIAS_PATH + ALIAS_PATH="${2-}" + if [ -z "${ALIAS_PATH}" ]; then + nvm_err 'An alias path is required.' + return 2 + fi + local ALIAS + ALIAS="${ALIAS_PATH##"${NVM_ALIAS_DIR}"\/}" + local DEST + DEST="$(nvm_alias "${ALIAS}" 2>/dev/null)" ||: + if [ -n "${DEST}" ]; then + NVM_NO_COLORS="${NVM_NO_COLORS-}" NVM_LTS="${NVM_LTS-}" DEFAULT=false nvm_print_formatted_alias "${ALIAS}" "${DEST}" + fi +} + +nvm_print_default_alias() { + local ALIAS + ALIAS="${1-}" + if [ -z "${ALIAS}" ]; then + nvm_err 'A default alias is required.' + return 1 + fi + local DEST + DEST="$(nvm_print_implicit_alias local "${ALIAS}")" + if [ -n "${DEST}" ]; then + NVM_NO_COLORS="${NVM_NO_COLORS-}" DEFAULT=true nvm_print_formatted_alias "${ALIAS}" "${DEST}" + fi +} + +nvm_make_alias() { + local ALIAS + ALIAS="${1-}" + if [ -z "${ALIAS}" ]; then + nvm_err "an alias name is required" + return 1 + fi + local VERSION + VERSION="${2-}" + if [ -z "${VERSION}" ]; then + nvm_err "an alias target version is required" + return 2 + fi + nvm_echo "${VERSION}" | tee "$(nvm_alias_path)/${ALIAS}" >/dev/null +} + +nvm_list_aliases() { + local ALIAS + ALIAS="${1-}" + + local NVM_CURRENT + NVM_CURRENT="$(nvm_ls_current)" + local NVM_ALIAS_DIR + NVM_ALIAS_DIR="$(nvm_alias_path)" + command mkdir -p "${NVM_ALIAS_DIR}/lts" + + if [ "${ALIAS}" != "${ALIAS#lts/}" ]; then + nvm_alias "${ALIAS}" + return $? 
+ fi + + nvm_is_zsh && unsetopt local_options nomatch + ( + local ALIAS_PATH + for ALIAS_PATH in "${NVM_ALIAS_DIR}/${ALIAS}"*; do + NVM_NO_COLORS="${NVM_NO_COLORS-}" NVM_CURRENT="${NVM_CURRENT}" nvm_print_alias_path "${NVM_ALIAS_DIR}" "${ALIAS_PATH}" & + done + wait + ) | command sort + + ( + local ALIAS_NAME + for ALIAS_NAME in "$(nvm_node_prefix)" "stable" "unstable" "$(nvm_iojs_prefix)"; do + { + # shellcheck disable=SC2030,SC2031 # (https://github.com/koalaman/shellcheck/issues/2217) + if [ ! -f "${NVM_ALIAS_DIR}/${ALIAS_NAME}" ] && { [ -z "${ALIAS}" ] || [ "${ALIAS_NAME}" = "${ALIAS}" ]; }; then + NVM_NO_COLORS="${NVM_NO_COLORS-}" NVM_CURRENT="${NVM_CURRENT}" nvm_print_default_alias "${ALIAS_NAME}" + fi + } & + done + wait + ) | command sort + + ( + local LTS_ALIAS + # shellcheck disable=SC2030,SC2031 # (https://github.com/koalaman/shellcheck/issues/2217) + for ALIAS_PATH in "${NVM_ALIAS_DIR}/lts/${ALIAS}"*; do + { + LTS_ALIAS="$(NVM_NO_COLORS="${NVM_NO_COLORS-}" NVM_LTS=true nvm_print_alias_path "${NVM_ALIAS_DIR}" "${ALIAS_PATH}")" + if [ -n "${LTS_ALIAS}" ]; then + nvm_echo "${LTS_ALIAS}" + fi + } & + done + wait + ) | command sort + return +} + +nvm_alias() { + local ALIAS + ALIAS="${1-}" + if [ -z "${ALIAS}" ]; then + nvm_err 'An alias is required.' + return 1 + fi + ALIAS="$(nvm_normalize_lts "${ALIAS}")" + + if [ -z "${ALIAS}" ]; then + return 2 + fi + + local NVM_ALIAS_PATH + NVM_ALIAS_PATH="$(nvm_alias_path)/${ALIAS}" + if [ ! -f "${NVM_ALIAS_PATH}" ]; then + nvm_err 'Alias does not exist.' + return 2 + fi + + command awk 'NF' "${NVM_ALIAS_PATH}" +} + +nvm_ls_current() { + local NVM_LS_CURRENT_NODE_PATH + if ! 
NVM_LS_CURRENT_NODE_PATH="$(command which node 2>/dev/null)"; then + nvm_echo 'none' + elif nvm_tree_contains_path "$(nvm_version_dir iojs)" "${NVM_LS_CURRENT_NODE_PATH}"; then + nvm_add_iojs_prefix "$(iojs --version 2>/dev/null)" + elif nvm_tree_contains_path "${NVM_DIR}" "${NVM_LS_CURRENT_NODE_PATH}"; then + local VERSION + VERSION="$(node --version 2>/dev/null)" + if [ "${VERSION}" = "v0.6.21-pre" ]; then + nvm_echo 'v0.6.21' + else + nvm_echo "${VERSION:-none}" + fi + else + nvm_echo 'system' + fi +} + +nvm_resolve_alias() { + if [ -z "${1-}" ]; then + return 1 + fi + + local PATTERN + PATTERN="${1-}" + + local ALIAS + ALIAS="${PATTERN}" + local ALIAS_TEMP + + local SEEN_ALIASES + SEEN_ALIASES="${ALIAS}" + local NVM_ALIAS_INDEX + NVM_ALIAS_INDEX=1 + while true; do + ALIAS_TEMP="$( (nvm_alias "${ALIAS}" 2>/dev/null | command head -n "${NVM_ALIAS_INDEX}" | command tail -n 1) || nvm_echo)" + + if [ -z "${ALIAS_TEMP}" ]; then + break + fi + + if command printf "${SEEN_ALIASES}" | nvm_grep -q -e "^${ALIAS_TEMP}$"; then + ALIAS="∞" + break + fi + + SEEN_ALIASES="${SEEN_ALIASES}\\n${ALIAS_TEMP}" + ALIAS="${ALIAS_TEMP}" + done + + if [ -n "${ALIAS}" ] && [ "_${ALIAS}" != "_${PATTERN}" ]; then + local NVM_IOJS_PREFIX + NVM_IOJS_PREFIX="$(nvm_iojs_prefix)" + local NVM_NODE_PREFIX + NVM_NODE_PREFIX="$(nvm_node_prefix)" + case "${ALIAS}" in + '∞' | \ + "${NVM_IOJS_PREFIX}" | "${NVM_IOJS_PREFIX}-" | \ + "${NVM_NODE_PREFIX}") + nvm_echo "${ALIAS}" + ;; + *) + nvm_ensure_version_prefix "${ALIAS}" + ;; + esac + return 0 + fi + + if nvm_validate_implicit_alias "${PATTERN}" 2>/dev/null; then + local IMPLICIT + IMPLICIT="$(nvm_print_implicit_alias local "${PATTERN}" 2>/dev/null)" + if [ -n "${IMPLICIT}" ]; then + nvm_ensure_version_prefix "${IMPLICIT}" + fi + fi + + return 2 +} + +nvm_resolve_local_alias() { + if [ -z "${1-}" ]; then + return 1 + fi + + local VERSION + local EXIT_CODE + VERSION="$(nvm_resolve_alias "${1-}")" + EXIT_CODE=$? 
+ if [ -z "${VERSION}" ]; then + return $EXIT_CODE + fi + if [ "_${VERSION}" != '_∞' ]; then + nvm_version "${VERSION}" + else + nvm_echo "${VERSION}" + fi +} + +nvm_iojs_prefix() { + nvm_echo 'iojs' +} +nvm_node_prefix() { + nvm_echo 'node' +} + +nvm_is_iojs_version() { + case "${1-}" in iojs-*) return 0 ;; esac + return 1 +} + +nvm_add_iojs_prefix() { + nvm_echo "$(nvm_iojs_prefix)-$(nvm_ensure_version_prefix "$(nvm_strip_iojs_prefix "${1-}")")" +} + +nvm_strip_iojs_prefix() { + local NVM_IOJS_PREFIX + NVM_IOJS_PREFIX="$(nvm_iojs_prefix)" + if [ "${1-}" = "${NVM_IOJS_PREFIX}" ]; then + nvm_echo + else + nvm_echo "${1#"${NVM_IOJS_PREFIX}"-}" + fi +} + +nvm_ls() { + local PATTERN + PATTERN="${1-}" + local VERSIONS + VERSIONS='' + if [ "${PATTERN}" = 'current' ]; then + nvm_ls_current + return + fi + + local NVM_IOJS_PREFIX + NVM_IOJS_PREFIX="$(nvm_iojs_prefix)" + local NVM_NODE_PREFIX + NVM_NODE_PREFIX="$(nvm_node_prefix)" + local NVM_VERSION_DIR_IOJS + NVM_VERSION_DIR_IOJS="$(nvm_version_dir "${NVM_IOJS_PREFIX}")" + local NVM_VERSION_DIR_NEW + NVM_VERSION_DIR_NEW="$(nvm_version_dir new)" + local NVM_VERSION_DIR_OLD + NVM_VERSION_DIR_OLD="$(nvm_version_dir old)" + + case "${PATTERN}" in + "${NVM_IOJS_PREFIX}" | "${NVM_NODE_PREFIX}") + PATTERN="${PATTERN}-" + ;; + *) + if nvm_resolve_local_alias "${PATTERN}"; then + return + fi + PATTERN="$(nvm_ensure_version_prefix "${PATTERN}")" + ;; + esac + if [ "${PATTERN}" = 'N/A' ]; then + return + fi + # If it looks like an explicit version, don't do anything funny + local NVM_PATTERN_STARTS_WITH_V + case $PATTERN in + v*) NVM_PATTERN_STARTS_WITH_V=true ;; + *) NVM_PATTERN_STARTS_WITH_V=false ;; + esac + if [ $NVM_PATTERN_STARTS_WITH_V = true ] && [ "_$(nvm_num_version_groups "${PATTERN}")" = "_3" ]; then + if nvm_is_version_installed "${PATTERN}"; then + VERSIONS="${PATTERN}" + elif nvm_is_version_installed "$(nvm_add_iojs_prefix "${PATTERN}")"; then + VERSIONS="$(nvm_add_iojs_prefix "${PATTERN}")" + fi + else + case 
"${PATTERN}" in + "${NVM_IOJS_PREFIX}-" | "${NVM_NODE_PREFIX}-" | "system") ;; + *) + local NUM_VERSION_GROUPS + NUM_VERSION_GROUPS="$(nvm_num_version_groups "${PATTERN}")" + if [ "${NUM_VERSION_GROUPS}" = "2" ] || [ "${NUM_VERSION_GROUPS}" = "1" ]; then + PATTERN="${PATTERN%.}." + fi + ;; + esac + + nvm_is_zsh && setopt local_options shwordsplit + nvm_is_zsh && unsetopt local_options markdirs + + local NVM_DIRS_TO_SEARCH1 + NVM_DIRS_TO_SEARCH1='' + local NVM_DIRS_TO_SEARCH2 + NVM_DIRS_TO_SEARCH2='' + local NVM_DIRS_TO_SEARCH3 + NVM_DIRS_TO_SEARCH3='' + local NVM_ADD_SYSTEM + NVM_ADD_SYSTEM=false + if nvm_is_iojs_version "${PATTERN}"; then + NVM_DIRS_TO_SEARCH1="${NVM_VERSION_DIR_IOJS}" + PATTERN="$(nvm_strip_iojs_prefix "${PATTERN}")" + if nvm_has_system_iojs; then + NVM_ADD_SYSTEM=true + fi + elif [ "${PATTERN}" = "${NVM_NODE_PREFIX}-" ]; then + NVM_DIRS_TO_SEARCH1="${NVM_VERSION_DIR_OLD}" + NVM_DIRS_TO_SEARCH2="${NVM_VERSION_DIR_NEW}" + PATTERN='' + if nvm_has_system_node; then + NVM_ADD_SYSTEM=true + fi + else + NVM_DIRS_TO_SEARCH1="${NVM_VERSION_DIR_OLD}" + NVM_DIRS_TO_SEARCH2="${NVM_VERSION_DIR_NEW}" + NVM_DIRS_TO_SEARCH3="${NVM_VERSION_DIR_IOJS}" + if nvm_has_system_iojs || nvm_has_system_node; then + NVM_ADD_SYSTEM=true + fi + fi + + if ! [ -d "${NVM_DIRS_TO_SEARCH1}" ] || ! (command ls -1qA "${NVM_DIRS_TO_SEARCH1}" | nvm_grep -q .); then + NVM_DIRS_TO_SEARCH1='' + fi + if ! [ -d "${NVM_DIRS_TO_SEARCH2}" ] || ! (command ls -1qA "${NVM_DIRS_TO_SEARCH2}" | nvm_grep -q .); then + NVM_DIRS_TO_SEARCH2="${NVM_DIRS_TO_SEARCH1}" + fi + if ! [ -d "${NVM_DIRS_TO_SEARCH3}" ] || ! 
(command ls -1qA "${NVM_DIRS_TO_SEARCH3}" | nvm_grep -q .); then + NVM_DIRS_TO_SEARCH3="${NVM_DIRS_TO_SEARCH2}" + fi + + local SEARCH_PATTERN + if [ -z "${PATTERN}" ]; then + PATTERN='v' + SEARCH_PATTERN='.*' + else + SEARCH_PATTERN="$(nvm_echo "${PATTERN}" | command sed 's#\.#\\\.#g;')" + fi + if [ -n "${NVM_DIRS_TO_SEARCH1}${NVM_DIRS_TO_SEARCH2}${NVM_DIRS_TO_SEARCH3}" ]; then + VERSIONS="$(command find "${NVM_DIRS_TO_SEARCH1}"/* "${NVM_DIRS_TO_SEARCH2}"/* "${NVM_DIRS_TO_SEARCH3}"/* -name . -o -type d -prune -o -path "${PATTERN}*" \ + | command sed -e " + s#${NVM_VERSION_DIR_IOJS}/#versions/${NVM_IOJS_PREFIX}/#; + s#^${NVM_DIR}/##; + \\#^[^v]# d; + \\#^versions\$# d; + s#^versions/##; + s#^v#${NVM_NODE_PREFIX}/v#; + \\#${SEARCH_PATTERN}# !d; + " \ + -e 's#^\([^/]\{1,\}\)/\(.*\)$#\2.\1#;' \ + | command sort -t. -u -k 1.2,1n -k 2,2n -k 3,3n \ + | command sed -e 's#\(.*\)\.\([^\.]\{1,\}\)$#\2-\1#;' \ + -e "s#^${NVM_NODE_PREFIX}-##;" \ + )" + fi + fi + + if [ "${NVM_ADD_SYSTEM-}" = true ]; then + if [ -z "${PATTERN}" ] || [ "${PATTERN}" = 'v' ]; then + VERSIONS="${VERSIONS} +system" + elif [ "${PATTERN}" = 'system' ]; then + VERSIONS="system" + fi + fi + + if [ -z "${VERSIONS}" ]; then + nvm_echo 'N/A' + return 3 + fi + + nvm_echo "${VERSIONS}" +} + +nvm_ls_remote() { + local PATTERN + PATTERN="${1-}" + if nvm_validate_implicit_alias "${PATTERN}" 2>/dev/null ; then + local IMPLICIT + IMPLICIT="$(nvm_print_implicit_alias remote "${PATTERN}")" + if [ -z "${IMPLICIT-}" ] || [ "${IMPLICIT}" = 'N/A' ]; then + nvm_echo "N/A" + return 3 + fi + PATTERN="$(NVM_LTS="${NVM_LTS-}" nvm_ls_remote "${IMPLICIT}" | command tail -1 | command awk '{ print $1 }')" + elif [ -n "${PATTERN}" ]; then + PATTERN="$(nvm_ensure_version_prefix "${PATTERN}")" + else + PATTERN=".*" + fi + NVM_LTS="${NVM_LTS-}" nvm_ls_remote_index_tab node std "${PATTERN}" +} + +nvm_ls_remote_iojs() { + NVM_LTS="${NVM_LTS-}" nvm_ls_remote_index_tab iojs std "${1-}" +} + +# args flavor, type, version 
+nvm_ls_remote_index_tab() { + local LTS + LTS="${NVM_LTS-}" + if [ "$#" -lt 3 ]; then + nvm_err 'not enough arguments' + return 5 + fi + + local FLAVOR + FLAVOR="${1-}" + + local TYPE + TYPE="${2-}" + + local MIRROR + MIRROR="$(nvm_get_mirror "${FLAVOR}" "${TYPE}")" + if [ -z "${MIRROR}" ]; then + return 3 + fi + + local PREFIX + PREFIX='' + case "${FLAVOR}-${TYPE}" in + iojs-std) PREFIX="$(nvm_iojs_prefix)-" ;; + node-std) PREFIX='' ;; + iojs-*) + nvm_err 'unknown type of io.js release' + return 4 + ;; + *) + nvm_err 'unknown type of node.js release' + return 4 + ;; + esac + local SORT_COMMAND + SORT_COMMAND='command sort' + case "${FLAVOR}" in + node) SORT_COMMAND='command sort -t. -u -k 1.2,1n -k 2,2n -k 3,3n' ;; + esac + + local PATTERN + PATTERN="${3-}" + + if [ "${PATTERN#"${PATTERN%?}"}" = '.' ]; then + PATTERN="${PATTERN%.}" + fi + + local VERSIONS + if [ -n "${PATTERN}" ] && [ "${PATTERN}" != '*' ]; then + if [ "${FLAVOR}" = 'iojs' ]; then + PATTERN="$(nvm_ensure_version_prefix "$(nvm_strip_iojs_prefix "${PATTERN}")")" + else + PATTERN="$(nvm_ensure_version_prefix "${PATTERN}")" + fi + else + unset PATTERN + fi + + nvm_is_zsh && setopt local_options shwordsplit + local VERSION_LIST + VERSION_LIST="$(nvm_download -L -s "${MIRROR}/index.tab" -o - \ + | command sed " + 1d; + s/^/${PREFIX}/; + " \ + )" + local LTS_ALIAS + local LTS_VERSION + command mkdir -p "$(nvm_alias_path)/lts" + { command awk '{ + if ($10 ~ /^\-?$/) { next } + if ($10 && !a[tolower($10)]++) { + if (alias) { print alias, version } + alias_name = "lts/" tolower($10) + if (!alias) { print "lts/*", alias_name } + alias = alias_name + version = $1 + } + } + END { + if (alias) { + print alias, version + } + }' \ + | while read -r LTS_ALIAS_LINE; do + LTS_ALIAS="${LTS_ALIAS_LINE%% *}" + LTS_VERSION="${LTS_ALIAS_LINE#* }" + nvm_make_alias "${LTS_ALIAS}" "${LTS_VERSION}" >/dev/null 2>&1 + done; } << EOF +$VERSION_LIST +EOF + + if [ -n "${LTS-}" ]; then + LTS="$(nvm_normalize_lts "lts/${LTS}")" + 
LTS="${LTS#lts/}" + fi + + VERSIONS="$({ command awk -v lts="${LTS-}" '{ + if (!$1) { next } + if (lts && $10 ~ /^\-?$/) { next } + if (lts && lts != "*" && tolower($10) !~ tolower(lts)) { next } + if ($10 !~ /^\-?$/) { + if ($10 && $10 != prev) { + print $1, $10, "*" + } else { + print $1, $10 + } + } else { + print $1 + } + prev=$10; + }' \ + | nvm_grep -w "${PATTERN:-.*}" \ + | $SORT_COMMAND; } << EOF +$VERSION_LIST +EOF +)" + if [ -z "${VERSIONS}" ]; then + nvm_echo 'N/A' + return 3 + fi + nvm_echo "${VERSIONS}" +} + +nvm_get_checksum_binary() { + if nvm_has_non_aliased 'sha256sum'; then + nvm_echo 'sha256sum' + elif nvm_has_non_aliased 'shasum'; then + nvm_echo 'shasum' + elif nvm_has_non_aliased 'sha256'; then + nvm_echo 'sha256' + elif nvm_has_non_aliased 'gsha256sum'; then + nvm_echo 'gsha256sum' + elif nvm_has_non_aliased 'openssl'; then + nvm_echo 'openssl' + elif nvm_has_non_aliased 'bssl'; then + nvm_echo 'bssl' + elif nvm_has_non_aliased 'sha1sum'; then + nvm_echo 'sha1sum' + elif nvm_has_non_aliased 'sha1'; then + nvm_echo 'sha1' + else + nvm_err 'Unaliased sha256sum, shasum, sha256, gsha256sum, openssl, or bssl not found.' + nvm_err 'Unaliased sha1sum or sha1 not found.' + return 1 + fi +} + +nvm_get_checksum_alg() { + local NVM_CHECKSUM_BIN + NVM_CHECKSUM_BIN="$(nvm_get_checksum_binary 2>/dev/null)" + case "${NVM_CHECKSUM_BIN-}" in + sha256sum | shasum | sha256 | gsha256sum | openssl | bssl) + nvm_echo 'sha-256' + ;; + sha1sum | sha1) + nvm_echo 'sha-1' + ;; + *) + nvm_get_checksum_binary + return $? + ;; + esac +} + +nvm_compute_checksum() { + local FILE + FILE="${1-}" + if [ -z "${FILE}" ]; then + nvm_err 'Provided file to checksum is empty.' + return 2 + elif ! [ -f "${FILE}" ]; then + nvm_err 'Provided file to checksum does not exist.' 
+ return 1 + fi + + if nvm_has_non_aliased "sha256sum"; then + nvm_err 'Computing checksum with sha256sum' + command sha256sum "${FILE}" | command awk '{print $1}' + elif nvm_has_non_aliased "shasum"; then + nvm_err 'Computing checksum with shasum -a 256' + command shasum -a 256 "${FILE}" | command awk '{print $1}' + elif nvm_has_non_aliased "sha256"; then + nvm_err 'Computing checksum with sha256 -q' + command sha256 -q "${FILE}" | command awk '{print $1}' + elif nvm_has_non_aliased "gsha256sum"; then + nvm_err 'Computing checksum with gsha256sum' + command gsha256sum "${FILE}" | command awk '{print $1}' + elif nvm_has_non_aliased "openssl"; then + nvm_err 'Computing checksum with openssl dgst -sha256' + command openssl dgst -sha256 "${FILE}" | command awk '{print $NF}' + elif nvm_has_non_aliased "bssl"; then + nvm_err 'Computing checksum with bssl sha256sum' + command bssl sha256sum "${FILE}" | command awk '{print $1}' + elif nvm_has_non_aliased "sha1sum"; then + nvm_err 'Computing checksum with sha1sum' + command sha1sum "${FILE}" | command awk '{print $1}' + elif nvm_has_non_aliased "sha1"; then + nvm_err 'Computing checksum with sha1 -q' + command sha1 -q "${FILE}" + fi +} + +nvm_compare_checksum() { + local FILE + FILE="${1-}" + if [ -z "${FILE}" ]; then + nvm_err 'Provided file to checksum is empty.' + return 4 + elif ! [ -f "${FILE}" ]; then + nvm_err 'Provided file to checksum does not exist.' + return 3 + fi + + local COMPUTED_SUM + COMPUTED_SUM="$(nvm_compute_checksum "${FILE}")" + + local CHECKSUM + CHECKSUM="${2-}" + if [ -z "${CHECKSUM}" ]; then + nvm_err 'Provided checksum to compare to is empty.' + return 2 + fi + + if [ -z "${COMPUTED_SUM}" ]; then + nvm_err "Computed checksum of '${FILE}' is empty." 
# missing in raspberry pi binary + nvm_err 'WARNING: Continuing *without checksum verification*' + return + elif [ "${COMPUTED_SUM}" != "${CHECKSUM}" ] && [ "${COMPUTED_SUM}" != "\\${CHECKSUM}" ]; then + nvm_err "Checksums do not match: '${COMPUTED_SUM}' found, '${CHECKSUM}' expected." + return 1 + fi + nvm_err 'Checksums matched!' +} + +# args: flavor, type, version, slug, compression +nvm_get_checksum() { + local FLAVOR + case "${1-}" in + node | iojs) FLAVOR="${1}" ;; + *) + nvm_err 'supported flavors: node, iojs' + return 2 + ;; + esac + + local MIRROR + MIRROR="$(nvm_get_mirror "${FLAVOR}" "${2-}")" + if [ -z "${MIRROR}" ]; then + return 1 + fi + + local SHASUMS_URL + if [ "$(nvm_get_checksum_alg)" = 'sha-256' ]; then + SHASUMS_URL="${MIRROR}/${3}/SHASUMS256.txt" + else + SHASUMS_URL="${MIRROR}/${3}/SHASUMS.txt" + fi + + nvm_download -L -s "${SHASUMS_URL}" -o - | command awk "{ if (\"${4}.${5}\" == \$2) print \$1}" +} + +nvm_print_versions() { + local NVM_CURRENT + NVM_CURRENT=$(nvm_ls_current) + + local INSTALLED_COLOR + local SYSTEM_COLOR + local CURRENT_COLOR + local NOT_INSTALLED_COLOR + local DEFAULT_COLOR + local LTS_COLOR + local NVM_HAS_COLORS + NVM_HAS_COLORS=0 + + INSTALLED_COLOR=$(nvm_get_colors 1) + SYSTEM_COLOR=$(nvm_get_colors 2) + CURRENT_COLOR=$(nvm_get_colors 3) + NOT_INSTALLED_COLOR=$(nvm_get_colors 4) + DEFAULT_COLOR=$(nvm_get_colors 5) + LTS_COLOR=$(nvm_get_colors 6) + + if [ -z "${NVM_NO_COLORS-}" ] && nvm_has_colors; then + NVM_HAS_COLORS=1 + fi + + command awk \ + -v remote_versions="$(printf '%s' "${1-}" | tr '\n' '|')" \ + -v installed_versions="$(nvm_ls | tr '\n' '|')" -v current="$NVM_CURRENT" \ + -v installed_color="$INSTALLED_COLOR" -v system_color="$SYSTEM_COLOR" \ + -v current_color="$CURRENT_COLOR" -v default_color="$DEFAULT_COLOR" \ + -v old_lts_color="$DEFAULT_COLOR" -v has_colors="$NVM_HAS_COLORS" ' +function alen(arr, i, len) { len=0; for(i in arr) len++; return len; } +BEGIN { + fmt_installed = has_colors ? 
(installed_color ? "\033[" installed_color "%15s\033[0m" : "%15s") : "%15s *"; + fmt_system = has_colors ? (system_color ? "\033[" system_color "%15s\033[0m" : "%15s") : "%15s *"; + fmt_current = has_colors ? (current_color ? "\033[" current_color "->%13s\033[0m" : "%15s") : "->%13s *"; + + latest_lts_color = current_color; + sub(/0;/, "1;", latest_lts_color); + + fmt_latest_lts = has_colors && latest_lts_color ? ("\033[" latest_lts_color " (Latest LTS: %s)\033[0m") : " (Latest LTS: %s)"; + fmt_old_lts = has_colors && old_lts_color ? ("\033[" old_lts_color " (LTS: %s)\033[0m") : " (LTS: %s)"; + + split(remote_versions, lines, "|"); + split(installed_versions, installed, "|"); + rows = alen(lines); + + for (n = 1; n <= rows; n++) { + split(lines[n], fields, "[[:blank:]]+"); + cols = alen(fields); + version = fields[1]; + is_installed = 0; + + for (i in installed) { + if (version == installed[i]) { + is_installed = 1; + break; + } + } + + fmt_version = "%15s"; + if (version == current) { + fmt_version = fmt_current; + } else if (version == "system") { + fmt_version = fmt_system; + } else if (is_installed) { + fmt_version = fmt_installed; + } + + padding = (!has_colors && is_installed) ? 
"" : " "; + + if (cols == 1) { + formatted = sprintf(fmt_version, version); + } else if (cols == 2) { + formatted = sprintf((fmt_version padding fmt_old_lts), version, fields[2]); + } else if (cols == 3 && fields[3] == "*") { + formatted = sprintf((fmt_version padding fmt_latest_lts), version, fields[2]); + } + + output[n] = formatted; + } + + for (n = 1; n <= rows; n++) { + print output[n] + } + + exit +}' +} + +nvm_validate_implicit_alias() { + local NVM_IOJS_PREFIX + NVM_IOJS_PREFIX="$(nvm_iojs_prefix)" + local NVM_NODE_PREFIX + NVM_NODE_PREFIX="$(nvm_node_prefix)" + + case "$1" in + "stable" | "unstable" | "${NVM_IOJS_PREFIX}" | "${NVM_NODE_PREFIX}") + return + ;; + *) + nvm_err "Only implicit aliases 'stable', 'unstable', '${NVM_IOJS_PREFIX}', and '${NVM_NODE_PREFIX}' are supported." + return 1 + ;; + esac +} + +nvm_print_implicit_alias() { + if [ "_$1" != "_local" ] && [ "_$1" != "_remote" ]; then + nvm_err "nvm_print_implicit_alias must be specified with local or remote as the first argument." + return 1 + fi + + local NVM_IMPLICIT + NVM_IMPLICIT="$2" + if ! nvm_validate_implicit_alias "${NVM_IMPLICIT}"; then + return 2 + fi + + local NVM_IOJS_PREFIX + NVM_IOJS_PREFIX="$(nvm_iojs_prefix)" + local NVM_NODE_PREFIX + NVM_NODE_PREFIX="$(nvm_node_prefix)" + local NVM_COMMAND + local NVM_ADD_PREFIX_COMMAND + local LAST_TWO + case "${NVM_IMPLICIT}" in + "${NVM_IOJS_PREFIX}") + NVM_COMMAND="nvm_ls_remote_iojs" + NVM_ADD_PREFIX_COMMAND="nvm_add_iojs_prefix" + if [ "_$1" = "_local" ]; then + NVM_COMMAND="nvm_ls ${NVM_IMPLICIT}" + fi + + nvm_is_zsh && setopt local_options shwordsplit + + local NVM_IOJS_VERSION + local EXIT_CODE + NVM_IOJS_VERSION="$(${NVM_COMMAND})" &&: + EXIT_CODE="$?" + if [ "_${EXIT_CODE}" = "_0" ]; then + NVM_IOJS_VERSION="$(nvm_echo "${NVM_IOJS_VERSION}" | command sed "s/^${NVM_IMPLICIT}-//" | nvm_grep -e '^v' | command cut -c2- | command cut -d . 
-f 1,2 | uniq | command tail -1)" + fi + + if [ "_$NVM_IOJS_VERSION" = "_N/A" ]; then + nvm_echo 'N/A' + else + ${NVM_ADD_PREFIX_COMMAND} "${NVM_IOJS_VERSION}" + fi + return $EXIT_CODE + ;; + "${NVM_NODE_PREFIX}") + nvm_echo 'stable' + return + ;; + *) + NVM_COMMAND="nvm_ls_remote" + if [ "_$1" = "_local" ]; then + NVM_COMMAND="nvm_ls node" + fi + + nvm_is_zsh && setopt local_options shwordsplit + + LAST_TWO=$($NVM_COMMAND | nvm_grep -e '^v' | command cut -c2- | command cut -d . -f 1,2 | uniq) + ;; + esac + local MINOR + local STABLE + local UNSTABLE + local MOD + local NORMALIZED_VERSION + + nvm_is_zsh && setopt local_options shwordsplit + for MINOR in $LAST_TWO; do + NORMALIZED_VERSION="$(nvm_normalize_version "$MINOR")" + if [ "_0${NORMALIZED_VERSION#?}" != "_$NORMALIZED_VERSION" ]; then + STABLE="$MINOR" + else + MOD="$(awk 'BEGIN { print int(ARGV[1] / 1000000) % 2 ; exit(0) }' "${NORMALIZED_VERSION}")" + if [ "${MOD}" -eq 0 ]; then + STABLE="${MINOR}" + elif [ "${MOD}" -eq 1 ]; then + UNSTABLE="${MINOR}" + fi + fi + done + + if [ "_$2" = '_stable' ]; then + nvm_echo "${STABLE}" + elif [ "_$2" = '_unstable' ]; then + nvm_echo "${UNSTABLE:-"N/A"}" + fi +} + +nvm_get_os() { + local NVM_UNAME + NVM_UNAME="$(command uname -a)" + local NVM_OS + case "${NVM_UNAME}" in + Linux\ *) NVM_OS=linux ;; + Darwin\ *) NVM_OS=darwin ;; + SunOS\ *) NVM_OS=sunos ;; + FreeBSD\ *) NVM_OS=freebsd ;; + OpenBSD\ *) NVM_OS=openbsd ;; + AIX\ *) NVM_OS=aix ;; + CYGWIN* | MSYS* | MINGW*) NVM_OS=win ;; + esac + nvm_echo "${NVM_OS-}" +} + +nvm_get_arch() { + local HOST_ARCH + local NVM_OS + local EXIT_CODE + local LONG_BIT + + NVM_OS="$(nvm_get_os)" + # If the OS is SunOS, first try to use pkgsrc to guess + # the most appropriate arch. If it's not available, use + # isainfo to get the instruction set supported by the + # kernel. 
+ if [ "_${NVM_OS}" = "_sunos" ]; then + if HOST_ARCH=$(pkg_info -Q MACHINE_ARCH pkg_install); then + HOST_ARCH=$(nvm_echo "${HOST_ARCH}" | command tail -1) + else + HOST_ARCH=$(isainfo -n) + fi + elif [ "_${NVM_OS}" = "_aix" ]; then + HOST_ARCH=ppc64 + else + HOST_ARCH="$(command uname -m)" + LONG_BIT="$(getconf LONG_BIT 2>/dev/null)" + fi + + local NVM_ARCH + case "${HOST_ARCH}" in + x86_64 | amd64) NVM_ARCH="x64" ;; + i*86) NVM_ARCH="x86" ;; + aarch64 | armv8l) NVM_ARCH="arm64" ;; + *) NVM_ARCH="${HOST_ARCH}" ;; + esac + + # If running inside a 32Bit docker container the kernel still is 64bit + # change ARCH to 32bit if LONG_BIT is 32 + if [ "_${LONG_BIT}" = "_32" ] && [ "${NVM_ARCH}" = "x64" ]; then + NVM_ARCH="x86" + fi + + # If running a 64bit ARM kernel but a 32bit ARM userland, + # change ARCH to 32bit ARM (armv7l) if /sbin/init is 32bit executable + if [ "$(uname)" = "Linux" ] \ + && [ "${NVM_ARCH}" = arm64 ] \ + && [ "$(command od -An -t x1 -j 4 -N 1 "/sbin/init" 2>/dev/null)" = ' 01' ]\ + ; then + NVM_ARCH=armv7l + HOST_ARCH=armv7l + fi + + if [ -f "/etc/alpine-release" ]; then + NVM_ARCH=x64-musl + fi + + nvm_echo "${NVM_ARCH}" +} + +nvm_get_minor_version() { + local VERSION + VERSION="$1" + + if [ -z "${VERSION}" ]; then + nvm_err 'a version is required' + return 1 + fi + + case "${VERSION}" in + v | .* | *..* | v*[!.0123456789]* | [!v]*[!.0123456789]* | [!v0123456789]* | v[!0123456789]*) + nvm_err 'invalid version number' + return 2 + ;; + esac + + local PREFIXED_VERSION + PREFIXED_VERSION="$(nvm_format_version "${VERSION}")" + + local MINOR + MINOR="$(nvm_echo "${PREFIXED_VERSION}" | nvm_grep -e '^v' | command cut -c2- | command cut -d . -f 1,2)" + if [ -z "${MINOR}" ]; then + nvm_err 'invalid version number! 
(please report this)' + return 3 + fi + nvm_echo "${MINOR}" +} + +nvm_ensure_default_set() { + local VERSION + VERSION="$1" + if [ -z "${VERSION}" ]; then + nvm_err 'nvm_ensure_default_set: a version is required' + return 1 + elif nvm_alias default >/dev/null 2>&1; then + # default already set + return 0 + fi + local OUTPUT + OUTPUT="$(nvm alias default "${VERSION}")" + local EXIT_CODE + EXIT_CODE="$?" + nvm_echo "Creating default alias: ${OUTPUT}" + return $EXIT_CODE +} + +nvm_is_merged_node_version() { + nvm_version_greater_than_or_equal_to "$1" v4.0.0 +} + +nvm_get_mirror() { + local NVM_MIRROR + NVM_MIRROR='' + case "${1}-${2}" in + node-std) NVM_MIRROR="${NVM_NODEJS_ORG_MIRROR:-https://nodejs.org/dist}" ;; + iojs-std) NVM_MIRROR="${NVM_IOJS_ORG_MIRROR:-https://iojs.org/dist}" ;; + *) + nvm_err 'unknown type of node.js or io.js release' + return 1 + ;; + esac + + case "${NVM_MIRROR}" in + *\`* | *\\* | *\'* | *\(* | *' '* ) + nvm_err '$NVM_NODEJS_ORG_MIRROR and $NVM_IOJS_ORG_MIRROR may only contain a URL' + return 2 + ;; + esac + + + if ! 
nvm_echo "${NVM_MIRROR}" | command awk '{ $0 ~ "^https?://[a-zA-Z0-9./_-]+$" }'; then + nvm_err '$NVM_NODEJS_ORG_MIRROR and $NVM_IOJS_ORG_MIRROR may only contain a URL' + return 2 + fi + + nvm_echo "${NVM_MIRROR}" +} + +# args: os, prefixed version, version, tarball, extract directory +nvm_install_binary_extract() { + if [ "$#" -ne 5 ]; then + nvm_err 'nvm_install_binary_extract needs 5 parameters' + return 1 + fi + + local NVM_OS + local PREFIXED_VERSION + local VERSION + local TARBALL + local TMPDIR + NVM_OS="${1}" + PREFIXED_VERSION="${2}" + VERSION="${3}" + TARBALL="${4}" + TMPDIR="${5}" + + local VERSION_PATH + + [ -n "${TMPDIR-}" ] && \ + command mkdir -p "${TMPDIR}" && \ + VERSION_PATH="$(nvm_version_path "${PREFIXED_VERSION}")" || return 1 + + # For Windows system (GitBash with MSYS, Cygwin) + if [ "${NVM_OS}" = 'win' ]; then + VERSION_PATH="${VERSION_PATH}/bin" + command unzip -q "${TARBALL}" -d "${TMPDIR}" || return 1 + # For non Windows system (including WSL running on Windows) + else + nvm_extract_tarball "${NVM_OS}" "${VERSION}" "${TARBALL}" "${TMPDIR}" + fi + + command mkdir -p "${VERSION_PATH}" || return 1 + + if [ "${NVM_OS}" = 'win' ]; then + command mv "${TMPDIR}/"*/* "${VERSION_PATH}" || return 1 + command chmod +x "${VERSION_PATH}"/node.exe || return 1 + command chmod +x "${VERSION_PATH}"/npm || return 1 + command chmod +x "${VERSION_PATH}"/npx 2>/dev/null + else + command mv "${TMPDIR}/"* "${VERSION_PATH}" || return 1 + fi + + command rm -rf "${TMPDIR}" + + return 0 +} + +# args: flavor, type, version, reinstall +nvm_install_binary() { + local FLAVOR + case "${1-}" in + node | iojs) FLAVOR="${1}" ;; + *) + nvm_err 'supported flavors: node, iojs' + return 4 + ;; + esac + + local TYPE + TYPE="${2-}" + + local PREFIXED_VERSION + PREFIXED_VERSION="${3-}" + if [ -z "${PREFIXED_VERSION}" ]; then + nvm_err 'A version number is required.' 
+ return 3 + fi + + local nosource + nosource="${4-}" + + local VERSION + VERSION="$(nvm_strip_iojs_prefix "${PREFIXED_VERSION}")" + + local NVM_OS + NVM_OS="$(nvm_get_os)" + + if [ -z "${NVM_OS}" ]; then + return 2 + fi + + local TARBALL + local TMPDIR + + local PROGRESS_BAR + local NODE_OR_IOJS + if [ "${FLAVOR}" = 'node' ]; then + NODE_OR_IOJS="${FLAVOR}" + elif [ "${FLAVOR}" = 'iojs' ]; then + NODE_OR_IOJS="io.js" + fi + if [ "${NVM_NO_PROGRESS-}" = "1" ]; then + # --silent, --show-error, use short option as @samrocketman mentions the compatibility issue. + PROGRESS_BAR="-sS" + else + PROGRESS_BAR="--progress-bar" + fi + nvm_echo "Downloading and installing ${NODE_OR_IOJS-} ${VERSION}..." + TARBALL="$(PROGRESS_BAR="${PROGRESS_BAR}" nvm_download_artifact "${FLAVOR}" binary "${TYPE-}" "${VERSION}" | command tail -1)" + if [ -f "${TARBALL}" ]; then + TMPDIR="$(dirname "${TARBALL}")/files" + fi + + if nvm_install_binary_extract "${NVM_OS}" "${PREFIXED_VERSION}" "${VERSION}" "${TARBALL}" "${TMPDIR}"; then + if [ -n "${ALIAS-}" ]; then + nvm alias "${ALIAS}" "${provided_version}" + fi + return 0 + fi + + + # Read nosource from arguments + if [ "${nosource-}" = '1' ]; then + nvm_err 'Binary download failed. Download from source aborted.' + return 0 + fi + + nvm_err 'Binary download failed, trying source.' + if [ -n "${TMPDIR-}" ]; then + command rm -rf "${TMPDIR}" + fi + return 1 +} + +# args: flavor, kind, version +nvm_get_download_slug() { + local FLAVOR + case "${1-}" in + node | iojs) FLAVOR="${1}" ;; + *) + nvm_err 'supported flavors: node, iojs' + return 1 + ;; + esac + + local KIND + case "${2-}" in + binary | source) KIND="${2}" ;; + *) + nvm_err 'supported kinds: binary, source' + return 2 + ;; + esac + + local VERSION + VERSION="${3-}" + + local NVM_OS + NVM_OS="$(nvm_get_os)" + + local NVM_ARCH + NVM_ARCH="$(nvm_get_arch)" + if ! 
nvm_is_merged_node_version "${VERSION}"; then + if [ "${NVM_ARCH}" = 'armv6l' ] || [ "${NVM_ARCH}" = 'armv7l' ]; then + NVM_ARCH="arm-pi" + fi + fi + + # If running MAC M1 :: Node v14.17.0 was the first version to offer official experimental support: + # https://github.com/nodejs/node/issues/40126 (although binary distributions aren't available until v16) + if \ + nvm_version_greater '14.17.0' "${VERSION}" \ + || (nvm_version_greater_than_or_equal_to "${VERSION}" '15.0.0' && nvm_version_greater '16.0.0' "${VERSION}") \ + ; then + if [ "_${NVM_OS}" = '_darwin' ] && [ "${NVM_ARCH}" = 'arm64' ]; then + NVM_ARCH=x64 + fi + fi + + if [ "${KIND}" = 'binary' ]; then + nvm_echo "${FLAVOR}-${VERSION}-${NVM_OS}-${NVM_ARCH}" + elif [ "${KIND}" = 'source' ]; then + nvm_echo "${FLAVOR}-${VERSION}" + fi +} + +nvm_get_artifact_compression() { + local VERSION + VERSION="${1-}" + + local NVM_OS + NVM_OS="$(nvm_get_os)" + + local COMPRESSION + COMPRESSION='tar.gz' + if [ "_${NVM_OS}" = '_win' ]; then + COMPRESSION='zip' + elif nvm_supports_xz "${VERSION}"; then + COMPRESSION='tar.xz' + fi + + nvm_echo "${COMPRESSION}" +} + +# args: flavor, kind, type, version +nvm_download_artifact() { + local FLAVOR + case "${1-}" in + node | iojs) FLAVOR="${1}" ;; + *) + nvm_err 'supported flavors: node, iojs' + return 1 + ;; + esac + + local KIND + case "${2-}" in + binary | source) KIND="${2}" ;; + *) + nvm_err 'supported kinds: binary, source' + return 1 + ;; + esac + + local TYPE + TYPE="${3-}" + + local MIRROR + MIRROR="$(nvm_get_mirror "${FLAVOR}" "${TYPE}")" + if [ -z "${MIRROR}" ]; then + return 2 + fi + + local VERSION + VERSION="${4}" + + if [ -z "${VERSION}" ]; then + nvm_err 'A version number is required.' + return 3 + fi + + if [ "${KIND}" = 'binary' ] && ! nvm_binary_available "${VERSION}"; then + nvm_err "No precompiled binary available for ${VERSION}." 
+ return + fi + + local SLUG + SLUG="$(nvm_get_download_slug "${FLAVOR}" "${KIND}" "${VERSION}")" + + local COMPRESSION + COMPRESSION="$(nvm_get_artifact_compression "${VERSION}")" + + local CHECKSUM + CHECKSUM="$(nvm_get_checksum "${FLAVOR}" "${TYPE}" "${VERSION}" "${SLUG}" "${COMPRESSION}")" + + local tmpdir + if [ "${KIND}" = 'binary' ]; then + tmpdir="$(nvm_cache_dir)/bin/${SLUG}" + else + tmpdir="$(nvm_cache_dir)/src/${SLUG}" + fi + command mkdir -p "${tmpdir}/files" || ( + nvm_err "creating directory ${tmpdir}/files failed" + return 3 + ) + + local TARBALL + TARBALL="${tmpdir}/${SLUG}.${COMPRESSION}" + local TARBALL_URL + if nvm_version_greater_than_or_equal_to "${VERSION}" 0.1.14; then + TARBALL_URL="${MIRROR}/${VERSION}/${SLUG}.${COMPRESSION}" + else + # node <= 0.1.13 does not have a directory + TARBALL_URL="${MIRROR}/${SLUG}.${COMPRESSION}" + fi + + if [ -r "${TARBALL}" ]; then + nvm_err "Local cache found: $(nvm_sanitize_path "${TARBALL}")" + if nvm_compare_checksum "${TARBALL}" "${CHECKSUM}" >/dev/null 2>&1; then + nvm_err "Checksums match! Using existing downloaded archive $(nvm_sanitize_path "${TARBALL}")" + nvm_echo "${TARBALL}" + return 0 + fi + nvm_compare_checksum "${TARBALL}" "${CHECKSUM}" + nvm_err "Checksum check failed!" + nvm_err "Removing the broken local cache..." + command rm -rf "${TARBALL}" + fi + nvm_err "Downloading ${TARBALL_URL}..." + nvm_download -L -C - "${PROGRESS_BAR}" "${TARBALL_URL}" -o "${TARBALL}" || ( + command rm -rf "${TARBALL}" "${tmpdir}" + nvm_err "Binary download from ${TARBALL_URL} failed, trying source." 
+ return 4 + ) + + if nvm_grep '404 Not Found' "${TARBALL}" >/dev/null; then + command rm -rf "${TARBALL}" "${tmpdir}" + nvm_err "HTTP 404 at URL ${TARBALL_URL}" + return 5 + fi + + nvm_compare_checksum "${TARBALL}" "${CHECKSUM}" || ( + command rm -rf "${tmpdir}/files" + return 6 + ) + + nvm_echo "${TARBALL}" +} + +# args: nvm_os, version, tarball, tmpdir +nvm_extract_tarball() { + if [ "$#" -ne 4 ]; then + nvm_err 'nvm_extract_tarball requires exactly 4 arguments' + return 5 + fi + + local NVM_OS + NVM_OS="${1-}" + + local VERSION + VERSION="${2-}" + + local TARBALL + TARBALL="${3-}" + + local TMPDIR + TMPDIR="${4-}" + + local tar_compression_flag + tar_compression_flag='z' + if nvm_supports_xz "${VERSION}"; then + tar_compression_flag='J' + fi + + local tar + tar='tar' + if [ "${NVM_OS}" = 'aix' ]; then + tar='gtar' + fi + + if [ "${NVM_OS}" = 'openbsd' ]; then + if [ "${tar_compression_flag}" = 'J' ]; then + command xzcat "${TARBALL}" | "${tar}" -xf - -C "${TMPDIR}" -s '/[^\/]*\///' || return 1 + else + command "${tar}" -x${tar_compression_flag}f "${TARBALL}" -C "${TMPDIR}" -s '/[^\/]*\///' || return 1 + fi + else + command "${tar}" -x${tar_compression_flag}f "${TARBALL}" -C "${TMPDIR}" --strip-components 1 || return 1 + fi +} + +nvm_get_make_jobs() { + if nvm_is_natural_num "${1-}"; then + NVM_MAKE_JOBS="$1" + nvm_echo "number of \`make\` jobs: ${NVM_MAKE_JOBS}" + return + elif [ -n "${1-}" ]; then + unset NVM_MAKE_JOBS + nvm_err "$1 is invalid for number of \`make\` jobs, must be a natural number" + fi + local NVM_OS + NVM_OS="$(nvm_get_os)" + local NVM_CPU_CORES + case "_${NVM_OS}" in + "_linux") + NVM_CPU_CORES="$(nvm_grep -c -E '^processor.+: [0-9]+' /proc/cpuinfo)" + ;; + "_freebsd" | "_darwin" | "_openbsd") + NVM_CPU_CORES="$(sysctl -n hw.ncpu)" + ;; + "_sunos") + NVM_CPU_CORES="$(psrinfo | wc -l)" + ;; + "_aix") + NVM_CPU_CORES="$(pmcycles -m | wc -l)" + ;; + esac + if ! 
nvm_is_natural_num "${NVM_CPU_CORES}"; then + nvm_err 'Can not determine how many core(s) are available, running in single-threaded mode.' + nvm_err 'Please report an issue on GitHub to help us make nvm run faster on your computer!' + NVM_MAKE_JOBS=1 + else + nvm_echo "Detected that you have ${NVM_CPU_CORES} CPU core(s)" + if [ "${NVM_CPU_CORES}" -gt 2 ]; then + NVM_MAKE_JOBS=$((NVM_CPU_CORES - 1)) + nvm_echo "Running with ${NVM_MAKE_JOBS} threads to speed up the build" + else + NVM_MAKE_JOBS=1 + nvm_echo 'Number of CPU core(s) less than or equal to 2, running in single-threaded mode' + fi + fi +} + +# args: flavor, type, version, make jobs, additional +nvm_install_source() { + local FLAVOR + case "${1-}" in + node | iojs) FLAVOR="${1}" ;; + *) + nvm_err 'supported flavors: node, iojs' + return 4 + ;; + esac + + local TYPE + TYPE="${2-}" + + local PREFIXED_VERSION + PREFIXED_VERSION="${3-}" + if [ -z "${PREFIXED_VERSION}" ]; then + nvm_err 'A version number is required.' + return 3 + fi + + local VERSION + VERSION="$(nvm_strip_iojs_prefix "${PREFIXED_VERSION}")" + + local NVM_MAKE_JOBS + NVM_MAKE_JOBS="${4-}" + + local ADDITIONAL_PARAMETERS + ADDITIONAL_PARAMETERS="${5-}" + + local NVM_ARCH + NVM_ARCH="$(nvm_get_arch)" + if [ "${NVM_ARCH}" = 'armv6l' ] || [ "${NVM_ARCH}" = 'armv7l' ]; then + if [ -n "${ADDITIONAL_PARAMETERS}" ]; then + ADDITIONAL_PARAMETERS="--without-snapshot ${ADDITIONAL_PARAMETERS}" + else + ADDITIONAL_PARAMETERS='--without-snapshot' + fi + fi + + if [ -n "${ADDITIONAL_PARAMETERS}" ]; then + nvm_echo "Additional options while compiling: ${ADDITIONAL_PARAMETERS}" + fi + + local NVM_OS + NVM_OS="$(nvm_get_os)" + + local make + make='make' + local MAKE_CXX + case "${NVM_OS}" in + 'freebsd' | 'openbsd') + make='gmake' + MAKE_CXX="CC=${CC:-cc} CXX=${CXX:-c++}" + ;; + 'darwin') + MAKE_CXX="CC=${CC:-cc} CXX=${CXX:-c++}" + ;; + 'aix') + make='gmake' + ;; + esac + if nvm_has "clang++" && nvm_has "clang" && nvm_version_greater_than_or_equal_to 
"$(nvm_clang_version)" 3.5; then + if [ -z "${CC-}" ] || [ -z "${CXX-}" ]; then + nvm_echo "Clang v3.5+ detected! CC or CXX not specified, will use Clang as C/C++ compiler!" + MAKE_CXX="CC=${CC:-cc} CXX=${CXX:-c++}" + fi + fi + + local TARBALL + local TMPDIR + local VERSION_PATH + + if [ "${NVM_NO_PROGRESS-}" = "1" ]; then + # --silent, --show-error, use short option as @samrocketman mentions the compatibility issue. + PROGRESS_BAR="-sS" + else + PROGRESS_BAR="--progress-bar" + fi + + nvm_is_zsh && setopt local_options shwordsplit + + TARBALL="$(PROGRESS_BAR="${PROGRESS_BAR}" nvm_download_artifact "${FLAVOR}" source "${TYPE}" "${VERSION}" | command tail -1)" && \ + [ -f "${TARBALL}" ] && \ + TMPDIR="$(dirname "${TARBALL}")/files" && \ + if ! ( + # shellcheck disable=SC2086 + command mkdir -p "${TMPDIR}" && \ + nvm_extract_tarball "${NVM_OS}" "${VERSION}" "${TARBALL}" "${TMPDIR}" && \ + VERSION_PATH="$(nvm_version_path "${PREFIXED_VERSION}")" && \ + nvm_cd "${TMPDIR}" && \ + nvm_echo '$>'./configure --prefix="${VERSION_PATH}" $ADDITIONAL_PARAMETERS'<' && \ + ./configure --prefix="${VERSION_PATH}" $ADDITIONAL_PARAMETERS && \ + $make -j "${NVM_MAKE_JOBS}" ${MAKE_CXX-} && \ + command rm -f "${VERSION_PATH}" 2>/dev/null && \ + $make -j "${NVM_MAKE_JOBS}" ${MAKE_CXX-} install + ); then + nvm_err "nvm: install ${VERSION} failed!" + command rm -rf "${TMPDIR-}" + return 1 + fi +} + +nvm_use_if_needed() { + if [ "_${1-}" = "_$(nvm_ls_current)" ]; then + return + fi + nvm use "$@" +} + +nvm_install_npm_if_needed() { + local VERSION + VERSION="$(nvm_ls_current)" + if ! nvm_has "npm"; then + nvm_echo 'Installing npm...' 
+ if nvm_version_greater 0.2.0 "${VERSION}"; then + nvm_err 'npm requires node v0.2.3 or higher' + elif nvm_version_greater_than_or_equal_to "${VERSION}" 0.2.0; then + if nvm_version_greater 0.2.3 "${VERSION}"; then + nvm_err 'npm requires node v0.2.3 or higher' + else + nvm_download -L https://npmjs.org/install.sh -o - | clean=yes npm_install=0.2.19 sh + fi + else + nvm_download -L https://npmjs.org/install.sh -o - | clean=yes sh + fi + fi + return $? +} + +nvm_match_version() { + local NVM_IOJS_PREFIX + NVM_IOJS_PREFIX="$(nvm_iojs_prefix)" + local PROVIDED_VERSION + PROVIDED_VERSION="$1" + case "_${PROVIDED_VERSION}" in + "_${NVM_IOJS_PREFIX}" | '_io.js') + nvm_version "${NVM_IOJS_PREFIX}" + ;; + '_system') + nvm_echo 'system' + ;; + *) + nvm_version "${PROVIDED_VERSION}" + ;; + esac +} + +nvm_npm_global_modules() { + local NPMLIST + local VERSION + VERSION="$1" + NPMLIST=$(nvm use "${VERSION}" >/dev/null && npm list -g --depth=0 2>/dev/null | command sed 1,1d | nvm_grep -v 'UNMET PEER DEPENDENCY') + + local INSTALLS + INSTALLS=$(nvm_echo "${NPMLIST}" | command sed -e '/ -> / d' -e '/\(empty\)/ d' -e 's/^.* \(.*@[^ ]*\).*/\1/' -e '/^npm@[^ ]*.*$/ d' | command xargs) + + local LINKS + LINKS="$(nvm_echo "${NPMLIST}" | command sed -n 's/.* -> \(.*\)/\1/ p')" + + nvm_echo "${INSTALLS} //// ${LINKS}" +} + +nvm_npmrc_bad_news_bears() { + local NVM_NPMRC + NVM_NPMRC="${1-}" + if [ -n "${NVM_NPMRC}" ] && [ -f "${NVM_NPMRC}" ] && nvm_grep -Ee '^(prefix|globalconfig) *=' <"${NVM_NPMRC}" >/dev/null; then + return 0 + fi + return 1 +} + +nvm_die_on_prefix() { + local NVM_DELETE_PREFIX + NVM_DELETE_PREFIX="${1-}" + case "${NVM_DELETE_PREFIX}" in + 0 | 1) ;; + *) + nvm_err 'First argument "delete the prefix" must be zero or one' + return 1 + ;; + esac + local NVM_COMMAND + NVM_COMMAND="${2-}" + local NVM_VERSION_DIR + NVM_VERSION_DIR="${3-}" + if [ -z "${NVM_COMMAND}" ] || [ -z "${NVM_VERSION_DIR}" ]; then + nvm_err 'Second argument "nvm command", and third argument "nvm 
version dir", must both be nonempty' + return 2 + fi + + # npm first looks at $PREFIX (case-sensitive) + # we do not bother to test the value here; if this env var is set, unset it to continue. + # however, `npm exec` in npm v7.2+ sets $PREFIX; if set, inherit it + if [ -n "${PREFIX-}" ] && [ "$(nvm_version_path "$(node -v)")" != "${PREFIX}" ]; then + nvm deactivate >/dev/null 2>&1 + nvm_err "nvm is not compatible with the \"PREFIX\" environment variable: currently set to \"${PREFIX}\"" + nvm_err 'Run `unset PREFIX` to unset it.' + return 3 + fi + + local NVM_OS + NVM_OS="$(nvm_get_os)" + + # npm normalizes NPM_CONFIG_-prefixed env vars + # https://github.com/npm/npmconf/blob/22827e4038d6eebaafeb5c13ed2b92cf97b8fb82/npmconf.js#L331-L348 + # https://github.com/npm/npm/blob/5e426a78ca02d0044f8dd26e0c5f881217081cbd/lib/config/core.js#L343-L359 + # + # here, we avoid trying to replicate "which one wins" or testing the value; if any are defined, it errors + # until none are left. + local NVM_NPM_CONFIG_x_PREFIX_ENV + NVM_NPM_CONFIG_x_PREFIX_ENV="$(command awk 'BEGIN { for (name in ENVIRON) if (toupper(name) == "NPM_CONFIG_PREFIX") { print name; break } }')" + if [ -n "${NVM_NPM_CONFIG_x_PREFIX_ENV-}" ]; then + local NVM_CONFIG_VALUE + eval "NVM_CONFIG_VALUE=\"\$${NVM_NPM_CONFIG_x_PREFIX_ENV}\"" + if [ -n "${NVM_CONFIG_VALUE-}" ] && [ "_${NVM_OS}" = "_win" ]; then + NVM_CONFIG_VALUE="$(cd "$NVM_CONFIG_VALUE" 2>/dev/null && pwd)" + fi + if [ -n "${NVM_CONFIG_VALUE-}" ] && ! nvm_tree_contains_path "${NVM_DIR}" "${NVM_CONFIG_VALUE}"; then + nvm deactivate >/dev/null 2>&1 + nvm_err "nvm is not compatible with the \"${NVM_NPM_CONFIG_x_PREFIX_ENV}\" environment variable: currently set to \"${NVM_CONFIG_VALUE}\"" + nvm_err "Run \`unset ${NVM_NPM_CONFIG_x_PREFIX_ENV}\` to unset it." + return 4 + fi + fi + + # here, npm config checks npmrc files. 
+ # the stack is: cli, env, project, user, global, builtin, defaults + # cli does not apply; env is covered above, defaults don't exist for prefix + # there are 4 npmrc locations to check: project, global, user, and builtin + # project: find the closest node_modules or package.json-containing dir, `.npmrc` + # global: default prefix + `/etc/npmrc` + # user: $HOME/.npmrc + # builtin: npm install location, `npmrc` + # + # if any of them have a `prefix`, fail. + # if any have `globalconfig`, fail also, just in case, to avoid spidering configs. + + local NVM_NPM_BUILTIN_NPMRC + NVM_NPM_BUILTIN_NPMRC="${NVM_VERSION_DIR}/lib/node_modules/npm/npmrc" + if nvm_npmrc_bad_news_bears "${NVM_NPM_BUILTIN_NPMRC}"; then + if [ "_${NVM_DELETE_PREFIX}" = "_1" ]; then + npm config --loglevel=warn delete prefix --userconfig="${NVM_NPM_BUILTIN_NPMRC}" + npm config --loglevel=warn delete globalconfig --userconfig="${NVM_NPM_BUILTIN_NPMRC}" + else + nvm_err "Your builtin npmrc file ($(nvm_sanitize_path "${NVM_NPM_BUILTIN_NPMRC}"))" + nvm_err 'has a `globalconfig` and/or a `prefix` setting, which are incompatible with nvm.' + nvm_err "Run \`${NVM_COMMAND}\` to unset it." + return 10 + fi + fi + + local NVM_NPM_GLOBAL_NPMRC + NVM_NPM_GLOBAL_NPMRC="${NVM_VERSION_DIR}/etc/npmrc" + if nvm_npmrc_bad_news_bears "${NVM_NPM_GLOBAL_NPMRC}"; then + if [ "_${NVM_DELETE_PREFIX}" = "_1" ]; then + npm config --global --loglevel=warn delete prefix + npm config --global --loglevel=warn delete globalconfig + else + nvm_err "Your global npmrc file ($(nvm_sanitize_path "${NVM_NPM_GLOBAL_NPMRC}"))" + nvm_err 'has a `globalconfig` and/or a `prefix` setting, which are incompatible with nvm.' + nvm_err "Run \`${NVM_COMMAND}\` to unset it." 
+ return 10 + fi + fi + + local NVM_NPM_USER_NPMRC + NVM_NPM_USER_NPMRC="${HOME}/.npmrc" + if nvm_npmrc_bad_news_bears "${NVM_NPM_USER_NPMRC}"; then + if [ "_${NVM_DELETE_PREFIX}" = "_1" ]; then + npm config --loglevel=warn delete prefix --userconfig="${NVM_NPM_USER_NPMRC}" + npm config --loglevel=warn delete globalconfig --userconfig="${NVM_NPM_USER_NPMRC}" + else + nvm_err "Your user’s .npmrc file ($(nvm_sanitize_path "${NVM_NPM_USER_NPMRC}"))" + nvm_err 'has a `globalconfig` and/or a `prefix` setting, which are incompatible with nvm.' + nvm_err "Run \`${NVM_COMMAND}\` to unset it." + return 10 + fi + fi + + local NVM_NPM_PROJECT_NPMRC + NVM_NPM_PROJECT_NPMRC="$(nvm_find_project_dir)/.npmrc" + if nvm_npmrc_bad_news_bears "${NVM_NPM_PROJECT_NPMRC}"; then + if [ "_${NVM_DELETE_PREFIX}" = "_1" ]; then + npm config --loglevel=warn delete prefix + npm config --loglevel=warn delete globalconfig + else + nvm_err "Your project npmrc file ($(nvm_sanitize_path "${NVM_NPM_PROJECT_NPMRC}"))" + nvm_err 'has a `globalconfig` and/or a `prefix` setting, which are incompatible with nvm.' + nvm_err "Run \`${NVM_COMMAND}\` to unset it." + return 10 + fi + fi +} + +# Succeeds if $IOJS_VERSION represents an io.js version that has a +# Solaris binary, fails otherwise. +# Currently, only io.js 3.3.1 has a Solaris binary available, and it's the +# latest io.js version available. The expectation is that any potential io.js +# version later than v3.3.1 will also have Solaris binaries. +nvm_iojs_version_has_solaris_binary() { + local IOJS_VERSION + IOJS_VERSION="$1" + local STRIPPED_IOJS_VERSION + STRIPPED_IOJS_VERSION="$(nvm_strip_iojs_prefix "${IOJS_VERSION}")" + if [ "_${STRIPPED_IOJS_VERSION}" = "${IOJS_VERSION}" ]; then + return 1 + fi + + # io.js started shipping Solaris binaries with io.js v3.3.1 + nvm_version_greater_than_or_equal_to "${STRIPPED_IOJS_VERSION}" v3.3.1 +} + +# Succeeds if $NODE_VERSION represents a node version that has a +# Solaris binary, fails otherwise. 
+# Currently, node versions starting from v0.8.6 have a Solaris binary +# available. +nvm_node_version_has_solaris_binary() { + local NODE_VERSION + NODE_VERSION="$1" + # Error out if $NODE_VERSION is actually an io.js version + local STRIPPED_IOJS_VERSION + STRIPPED_IOJS_VERSION="$(nvm_strip_iojs_prefix "${NODE_VERSION}")" + if [ "_${STRIPPED_IOJS_VERSION}" != "_${NODE_VERSION}" ]; then + return 1 + fi + + # node (unmerged) started shipping Solaris binaries with v0.8.6 and + # node versions v1.0.0 or greater are not considered valid "unmerged" node + # versions. + nvm_version_greater_than_or_equal_to "${NODE_VERSION}" v0.8.6 \ + && ! nvm_version_greater_than_or_equal_to "${NODE_VERSION}" v1.0.0 +} + +# Succeeds if $VERSION represents a version (node, io.js or merged) that has a +# Solaris binary, fails otherwise. +nvm_has_solaris_binary() { + local VERSION="${1-}" + if nvm_is_merged_node_version "${VERSION}"; then + return 0 # All merged node versions have a Solaris binary + elif nvm_is_iojs_version "${VERSION}"; then + nvm_iojs_version_has_solaris_binary "${VERSION}" + else + nvm_node_version_has_solaris_binary "${VERSION}" + fi +} + +nvm_sanitize_path() { + local SANITIZED_PATH + SANITIZED_PATH="${1-}" + if [ "_${SANITIZED_PATH}" != "_${NVM_DIR}" ]; then + SANITIZED_PATH="$(nvm_echo "${SANITIZED_PATH}" | command sed -e "s#${NVM_DIR}#\${NVM_DIR}#g")" + fi + if [ "_${SANITIZED_PATH}" != "_${HOME}" ]; then + SANITIZED_PATH="$(nvm_echo "${SANITIZED_PATH}" | command sed -e "s#${HOME}#\${HOME}#g")" + fi + nvm_echo "${SANITIZED_PATH}" +} + +nvm_is_natural_num() { + if [ -z "$1" ]; then + return 4 + fi + case "$1" in + 0) return 1 ;; + -*) return 3 ;; # some BSDs return false positives for double-negated args + *) + [ "$1" -eq "$1" ] 2>/dev/null # returns 2 if it doesn't match + ;; + esac +} + +# Check version dir permissions +nvm_check_file_permissions() { + nvm_is_zsh && setopt local_options nonomatch + for FILE in "$1"/* "$1"/.[!.]* "$1"/..?* ; do + if [ -d "$FILE" 
]; then + if [ -n "${NVM_DEBUG-}" ]; then + nvm_err "${FILE}" + fi + if [ ! -L "${FILE}" ] && ! nvm_check_file_permissions "${FILE}"; then + return 2 + fi + elif [ -e "$FILE" ] && [ ! -w "$FILE" ] && [ ! -O "$FILE" ]; then + nvm_err "file is not writable or self-owned: $(nvm_sanitize_path "$FILE")" + return 1 + fi + done + return 0 +} + +nvm_cache_dir() { + nvm_echo "${NVM_DIR}/.cache" +} + +nvm() { + if [ "$#" -lt 1 ]; then + nvm --help + return + fi + + local DEFAULT_IFS + DEFAULT_IFS=" $(nvm_echo t | command tr t \\t) +" + if [ "${-#*e}" != "$-" ]; then + set +e + local EXIT_CODE + IFS="${DEFAULT_IFS}" nvm "$@" + EXIT_CODE="$?" + set -e + return "$EXIT_CODE" + elif [ "${-#*a}" != "$-" ]; then + set +a + local EXIT_CODE + IFS="${DEFAULT_IFS}" nvm "$@" + EXIT_CODE="$?" + set -a + return "$EXIT_CODE" + elif [ -n "${BASH-}" ] && [ "${-#*E}" != "$-" ]; then + # shellcheck disable=SC3041 + set +E + local EXIT_CODE + IFS="${DEFAULT_IFS}" nvm "$@" + EXIT_CODE="$?" + # shellcheck disable=SC3041 + set -E + return "$EXIT_CODE" + elif [ "${IFS}" != "${DEFAULT_IFS}" ]; then + IFS="${DEFAULT_IFS}" nvm "$@" + return "$?" + fi + + local i + for i in "$@" + do + case $i in + --) break ;; + '-h'|'help'|'--help') + NVM_NO_COLORS="" + for j in "$@"; do + if [ "${j}" = '--no-colors' ]; then + NVM_NO_COLORS="${j}" + break + fi + done + + local NVM_IOJS_PREFIX + NVM_IOJS_PREFIX="$(nvm_iojs_prefix)" + local NVM_NODE_PREFIX + NVM_NODE_PREFIX="$(nvm_node_prefix)" + NVM_VERSION="$(nvm --version)" + nvm_echo + nvm_echo "Node Version Manager (v${NVM_VERSION})" + nvm_echo + nvm_echo 'Note: refers to any version-like string nvm understands. 
This includes:' + nvm_echo ' - full or partial version numbers, starting with an optional "v" (0.10, v0.1.2, v1)' + nvm_echo " - default (built-in) aliases: ${NVM_NODE_PREFIX}, stable, unstable, ${NVM_IOJS_PREFIX}, system" + nvm_echo ' - custom aliases you define with `nvm alias foo`' + nvm_echo + nvm_echo ' Any options that produce colorized output should respect the `--no-colors` option.' + nvm_echo + nvm_echo 'Usage:' + nvm_echo ' nvm --help Show this message' + nvm_echo ' --no-colors Suppress colored output' + nvm_echo ' nvm --version Print out the installed version of nvm' + nvm_echo ' nvm install [] Download and install a . Uses .nvmrc if available and version is omitted.' + nvm_echo ' The following optional arguments, if provided, must appear directly after `nvm install`:' + nvm_echo ' -s Skip binary download, install from source only.' + nvm_echo ' -b Skip source download, install from binary only.' + nvm_echo ' --reinstall-packages-from= When installing, reinstall packages installed in ' + nvm_echo ' --lts When installing, only select from LTS (long-term support) versions' + nvm_echo ' --lts= When installing, only select from versions for a specific LTS line' + nvm_echo ' --skip-default-packages When installing, skip the default-packages file if it exists' + nvm_echo ' --latest-npm After installing, attempt to upgrade to the latest working npm on the given node version' + nvm_echo ' --no-progress Disable the progress bar on any downloads' + nvm_echo ' --alias= After installing, set the alias specified to the version specified. (same as: nvm alias )' + nvm_echo ' --default After installing, set default alias to the version specified. (same as: nvm alias default )' + nvm_echo ' nvm uninstall Uninstall a version' + nvm_echo ' nvm uninstall --lts Uninstall using automatic LTS (long-term support) alias `lts/*`, if available.' + nvm_echo ' nvm uninstall --lts= Uninstall using automatic alias for provided LTS line, if available.' 
+ nvm_echo ' nvm use [] Modify PATH to use . Uses .nvmrc if available and version is omitted.' + nvm_echo ' The following optional arguments, if provided, must appear directly after `nvm use`:' + nvm_echo ' --silent Silences stdout/stderr output' + nvm_echo ' --lts Uses automatic LTS (long-term support) alias `lts/*`, if available.' + nvm_echo ' --lts= Uses automatic alias for provided LTS line, if available.' + nvm_echo ' nvm exec [] [] Run on . Uses .nvmrc if available and version is omitted.' + nvm_echo ' The following optional arguments, if provided, must appear directly after `nvm exec`:' + nvm_echo ' --silent Silences stdout/stderr output' + nvm_echo ' --lts Uses automatic LTS (long-term support) alias `lts/*`, if available.' + nvm_echo ' --lts= Uses automatic alias for provided LTS line, if available.' + nvm_echo ' nvm run [] [] Run `node` on with as arguments. Uses .nvmrc if available and version is omitted.' + nvm_echo ' The following optional arguments, if provided, must appear directly after `nvm run`:' + nvm_echo ' --silent Silences stdout/stderr output' + nvm_echo ' --lts Uses automatic LTS (long-term support) alias `lts/*`, if available.' + nvm_echo ' --lts= Uses automatic alias for provided LTS line, if available.' 
+ nvm_echo ' nvm current Display currently activated version of Node' + nvm_echo ' nvm ls [] List installed versions, matching a given if provided' + nvm_echo ' --no-colors Suppress colored output' + nvm_echo ' --no-alias Suppress `nvm alias` output' + nvm_echo ' nvm ls-remote [] List remote versions available for install, matching a given if provided' + nvm_echo ' --lts When listing, only show LTS (long-term support) versions' + nvm_echo ' --lts= When listing, only show versions for a specific LTS line' + nvm_echo ' --no-colors Suppress colored output' + nvm_echo ' nvm version Resolve the given description to a single local version' + nvm_echo ' nvm version-remote Resolve the given description to a single remote version' + nvm_echo ' --lts When listing, only select from LTS (long-term support) versions' + nvm_echo ' --lts= When listing, only select from versions for a specific LTS line' + nvm_echo ' nvm deactivate Undo effects of `nvm` on current shell' + nvm_echo ' --silent Silences stdout/stderr output' + nvm_echo ' nvm alias [] Show all aliases beginning with ' + nvm_echo ' --no-colors Suppress colored output' + nvm_echo ' nvm alias Set an alias named pointing to ' + nvm_echo ' nvm unalias Deletes the alias named ' + nvm_echo ' nvm install-latest-npm Attempt to upgrade to the latest working `npm` on the current node version' + nvm_echo ' nvm reinstall-packages Reinstall global `npm` packages contained in to current version' + nvm_echo ' nvm unload Unload `nvm` from shell' + nvm_echo ' nvm which [current | ] Display path to installed node version. Uses .nvmrc if available and version is omitted.' + nvm_echo ' --silent Silences stdout/stderr output when a version is omitted' + nvm_echo ' nvm cache dir Display path to the cache directory for nvm' + nvm_echo ' nvm cache clear Empty cache directory for nvm' + nvm_echo ' nvm set-colors [] Set five text colors using format "yMeBg". Available when supported.' 
+ nvm_echo ' Initial colors are:' + nvm_echo_with_colors " $(nvm_wrap_with_color_code b b)$(nvm_wrap_with_color_code y y)$(nvm_wrap_with_color_code g g)$(nvm_wrap_with_color_code r r)$(nvm_wrap_with_color_code e e)" + nvm_echo ' Color codes:' + nvm_echo_with_colors " $(nvm_wrap_with_color_code r r)/$(nvm_wrap_with_color_code R R) = $(nvm_wrap_with_color_code r red) / $(nvm_wrap_with_color_code R 'bold red')" + nvm_echo_with_colors " $(nvm_wrap_with_color_code g g)/$(nvm_wrap_with_color_code G G) = $(nvm_wrap_with_color_code g green) / $(nvm_wrap_with_color_code G 'bold green')" + nvm_echo_with_colors " $(nvm_wrap_with_color_code b b)/$(nvm_wrap_with_color_code B B) = $(nvm_wrap_with_color_code b blue) / $(nvm_wrap_with_color_code B 'bold blue')" + nvm_echo_with_colors " $(nvm_wrap_with_color_code c c)/$(nvm_wrap_with_color_code C C) = $(nvm_wrap_with_color_code c cyan) / $(nvm_wrap_with_color_code C 'bold cyan')" + nvm_echo_with_colors " $(nvm_wrap_with_color_code m m)/$(nvm_wrap_with_color_code M M) = $(nvm_wrap_with_color_code m magenta) / $(nvm_wrap_with_color_code M 'bold magenta')" + nvm_echo_with_colors " $(nvm_wrap_with_color_code y y)/$(nvm_wrap_with_color_code Y Y) = $(nvm_wrap_with_color_code y yellow) / $(nvm_wrap_with_color_code Y 'bold yellow')" + nvm_echo_with_colors " $(nvm_wrap_with_color_code k k)/$(nvm_wrap_with_color_code K K) = $(nvm_wrap_with_color_code k black) / $(nvm_wrap_with_color_code K 'bold black')" + nvm_echo_with_colors " $(nvm_wrap_with_color_code e e)/$(nvm_wrap_with_color_code W W) = $(nvm_wrap_with_color_code e 'light grey') / $(nvm_wrap_with_color_code W white)" + nvm_echo 'Example:' + nvm_echo ' nvm install 8.0.0 Install a specific version number' + nvm_echo ' nvm use 8.0 Use the latest available 8.0.x release' + nvm_echo ' nvm run 6.10.3 app.js Run app.js using node 6.10.3' + nvm_echo ' nvm exec 4.8.3 node app.js Run `node app.js` with the PATH pointing to node 4.8.3' + nvm_echo ' nvm alias default 8.1.0 Set default node 
version on a shell' + nvm_echo ' nvm alias default node Always default to the latest available node version on a shell' + nvm_echo + nvm_echo ' nvm install node Install the latest available version' + nvm_echo ' nvm use node Use the latest version' + nvm_echo ' nvm install --lts Install the latest LTS version' + nvm_echo ' nvm use --lts Use the latest LTS version' + nvm_echo + nvm_echo ' nvm set-colors cgYmW Set text colors to cyan, green, bold yellow, magenta, and white' + nvm_echo + nvm_echo 'Note:' + nvm_echo ' to remove, delete, or uninstall nvm - just remove the `$NVM_DIR` folder (usually `~/.nvm`)' + nvm_echo + return 0; + ;; + esac + done + + local COMMAND + COMMAND="${1-}" + shift + + # initialize local variables + local VERSION + local ADDITIONAL_PARAMETERS + + case $COMMAND in + "cache") + case "${1-}" in + dir) nvm_cache_dir ;; + clear) + local DIR + DIR="$(nvm_cache_dir)" + if command rm -rf "${DIR}" && command mkdir -p "${DIR}"; then + nvm_echo 'nvm cache cleared.' + else + nvm_err "Unable to clear nvm cache: ${DIR}" + return 1 + fi + ;; + *) + >&2 nvm --help + return 127 + ;; + esac + ;; + + "debug") + local OS_VERSION + nvm_is_zsh && setopt local_options shwordsplit + nvm_err "nvm --version: v$(nvm --version)" + if [ -n "${TERM_PROGRAM-}" ]; then + nvm_err "\$TERM_PROGRAM: ${TERM_PROGRAM}" + fi + nvm_err "\$SHELL: ${SHELL}" + # shellcheck disable=SC2169,SC3028 + nvm_err "\$SHLVL: ${SHLVL-}" + nvm_err "whoami: '$(whoami)'" + nvm_err "\${HOME}: ${HOME}" + nvm_err "\${NVM_DIR}: '$(nvm_sanitize_path "${NVM_DIR}")'" + nvm_err "\${PATH}: $(nvm_sanitize_path "${PATH}")" + nvm_err "\$PREFIX: '$(nvm_sanitize_path "${PREFIX}")'" + nvm_err "\${NPM_CONFIG_PREFIX}: '$(nvm_sanitize_path "${NPM_CONFIG_PREFIX}")'" + nvm_err "\$NVM_NODEJS_ORG_MIRROR: '${NVM_NODEJS_ORG_MIRROR}'" + nvm_err "\$NVM_IOJS_ORG_MIRROR: '${NVM_IOJS_ORG_MIRROR}'" + nvm_err "shell version: '$(${SHELL} --version | command head -n 1)'" + nvm_err "uname -a: '$(command uname -a | command awk 
'{$2=""; print}' | command xargs)'" + nvm_err "checksum binary: '$(nvm_get_checksum_binary 2>/dev/null)'" + if [ "$(nvm_get_os)" = "darwin" ] && nvm_has sw_vers; then + OS_VERSION="$(sw_vers | command awk '{print $2}' | command xargs)" + elif [ -r "/etc/issue" ]; then + OS_VERSION="$(command head -n 1 /etc/issue | command sed 's/\\.//g')" + if [ -z "${OS_VERSION}" ] && [ -r "/etc/os-release" ]; then + # shellcheck disable=SC1091 + OS_VERSION="$(. /etc/os-release && echo "${NAME}" "${VERSION}")" + fi + fi + if [ -n "${OS_VERSION}" ]; then + nvm_err "OS version: ${OS_VERSION}" + fi + if nvm_has "awk"; then + nvm_err "awk: $(nvm_command_info awk), $({ command awk --version 2>/dev/null || command awk -W version; } \ + | command head -n 1)" + else + nvm_err "awk: not found" + fi + if nvm_has "curl"; then + nvm_err "curl: $(nvm_command_info curl), $(command curl -V | command head -n 1)" + else + nvm_err "curl: not found" + fi + if nvm_has "wget"; then + nvm_err "wget: $(nvm_command_info wget), $(command wget -V | command head -n 1)" + else + nvm_err "wget: not found" + fi + + local TEST_TOOLS ADD_TEST_TOOLS + TEST_TOOLS="git grep" + ADD_TEST_TOOLS="sed cut basename rm mkdir xargs" + if [ "darwin" != "$(nvm_get_os)" ] && [ "freebsd" != "$(nvm_get_os)" ]; then + TEST_TOOLS="${TEST_TOOLS} ${ADD_TEST_TOOLS}" + else + for tool in ${ADD_TEST_TOOLS} ; do + if nvm_has "${tool}"; then + nvm_err "${tool}: $(nvm_command_info "${tool}")" + else + nvm_err "${tool}: not found" + fi + done + fi + for tool in ${TEST_TOOLS} ; do + local NVM_TOOL_VERSION + if nvm_has "${tool}"; then + if command ls -l "$(nvm_command_info "${tool}" | command awk '{print $1}')" | command grep -q busybox; then + NVM_TOOL_VERSION="$(command "${tool}" --help 2>&1 | command head -n 1)" + else + NVM_TOOL_VERSION="$(command "${tool}" --version 2>&1 | command head -n 1)" + fi + nvm_err "${tool}: $(nvm_command_info "${tool}"), ${NVM_TOOL_VERSION}" + else + nvm_err "${tool}: not found" + fi + unset NVM_TOOL_VERSION 
+ done + unset TEST_TOOLS ADD_TEST_TOOLS + + local NVM_DEBUG_OUTPUT + for NVM_DEBUG_COMMAND in 'nvm current' 'which node' 'which iojs' 'which npm' 'npm config get prefix' 'npm root -g'; do + NVM_DEBUG_OUTPUT="$(${NVM_DEBUG_COMMAND} 2>&1)" + nvm_err "${NVM_DEBUG_COMMAND}: $(nvm_sanitize_path "${NVM_DEBUG_OUTPUT}")" + done + return 42 + ;; + + "install" | "i") + local version_not_provided + version_not_provided=0 + local NVM_OS + NVM_OS="$(nvm_get_os)" + + if ! nvm_has "curl" && ! nvm_has "wget"; then + nvm_err 'nvm needs curl or wget to proceed.' + return 1 + fi + + if [ $# -lt 1 ]; then + version_not_provided=1 + fi + + local nobinary + local nosource + local noprogress + nobinary=0 + noprogress=0 + nosource=0 + local LTS + local ALIAS + local NVM_UPGRADE_NPM + NVM_UPGRADE_NPM=0 + + local PROVIDED_REINSTALL_PACKAGES_FROM + local REINSTALL_PACKAGES_FROM + local SKIP_DEFAULT_PACKAGES + + while [ $# -ne 0 ]; do + case "$1" in + ---*) + nvm_err 'arguments with `---` are not supported - this is likely a typo' + return 55; + ;; + -s) + shift # consume "-s" + nobinary=1 + if [ $nosource -eq 1 ]; then + nvm err '-s and -b cannot be set together since they would skip install from both binary and source' + return 6 + fi + ;; + -b) + shift # consume "-b" + nosource=1 + if [ $nobinary -eq 1 ]; then + nvm err '-s and -b cannot be set together since they would skip install from both binary and source' + return 6 + fi + ;; + -j) + shift # consume "-j" + nvm_get_make_jobs "$1" + shift # consume job count + ;; + --no-progress) + noprogress=1 + shift + ;; + --lts) + LTS='*' + shift + ;; + --lts=*) + LTS="${1##--lts=}" + shift + ;; + --latest-npm) + NVM_UPGRADE_NPM=1 + shift + ;; + --default) + if [ -n "${ALIAS-}" ]; then + nvm_err '--default and --alias are mutually exclusive, and may not be provided more than once' + return 6 + fi + ALIAS='default' + shift + ;; + --alias=*) + if [ -n "${ALIAS-}" ]; then + nvm_err '--default and --alias are mutually exclusive, and may not be 
provided more than once' + return 6 + fi + ALIAS="${1##--alias=}" + shift + ;; + --reinstall-packages-from=*) + if [ -n "${PROVIDED_REINSTALL_PACKAGES_FROM-}" ]; then + nvm_err '--reinstall-packages-from may not be provided more than once' + return 6 + fi + PROVIDED_REINSTALL_PACKAGES_FROM="$(nvm_echo "$1" | command cut -c 27-)" + if [ -z "${PROVIDED_REINSTALL_PACKAGES_FROM}" ]; then + nvm_err 'If --reinstall-packages-from is provided, it must point to an installed version of node.' + return 6 + fi + REINSTALL_PACKAGES_FROM="$(nvm_version "${PROVIDED_REINSTALL_PACKAGES_FROM}")" ||: + shift + ;; + --copy-packages-from=*) + if [ -n "${PROVIDED_REINSTALL_PACKAGES_FROM-}" ]; then + nvm_err '--reinstall-packages-from may not be provided more than once, or combined with `--copy-packages-from`' + return 6 + fi + PROVIDED_REINSTALL_PACKAGES_FROM="$(nvm_echo "$1" | command cut -c 22-)" + if [ -z "${PROVIDED_REINSTALL_PACKAGES_FROM}" ]; then + nvm_err 'If --copy-packages-from is provided, it must point to an installed version of node.' + return 6 + fi + REINSTALL_PACKAGES_FROM="$(nvm_version "${PROVIDED_REINSTALL_PACKAGES_FROM}")" ||: + shift + ;; + --reinstall-packages-from | --copy-packages-from) + nvm_err "If ${1} is provided, it must point to an installed version of node using \`=\`." + return 6 + ;; + --skip-default-packages) + SKIP_DEFAULT_PACKAGES=true + shift + ;; + *) + break # stop parsing args + ;; + esac + done + + local provided_version + provided_version="${1-}" + + if [ -z "${provided_version}" ]; then + if [ "_${LTS-}" = '_*' ]; then + nvm_echo 'Installing latest LTS version.' 
+ if [ $# -gt 0 ]; then + shift + fi + elif [ "_${LTS-}" != '_' ]; then + nvm_echo "Installing with latest version of LTS line: ${LTS}" + if [ $# -gt 0 ]; then + shift + fi + else + nvm_rc_version + if [ $version_not_provided -eq 1 ] && [ -z "${NVM_RC_VERSION}" ]; then + unset NVM_RC_VERSION + >&2 nvm --help + return 127 + fi + provided_version="${NVM_RC_VERSION}" + unset NVM_RC_VERSION + fi + elif [ $# -gt 0 ]; then + shift + fi + + case "${provided_version}" in + 'lts/*') + LTS='*' + provided_version='' + ;; + lts/*) + LTS="${provided_version##lts/}" + provided_version='' + ;; + esac + + VERSION="$(NVM_VERSION_ONLY=true NVM_LTS="${LTS-}" nvm_remote_version "${provided_version}")" + + if [ "${VERSION}" = 'N/A' ]; then + local LTS_MSG + local REMOTE_CMD + if [ "${LTS-}" = '*' ]; then + LTS_MSG='(with LTS filter) ' + REMOTE_CMD='nvm ls-remote --lts' + elif [ -n "${LTS-}" ]; then + LTS_MSG="(with LTS filter '${LTS}') " + REMOTE_CMD="nvm ls-remote --lts=${LTS}" + else + REMOTE_CMD='nvm ls-remote' + fi + nvm_err "Version '${provided_version}' ${LTS_MSG-}not found - try \`${REMOTE_CMD}\` to browse available versions." + return 3 + fi + + ADDITIONAL_PARAMETERS='' + + while [ $# -ne 0 ]; do + case "$1" in + --reinstall-packages-from=*) + if [ -n "${PROVIDED_REINSTALL_PACKAGES_FROM-}" ]; then + nvm_err '--reinstall-packages-from may not be provided more than once' + return 6 + fi + PROVIDED_REINSTALL_PACKAGES_FROM="$(nvm_echo "$1" | command cut -c 27-)" + if [ -z "${PROVIDED_REINSTALL_PACKAGES_FROM}" ]; then + nvm_err 'If --reinstall-packages-from is provided, it must point to an installed version of node.' 
+ return 6 + fi + REINSTALL_PACKAGES_FROM="$(nvm_version "${PROVIDED_REINSTALL_PACKAGES_FROM}")" ||: + ;; + --copy-packages-from=*) + if [ -n "${PROVIDED_REINSTALL_PACKAGES_FROM-}" ]; then + nvm_err '--reinstall-packages-from may not be provided more than once, or combined with `--copy-packages-from`' + return 6 + fi + PROVIDED_REINSTALL_PACKAGES_FROM="$(nvm_echo "$1" | command cut -c 22-)" + if [ -z "${PROVIDED_REINSTALL_PACKAGES_FROM}" ]; then + nvm_err 'If --copy-packages-from is provided, it must point to an installed version of node.' + return 6 + fi + REINSTALL_PACKAGES_FROM="$(nvm_version "${PROVIDED_REINSTALL_PACKAGES_FROM}")" ||: + ;; + --reinstall-packages-from | --copy-packages-from) + nvm_err "If ${1} is provided, it must point to an installed version of node using \`=\`." + return 6 + ;; + --skip-default-packages) + SKIP_DEFAULT_PACKAGES=true + ;; + *) + ADDITIONAL_PARAMETERS="${ADDITIONAL_PARAMETERS} $1" + ;; + esac + shift + done + + if [ -n "${PROVIDED_REINSTALL_PACKAGES_FROM-}" ] && [ "$(nvm_ensure_version_prefix "${PROVIDED_REINSTALL_PACKAGES_FROM}")" = "${VERSION}" ]; then + nvm_err "You can't reinstall global packages from the same version of node you're installing." + return 4 + elif [ "${REINSTALL_PACKAGES_FROM-}" = 'N/A' ]; then + nvm_err "If --reinstall-packages-from is provided, it must point to an installed version of node." + return 5 + fi + + local FLAVOR + if nvm_is_iojs_version "${VERSION}"; then + FLAVOR="$(nvm_iojs_prefix)" + else + FLAVOR="$(nvm_node_prefix)" + fi + + local EXIT_CODE + EXIT_CODE=0 + + if nvm_is_version_installed "${VERSION}"; then + nvm_err "${VERSION} is already installed." + nvm use "${VERSION}" + EXIT_CODE=$? + if [ $EXIT_CODE -eq 0 ]; then + if [ "${NVM_UPGRADE_NPM}" = 1 ]; then + nvm install-latest-npm + EXIT_CODE=$? 
+ fi + if [ $EXIT_CODE -ne 0 ] && [ -z "${SKIP_DEFAULT_PACKAGES-}" ]; then + nvm_install_default_packages + fi + if [ $EXIT_CODE -ne 0 ] && [ -n "${REINSTALL_PACKAGES_FROM-}" ] && [ "_${REINSTALL_PACKAGES_FROM}" != "_N/A" ]; then + nvm reinstall-packages "${REINSTALL_PACKAGES_FROM}" + EXIT_CODE=$? + fi + fi + + if [ -n "${LTS-}" ]; then + LTS="$(echo "${LTS}" | tr '[:upper:]' '[:lower:]')" + nvm_ensure_default_set "lts/${LTS}" + else + nvm_ensure_default_set "${provided_version}" + fi + + if [ $EXIT_CODE -ne 0 ] && [ -n "${ALIAS-}" ]; then + nvm alias "${ALIAS}" "${provided_version}" + EXIT_CODE=$? + fi + + return $EXIT_CODE + fi + + if [ -n "${NVM_INSTALL_THIRD_PARTY_HOOK-}" ]; then + nvm_err '** $NVM_INSTALL_THIRD_PARTY_HOOK env var set; dispatching to third-party installation method **' + local NVM_METHOD_PREFERENCE + NVM_METHOD_PREFERENCE='binary' + if [ $nobinary -eq 1 ]; then + NVM_METHOD_PREFERENCE='source' + fi + local VERSION_PATH + VERSION_PATH="$(nvm_version_path "${VERSION}")" + "${NVM_INSTALL_THIRD_PARTY_HOOK}" "${VERSION}" "${FLAVOR}" std "${NVM_METHOD_PREFERENCE}" "${VERSION_PATH}" || { + EXIT_CODE=$? + nvm_err '*** Third-party $NVM_INSTALL_THIRD_PARTY_HOOK env var failed to install! ***' + return $EXIT_CODE + } + if ! nvm_is_version_installed "${VERSION}"; then + nvm_err '*** Third-party $NVM_INSTALL_THIRD_PARTY_HOOK env var claimed to succeed, but failed to install! ***' + return 33 + fi + EXIT_CODE=0 + else + + if [ "_${NVM_OS}" = "_freebsd" ]; then + # node.js and io.js do not have a FreeBSD binary + nobinary=1 + nvm_err "Currently, there is no binary for FreeBSD" + elif [ "_$NVM_OS" = "_openbsd" ]; then + # node.js and io.js do not have a OpenBSD binary + nobinary=1 + nvm_err "Currently, there is no binary for OpenBSD" + elif [ "_${NVM_OS}" = "_sunos" ]; then + # Not all node/io.js versions have a Solaris binary + if ! 
nvm_has_solaris_binary "${VERSION}"; then + nobinary=1 + nvm_err "Currently, there is no binary of version ${VERSION} for SunOS" + fi + fi + + # skip binary install if "nobinary" option specified. + if [ $nobinary -ne 1 ] && nvm_binary_available "${VERSION}"; then + NVM_NO_PROGRESS="${NVM_NO_PROGRESS:-${noprogress}}" nvm_install_binary "${FLAVOR}" std "${VERSION}" "${nosource}" + EXIT_CODE=$? + else + EXIT_CODE=-1 + fi + + if [ $EXIT_CODE -ne 0 ]; then + if [ -z "${NVM_MAKE_JOBS-}" ]; then + nvm_get_make_jobs + fi + + if [ "_${NVM_OS}" = "_win" ]; then + nvm_err 'Installing from source on non-WSL Windows is not supported' + EXIT_CODE=87 + else + NVM_NO_PROGRESS="${NVM_NO_PROGRESS:-${noprogress}}" nvm_install_source "${FLAVOR}" std "${VERSION}" "${NVM_MAKE_JOBS}" "${ADDITIONAL_PARAMETERS}" + EXIT_CODE=$? + fi + fi + fi + + if [ $EXIT_CODE -eq 0 ] && nvm_use_if_needed "${VERSION}" && nvm_install_npm_if_needed "${VERSION}"; then + if [ -n "${LTS-}" ]; then + nvm_ensure_default_set "lts/${LTS}" + else + nvm_ensure_default_set "${provided_version}" + fi + if [ "${NVM_UPGRADE_NPM}" = 1 ]; then + nvm install-latest-npm + EXIT_CODE=$? + fi + if [ $EXIT_CODE -eq 0 ] && [ -z "${SKIP_DEFAULT_PACKAGES-}" ]; then + nvm_install_default_packages + fi + if [ $EXIT_CODE -eq 0 ] && [ -n "${REINSTALL_PACKAGES_FROM-}" ] && [ "_${REINSTALL_PACKAGES_FROM}" != "_N/A" ]; then + nvm reinstall-packages "${REINSTALL_PACKAGES_FROM}" + EXIT_CODE=$? + fi + else + EXIT_CODE=$? 
+ fi + return $EXIT_CODE + ;; + "uninstall") + if [ $# -ne 1 ]; then + >&2 nvm --help + return 127 + fi + + local PATTERN + PATTERN="${1-}" + case "${PATTERN-}" in + --) ;; + --lts | 'lts/*') + VERSION="$(nvm_match_version "lts/*")" + ;; + lts/*) + VERSION="$(nvm_match_version "lts/${PATTERN##lts/}")" + ;; + --lts=*) + VERSION="$(nvm_match_version "lts/${PATTERN##--lts=}")" + ;; + *) + VERSION="$(nvm_version "${PATTERN}")" + ;; + esac + + if [ "_${VERSION}" = "_$(nvm_ls_current)" ]; then + if nvm_is_iojs_version "${VERSION}"; then + nvm_err "nvm: Cannot uninstall currently-active io.js version, ${VERSION} (inferred from ${PATTERN})." + else + nvm_err "nvm: Cannot uninstall currently-active node version, ${VERSION} (inferred from ${PATTERN})." + fi + return 1 + fi + + if ! nvm_is_version_installed "${VERSION}"; then + nvm_err "${VERSION} version is not installed..." + return + fi + + local SLUG_BINARY + local SLUG_SOURCE + if nvm_is_iojs_version "${VERSION}"; then + SLUG_BINARY="$(nvm_get_download_slug iojs binary std "${VERSION}")" + SLUG_SOURCE="$(nvm_get_download_slug iojs source std "${VERSION}")" + else + SLUG_BINARY="$(nvm_get_download_slug node binary std "${VERSION}")" + SLUG_SOURCE="$(nvm_get_download_slug node source std "${VERSION}")" + fi + + local NVM_SUCCESS_MSG + if nvm_is_iojs_version "${VERSION}"; then + NVM_SUCCESS_MSG="Uninstalled io.js $(nvm_strip_iojs_prefix "${VERSION}")" + else + NVM_SUCCESS_MSG="Uninstalled node ${VERSION}" + fi + + local VERSION_PATH + VERSION_PATH="$(nvm_version_path "${VERSION}")" + if ! nvm_check_file_permissions "${VERSION_PATH}"; then + nvm_err 'Cannot uninstall, incorrect permissions on installation folder.' + nvm_err 'This is usually caused by running `npm install -g` as root. Run the following commands as root to fix the permissions and then try again.' 
+ nvm_err + nvm_err " chown -R $(whoami) \"$(nvm_sanitize_path "${VERSION_PATH}")\"" + nvm_err " chmod -R u+w \"$(nvm_sanitize_path "${VERSION_PATH}")\"" + return 1 + fi + + # Delete all files related to target version. + local CACHE_DIR + CACHE_DIR="$(nvm_cache_dir)" + command rm -rf \ + "${CACHE_DIR}/bin/${SLUG_BINARY}/files" \ + "${CACHE_DIR}/src/${SLUG_SOURCE}/files" \ + "${VERSION_PATH}" 2>/dev/null + nvm_echo "${NVM_SUCCESS_MSG}" + + # rm any aliases that point to uninstalled version. + for ALIAS in $(nvm_grep -l "${VERSION}" "$(nvm_alias_path)/*" 2>/dev/null); do + nvm unalias "$(command basename "${ALIAS}")" + done + ;; + "deactivate") + local NVM_SILENT + while [ $# -ne 0 ]; do + case "${1}" in + --silent) NVM_SILENT=1 ;; + --) ;; + esac + shift + done + local NEWPATH + NEWPATH="$(nvm_strip_path "${PATH}" "/bin")" + if [ "_${PATH}" = "_${NEWPATH}" ]; then + if [ "${NVM_SILENT:-0}" -ne 1 ]; then + nvm_err "Could not find ${NVM_DIR}/*/bin in \${PATH}" + fi + else + export PATH="${NEWPATH}" + \hash -r + if [ "${NVM_SILENT:-0}" -ne 1 ]; then + nvm_echo "${NVM_DIR}/*/bin removed from \${PATH}" + fi + fi + + if [ -n "${MANPATH-}" ]; then + NEWPATH="$(nvm_strip_path "${MANPATH}" "/share/man")" + if [ "_${MANPATH}" = "_${NEWPATH}" ]; then + if [ "${NVM_SILENT:-0}" -ne 1 ]; then + nvm_err "Could not find ${NVM_DIR}/*/share/man in \${MANPATH}" + fi + else + export MANPATH="${NEWPATH}" + if [ "${NVM_SILENT:-0}" -ne 1 ]; then + nvm_echo "${NVM_DIR}/*/share/man removed from \${MANPATH}" + fi + fi + fi + + if [ -n "${NODE_PATH-}" ]; then + NEWPATH="$(nvm_strip_path "${NODE_PATH}" "/lib/node_modules")" + if [ "_${NODE_PATH}" != "_${NEWPATH}" ]; then + export NODE_PATH="${NEWPATH}" + if [ "${NVM_SILENT:-0}" -ne 1 ]; then + nvm_echo "${NVM_DIR}/*/lib/node_modules removed from \${NODE_PATH}" + fi + fi + fi + unset NVM_BIN + unset NVM_INC + ;; + "use") + local PROVIDED_VERSION + local NVM_SILENT + local NVM_SILENT_ARG + local NVM_DELETE_PREFIX + NVM_DELETE_PREFIX=0 + local 
NVM_LTS + local IS_VERSION_FROM_NVMRC + IS_VERSION_FROM_NVMRC=0 + + while [ $# -ne 0 ]; do + case "$1" in + --silent) + NVM_SILENT=1 + NVM_SILENT_ARG='--silent' + ;; + --delete-prefix) NVM_DELETE_PREFIX=1 ;; + --) ;; + --lts) NVM_LTS='*' ;; + --lts=*) NVM_LTS="${1##--lts=}" ;; + --*) ;; + *) + if [ -n "${1-}" ]; then + PROVIDED_VERSION="$1" + fi + ;; + esac + shift + done + + if [ -n "${NVM_LTS-}" ]; then + VERSION="$(nvm_match_version "lts/${NVM_LTS:-*}")" + elif [ -z "${PROVIDED_VERSION-}" ]; then + NVM_SILENT="${NVM_SILENT:-0}" nvm_rc_version + if [ -n "${NVM_RC_VERSION-}" ]; then + PROVIDED_VERSION="${NVM_RC_VERSION}" + IS_VERSION_FROM_NVMRC=1 + VERSION="$(nvm_version "${PROVIDED_VERSION}")" + fi + unset NVM_RC_VERSION + if [ -z "${VERSION}" ]; then + nvm_err 'Please see `nvm --help` or https://github.com/nvm-sh/nvm#nvmrc for more information.' + return 127 + fi + else + VERSION="$(nvm_match_version "${PROVIDED_VERSION}")" + fi + + if [ -z "${VERSION}" ]; then + >&2 nvm --help + return 127 + fi + + if [ "_${VERSION}" = '_system' ]; then + if nvm_has_system_node && nvm deactivate "${NVM_SILENT_ARG-}" >/dev/null 2>&1; then + if [ "${NVM_SILENT:-0}" -ne 1 ]; then + nvm_echo "Now using system version of node: $(node -v 2>/dev/null)$(nvm_print_npm_version)" + fi + return + elif nvm_has_system_iojs && nvm deactivate "${NVM_SILENT_ARG-}" >/dev/null 2>&1; then + if [ "${NVM_SILENT:-0}" -ne 1 ]; then + nvm_echo "Now using system version of io.js: $(iojs --version 2>/dev/null)$(nvm_print_npm_version)" + fi + return + elif [ "${NVM_SILENT:-0}" -ne 1 ]; then + nvm_err 'System version of node not found.' + fi + return 127 + elif [ "_${VERSION}" = "_∞" ]; then + if [ "${NVM_SILENT:-0}" -ne 1 ]; then + nvm_err "The alias \"${PROVIDED_VERSION}\" leads to an infinite loop. Aborting." 
+ fi + return 8 + fi + if [ "${VERSION}" = 'N/A' ]; then + if [ "${NVM_SILENT:-0}" -ne 1 ]; then + nvm_ensure_version_installed "${PROVIDED_VERSION}" "${IS_VERSION_FROM_NVMRC}" + fi + return 3 + # This nvm_ensure_version_installed call can be a performance bottleneck + # on shell startup. Perhaps we can optimize it away or make it faster. + elif ! nvm_ensure_version_installed "${VERSION}" "${IS_VERSION_FROM_NVMRC}"; then + return $? + fi + + local NVM_VERSION_DIR + NVM_VERSION_DIR="$(nvm_version_path "${VERSION}")" + + # Change current version + PATH="$(nvm_change_path "${PATH}" "/bin" "${NVM_VERSION_DIR}")" + if nvm_has manpath; then + if [ -z "${MANPATH-}" ]; then + local MANPATH + MANPATH=$(manpath) + fi + # Change current version + MANPATH="$(nvm_change_path "${MANPATH}" "/share/man" "${NVM_VERSION_DIR}")" + export MANPATH + fi + export PATH + \hash -r + export NVM_BIN="${NVM_VERSION_DIR}/bin" + export NVM_INC="${NVM_VERSION_DIR}/include/node" + if [ "${NVM_SYMLINK_CURRENT-}" = true ]; then + command rm -f "${NVM_DIR}/current" && ln -s "${NVM_VERSION_DIR}" "${NVM_DIR}/current" + fi + local NVM_USE_OUTPUT + NVM_USE_OUTPUT='' + if [ "${NVM_SILENT:-0}" -ne 1 ]; then + if nvm_is_iojs_version "${VERSION}"; then + NVM_USE_OUTPUT="Now using io.js $(nvm_strip_iojs_prefix "${VERSION}")$(nvm_print_npm_version)" + else + NVM_USE_OUTPUT="Now using node ${VERSION}$(nvm_print_npm_version)" + fi + fi + if [ "_${VERSION}" != "_system" ]; then + local NVM_USE_CMD + NVM_USE_CMD="nvm use --delete-prefix" + if [ -n "${PROVIDED_VERSION}" ]; then + NVM_USE_CMD="${NVM_USE_CMD} ${VERSION}" + fi + if [ "${NVM_SILENT:-0}" -eq 1 ]; then + NVM_USE_CMD="${NVM_USE_CMD} --silent" + fi + if ! 
nvm_die_on_prefix "${NVM_DELETE_PREFIX}" "${NVM_USE_CMD}" "${NVM_VERSION_DIR}"; then + return 11 + fi + fi + if [ -n "${NVM_USE_OUTPUT-}" ] && [ "${NVM_SILENT:-0}" -ne 1 ]; then + nvm_echo "${NVM_USE_OUTPUT}" + fi + ;; + "run") + local provided_version + local has_checked_nvmrc + has_checked_nvmrc=0 + local IS_VERSION_FROM_NVMRC + IS_VERSION_FROM_NVMRC=0 + # run given version of node + + local NVM_SILENT + local NVM_SILENT_ARG + local NVM_LTS + while [ $# -gt 0 ]; do + case "$1" in + --silent) + NVM_SILENT=1 + NVM_SILENT_ARG='--silent' + shift + ;; + --lts) NVM_LTS='*' ; shift ;; + --lts=*) NVM_LTS="${1##--lts=}" ; shift ;; + *) + if [ -n "$1" ]; then + break + else + shift + fi + ;; # stop processing arguments + esac + done + + if [ $# -lt 1 ] && [ -z "${NVM_LTS-}" ]; then + NVM_SILENT="${NVM_SILENT:-0}" nvm_rc_version && has_checked_nvmrc=1 + if [ -n "${NVM_RC_VERSION-}" ]; then + VERSION="$(nvm_version "${NVM_RC_VERSION-}")" ||: + fi + unset NVM_RC_VERSION + if [ "${VERSION:-N/A}" = 'N/A' ]; then + >&2 nvm --help + return 127 + fi + fi + + if [ -z "${NVM_LTS-}" ]; then + provided_version="$1" + if [ -n "${provided_version}" ]; then + VERSION="$(nvm_version "${provided_version}")" ||: + if [ "_${VERSION:-N/A}" = '_N/A' ] && ! 
nvm_is_valid_version "${provided_version}"; then + provided_version='' + if [ $has_checked_nvmrc -ne 1 ]; then + NVM_SILENT="${NVM_SILENT:-0}" nvm_rc_version && has_checked_nvmrc=1 + fi + provided_version="${NVM_RC_VERSION}" + IS_VERSION_FROM_NVMRC=1 + VERSION="$(nvm_version "${NVM_RC_VERSION}")" ||: + unset NVM_RC_VERSION + else + shift + fi + fi + fi + + local NVM_IOJS + if nvm_is_iojs_version "${VERSION}"; then + NVM_IOJS=true + fi + + local EXIT_CODE + + nvm_is_zsh && setopt local_options shwordsplit + local LTS_ARG + if [ -n "${NVM_LTS-}" ]; then + LTS_ARG="--lts=${NVM_LTS-}" + VERSION='' + fi + if [ "_${VERSION}" = "_N/A" ]; then + nvm_ensure_version_installed "${provided_version}" "${IS_VERSION_FROM_NVMRC}" + elif [ "${NVM_IOJS}" = true ]; then + nvm exec "${NVM_SILENT_ARG-}" "${LTS_ARG-}" "${VERSION}" iojs "$@" + else + nvm exec "${NVM_SILENT_ARG-}" "${LTS_ARG-}" "${VERSION}" node "$@" + fi + EXIT_CODE="$?" + return $EXIT_CODE + ;; + "exec") + local NVM_SILENT + local NVM_LTS + while [ $# -gt 0 ]; do + case "$1" in + --silent) NVM_SILENT=1 ; shift ;; + --lts) NVM_LTS='*' ; shift ;; + --lts=*) NVM_LTS="${1##--lts=}" ; shift ;; + --) break ;; + --*) + nvm_err "Unsupported option \"$1\"." + return 55 + ;; + *) + if [ -n "$1" ]; then + break + else + shift + fi + ;; # stop processing arguments + esac + done + + local provided_version + provided_version="$1" + if [ "${NVM_LTS-}" != '' ]; then + provided_version="lts/${NVM_LTS:-*}" + VERSION="${provided_version}" + elif [ -n "${provided_version}" ]; then + VERSION="$(nvm_version "${provided_version}")" ||: + if [ "_${VERSION}" = '_N/A' ] && ! nvm_is_valid_version "${provided_version}"; then + NVM_SILENT="${NVM_SILENT:-0}" nvm_rc_version && has_checked_nvmrc=1 + provided_version="${NVM_RC_VERSION}" + unset NVM_RC_VERSION + VERSION="$(nvm_version "${provided_version}")" ||: + else + shift + fi + fi + + nvm_ensure_version_installed "${provided_version}" + EXIT_CODE=$? 
+ if [ "${EXIT_CODE}" != "0" ]; then + # shellcheck disable=SC2086 + return $EXIT_CODE + fi + + if [ "${NVM_SILENT:-0}" -ne 1 ]; then + if [ "${NVM_LTS-}" = '*' ]; then + nvm_echo "Running node latest LTS -> $(nvm_version "${VERSION}")$(nvm use --silent "${VERSION}" && nvm_print_npm_version)" + elif [ -n "${NVM_LTS-}" ]; then + nvm_echo "Running node LTS \"${NVM_LTS-}\" -> $(nvm_version "${VERSION}")$(nvm use --silent "${VERSION}" && nvm_print_npm_version)" + elif nvm_is_iojs_version "${VERSION}"; then + nvm_echo "Running io.js $(nvm_strip_iojs_prefix "${VERSION}")$(nvm use --silent "${VERSION}" && nvm_print_npm_version)" + else + nvm_echo "Running node ${VERSION}$(nvm use --silent "${VERSION}" && nvm_print_npm_version)" + fi + fi + NODE_VERSION="${VERSION}" "${NVM_DIR}/nvm-exec" "$@" + ;; + "ls" | "list") + local PATTERN + local NVM_NO_COLORS + local NVM_NO_ALIAS + + while [ $# -gt 0 ]; do + case "${1}" in + --) ;; + --no-colors) NVM_NO_COLORS="${1}" ;; + --no-alias) NVM_NO_ALIAS="${1}" ;; + --*) + nvm_err "Unsupported option \"${1}\"." + return 55 + ;; + *) + PATTERN="${PATTERN:-$1}" + ;; + esac + shift + done + if [ -n "${PATTERN-}" ] && [ -n "${NVM_NO_ALIAS-}" ]; then + nvm_err '`--no-alias` is not supported when a pattern is provided.' + return 55 + fi + local NVM_LS_OUTPUT + local NVM_LS_EXIT_CODE + NVM_LS_OUTPUT=$(nvm_ls "${PATTERN-}") + NVM_LS_EXIT_CODE=$? + NVM_NO_COLORS="${NVM_NO_COLORS-}" nvm_print_versions "${NVM_LS_OUTPUT}" + if [ -z "${NVM_NO_ALIAS-}" ] && [ -z "${PATTERN-}" ]; then + if [ -n "${NVM_NO_COLORS-}" ]; then + nvm alias --no-colors + else + nvm alias + fi + fi + return $NVM_LS_EXIT_CODE + ;; + "ls-remote" | "list-remote") + local NVM_LTS + local PATTERN + local NVM_NO_COLORS + + while [ $# -gt 0 ]; do + case "${1-}" in + --) ;; + --lts) + NVM_LTS='*' + ;; + --lts=*) + NVM_LTS="${1##--lts=}" + ;; + --no-colors) NVM_NO_COLORS="${1}" ;; + --*) + nvm_err "Unsupported option \"${1}\"." 
+ return 55 + ;; + *) + if [ -z "${PATTERN-}" ]; then + PATTERN="${1-}" + if [ -z "${NVM_LTS-}" ]; then + case "${PATTERN}" in + 'lts/*') + NVM_LTS='*' + PATTERN='' + ;; + lts/*) + NVM_LTS="${PATTERN##lts/}" + PATTERN='' + ;; + esac + fi + fi + ;; + esac + shift + done + + local NVM_OUTPUT + local EXIT_CODE + NVM_OUTPUT="$(NVM_LTS="${NVM_LTS-}" nvm_remote_versions "${PATTERN}" &&:)" + EXIT_CODE=$? + if [ -n "${NVM_OUTPUT}" ]; then + NVM_NO_COLORS="${NVM_NO_COLORS-}" nvm_print_versions "${NVM_OUTPUT}" + return $EXIT_CODE + fi + NVM_NO_COLORS="${NVM_NO_COLORS-}" nvm_print_versions "N/A" + return 3 + ;; + "current") + nvm_version current + ;; + "which") + local NVM_SILENT + local provided_version + while [ $# -ne 0 ]; do + case "${1}" in + --silent) NVM_SILENT=1 ;; + --) ;; + *) provided_version="${1-}" ;; + esac + shift + done + if [ -z "${provided_version-}" ]; then + NVM_SILENT="${NVM_SILENT:-0}" nvm_rc_version + if [ -n "${NVM_RC_VERSION}" ]; then + provided_version="${NVM_RC_VERSION}" + VERSION=$(nvm_version "${NVM_RC_VERSION}") ||: + fi + unset NVM_RC_VERSION + elif [ "${provided_version}" != 'system' ]; then + VERSION="$(nvm_version "${provided_version}")" ||: + else + VERSION="${provided_version-}" + fi + if [ -z "${VERSION}" ]; then + >&2 nvm --help + return 127 + fi + + if [ "_${VERSION}" = '_system' ]; then + if nvm_has_system_iojs >/dev/null 2>&1 || nvm_has_system_node >/dev/null 2>&1; then + local NVM_BIN + NVM_BIN="$(nvm use system >/dev/null 2>&1 && command which node)" + if [ -n "${NVM_BIN}" ]; then + nvm_echo "${NVM_BIN}" + return + fi + return 1 + fi + nvm_err 'System version of node not found.' + return 127 + elif [ "${VERSION}" = '∞' ]; then + nvm_err "The alias \"${2}\" leads to an infinite loop. Aborting." + return 8 + fi + + nvm_ensure_version_installed "${provided_version}" + EXIT_CODE=$? 
+ if [ "${EXIT_CODE}" != "0" ]; then + # shellcheck disable=SC2086 + return $EXIT_CODE + fi + local NVM_VERSION_DIR + NVM_VERSION_DIR="$(nvm_version_path "${VERSION}")" + nvm_echo "${NVM_VERSION_DIR}/bin/node" + ;; + "alias") + local NVM_ALIAS_DIR + NVM_ALIAS_DIR="$(nvm_alias_path)" + local NVM_CURRENT + NVM_CURRENT="$(nvm_ls_current)" + + command mkdir -p "${NVM_ALIAS_DIR}/lts" + + local ALIAS + local TARGET + local NVM_NO_COLORS + ALIAS='--' + TARGET='--' + + while [ $# -gt 0 ]; do + case "${1-}" in + --) ;; + --no-colors) NVM_NO_COLORS="${1}" ;; + --*) + nvm_err "Unsupported option \"${1}\"." + return 55 + ;; + *) + if [ "${ALIAS}" = '--' ]; then + ALIAS="${1-}" + elif [ "${TARGET}" = '--' ]; then + TARGET="${1-}" + fi + ;; + esac + shift + done + + if [ -z "${TARGET}" ]; then + # for some reason the empty string was explicitly passed as the target + # so, unalias it. + nvm unalias "${ALIAS}" + return $? + elif echo "${ALIAS}" | grep -q "#"; then + nvm_err 'Aliases with a comment delimiter (#) are not supported.' + return 1 + elif [ "${TARGET}" != '--' ]; then + # a target was passed: create an alias + if [ "${ALIAS#*\/}" != "${ALIAS}" ]; then + nvm_err 'Aliases in subdirectories are not supported.' + return 1 + fi + VERSION="$(nvm_version "${TARGET}")" ||: + if [ "${VERSION}" = 'N/A' ]; then + nvm_err "! WARNING: Version '${TARGET}' does not exist." + fi + nvm_make_alias "${ALIAS}" "${TARGET}" + NVM_NO_COLORS="${NVM_NO_COLORS-}" NVM_CURRENT="${NVM_CURRENT-}" DEFAULT=false nvm_print_formatted_alias "${ALIAS}" "${TARGET}" "${VERSION}" + else + if [ "${ALIAS-}" = '--' ]; then + unset ALIAS + fi + + nvm_list_aliases "${ALIAS-}" + fi + ;; + "unalias") + local NVM_ALIAS_DIR + NVM_ALIAS_DIR="$(nvm_alias_path)" + command mkdir -p "${NVM_ALIAS_DIR}" + if [ $# -ne 1 ]; then + >&2 nvm --help + return 127 + fi + if [ "${1#*\/}" != "${1-}" ]; then + nvm_err 'Aliases in subdirectories are not supported.' 
+ return 1 + fi + + local NVM_IOJS_PREFIX + local NVM_NODE_PREFIX + NVM_IOJS_PREFIX="$(nvm_iojs_prefix)" + NVM_NODE_PREFIX="$(nvm_node_prefix)" + local NVM_ALIAS_EXISTS + NVM_ALIAS_EXISTS=0 + if [ -f "${NVM_ALIAS_DIR}/${1-}" ]; then + NVM_ALIAS_EXISTS=1 + fi + + if [ $NVM_ALIAS_EXISTS -eq 0 ]; then + case "$1" in + "stable" | "unstable" | "${NVM_IOJS_PREFIX}" | "${NVM_NODE_PREFIX}" | "system") + nvm_err "${1-} is a default (built-in) alias and cannot be deleted." + return 1 + ;; + esac + + nvm_err "Alias ${1-} doesn't exist!" + return + fi + + local NVM_ALIAS_ORIGINAL + NVM_ALIAS_ORIGINAL="$(nvm_alias "${1}")" + command rm -f "${NVM_ALIAS_DIR}/${1}" + nvm_echo "Deleted alias ${1} - restore it with \`nvm alias \"${1}\" \"${NVM_ALIAS_ORIGINAL}\"\`" + ;; + "install-latest-npm") + if [ $# -ne 0 ]; then + >&2 nvm --help + return 127 + fi + + nvm_install_latest_npm + ;; + "reinstall-packages" | "copy-packages") + if [ $# -ne 1 ]; then + >&2 nvm --help + return 127 + fi + + local PROVIDED_VERSION + PROVIDED_VERSION="${1-}" + + if [ "${PROVIDED_VERSION}" = "$(nvm_ls_current)" ] || [ "$(nvm_version "${PROVIDED_VERSION}" ||:)" = "$(nvm_ls_current)" ]; then + nvm_err 'Can not reinstall packages from the current version of node.' + return 2 + fi + + local VERSION + if [ "_${PROVIDED_VERSION}" = "_system" ]; then + if ! nvm_has_system_node && ! nvm_has_system_iojs; then + nvm_err 'No system version of node or io.js detected.' + return 3 + fi + VERSION="system" + else + VERSION="$(nvm_version "${PROVIDED_VERSION}")" ||: + fi + + local NPMLIST + NPMLIST="$(nvm_npm_global_modules "${VERSION}")" + local INSTALLS + local LINKS + INSTALLS="${NPMLIST%% //// *}" + LINKS="${NPMLIST##* //// }" + + nvm_echo "Reinstalling global packages from ${VERSION}..." + if [ -n "${INSTALLS}" ]; then + nvm_echo "${INSTALLS}" | command xargs npm install -g --quiet + else + nvm_echo "No installed global packages found..." + fi + + nvm_echo "Linking global packages from ${VERSION}..." 
+ if [ -n "${LINKS}" ]; then + ( + set -f; IFS=' +' # necessary to turn off variable expansion except for newlines + for LINK in ${LINKS}; do + set +f; unset IFS # restore variable expansion + if [ -n "${LINK}" ]; then + case "${LINK}" in + '/'*) (nvm_cd "${LINK}" && npm link) ;; + *) (nvm_cd "$(npm root -g)/../${LINK}" && npm link) + esac + fi + done + ) + else + nvm_echo "No linked global packages found..." + fi + ;; + "clear-cache") + command rm -f "${NVM_DIR}/v*" "$(nvm_version_dir)" 2>/dev/null + nvm_echo 'nvm cache cleared.' + ;; + "version") + nvm_version "${1}" + ;; + "version-remote") + local NVM_LTS + local PATTERN + while [ $# -gt 0 ]; do + case "${1-}" in + --) ;; + --lts) + NVM_LTS='*' + ;; + --lts=*) + NVM_LTS="${1##--lts=}" + ;; + --*) + nvm_err "Unsupported option \"${1}\"." + return 55 + ;; + *) + PATTERN="${PATTERN:-${1}}" + ;; + esac + shift + done + case "${PATTERN-}" in + 'lts/*') + NVM_LTS='*' + unset PATTERN + ;; + lts/*) + NVM_LTS="${PATTERN##lts/}" + unset PATTERN + ;; + esac + NVM_VERSION_ONLY=true NVM_LTS="${NVM_LTS-}" nvm_remote_version "${PATTERN:-node}" + ;; + "--version" | "-v") + nvm_echo '0.39.7' + ;; + "unload") + nvm deactivate >/dev/null 2>&1 + unset -f nvm \ + nvm_iojs_prefix nvm_node_prefix \ + nvm_add_iojs_prefix nvm_strip_iojs_prefix \ + nvm_is_iojs_version nvm_is_alias nvm_has_non_aliased \ + nvm_ls_remote nvm_ls_remote_iojs nvm_ls_remote_index_tab \ + nvm_ls nvm_remote_version nvm_remote_versions \ + nvm_install_binary nvm_install_source nvm_clang_version \ + nvm_get_mirror nvm_get_download_slug nvm_download_artifact \ + nvm_install_npm_if_needed nvm_use_if_needed nvm_check_file_permissions \ + nvm_print_versions nvm_compute_checksum \ + nvm_get_checksum_binary \ + nvm_get_checksum_alg nvm_get_checksum nvm_compare_checksum \ + nvm_version nvm_rc_version nvm_match_version \ + nvm_ensure_default_set nvm_get_arch nvm_get_os \ + nvm_print_implicit_alias nvm_validate_implicit_alias \ + nvm_resolve_alias nvm_ls_current nvm_alias 
\ + nvm_binary_available nvm_change_path nvm_strip_path \ + nvm_num_version_groups nvm_format_version nvm_ensure_version_prefix \ + nvm_normalize_version nvm_is_valid_version nvm_normalize_lts \ + nvm_ensure_version_installed nvm_cache_dir \ + nvm_version_path nvm_alias_path nvm_version_dir \ + nvm_find_nvmrc nvm_find_up nvm_find_project_dir nvm_tree_contains_path \ + nvm_version_greater nvm_version_greater_than_or_equal_to \ + nvm_print_npm_version nvm_install_latest_npm nvm_npm_global_modules \ + nvm_has_system_node nvm_has_system_iojs \ + nvm_download nvm_get_latest nvm_has nvm_install_default_packages nvm_get_default_packages \ + nvm_curl_use_compression nvm_curl_version \ + nvm_auto nvm_supports_xz \ + nvm_echo nvm_err nvm_grep nvm_cd \ + nvm_die_on_prefix nvm_get_make_jobs nvm_get_minor_version \ + nvm_has_solaris_binary nvm_is_merged_node_version \ + nvm_is_natural_num nvm_is_version_installed \ + nvm_list_aliases nvm_make_alias nvm_print_alias_path \ + nvm_print_default_alias nvm_print_formatted_alias nvm_resolve_local_alias \ + nvm_sanitize_path nvm_has_colors nvm_process_parameters \ + nvm_node_version_has_solaris_binary nvm_iojs_version_has_solaris_binary \ + nvm_curl_libz_support nvm_command_info nvm_is_zsh nvm_stdout_is_terminal \ + nvm_npmrc_bad_news_bears \ + nvm_get_colors nvm_set_colors nvm_print_color_code nvm_wrap_with_color_code nvm_format_help_message_colors \ + nvm_echo_with_colors nvm_err_with_colors \ + nvm_get_artifact_compression nvm_install_binary_extract nvm_extract_tarball \ + nvm_process_nvmrc nvm_nvmrc_invalid_msg \ + >/dev/null 2>&1 + unset NVM_RC_VERSION NVM_NODEJS_ORG_MIRROR NVM_IOJS_ORG_MIRROR NVM_DIR \ + NVM_CD_FLAGS NVM_BIN NVM_INC NVM_MAKE_JOBS \ + NVM_COLORS INSTALLED_COLOR SYSTEM_COLOR \ + CURRENT_COLOR NOT_INSTALLED_COLOR DEFAULT_COLOR LTS_COLOR \ + >/dev/null 2>&1 + ;; + "set-colors") + local EXIT_CODE + nvm_set_colors "${1-}" + EXIT_CODE=$? 
+ if [ "$EXIT_CODE" -eq 17 ]; then + >&2 nvm --help + nvm_echo + nvm_err_with_colors "\033[1;37mPlease pass in five \033[1;31mvalid color codes\033[1;37m. Choose from: rRgGbBcCyYmMkKeW\033[0m" + fi + ;; + *) + >&2 nvm --help + return 127 + ;; + esac +} + +nvm_get_default_packages() { + local NVM_DEFAULT_PACKAGE_FILE="${NVM_DIR}/default-packages" + if [ -f "${NVM_DEFAULT_PACKAGE_FILE}" ]; then + local DEFAULT_PACKAGES + DEFAULT_PACKAGES='' + + # Read lines from $NVM_DIR/default-packages + local line + # ensure a trailing newline + WORK=$(mktemp -d) || exit $? + # shellcheck disable=SC2064 + trap "command rm -rf '$WORK'" EXIT + # shellcheck disable=SC1003 + sed -e '$a\' "${NVM_DEFAULT_PACKAGE_FILE}" > "${WORK}/default-packages" + while IFS=' ' read -r line; do + # Skip empty lines. + [ -n "${line-}" ] || continue + + # Skip comment lines that begin with `#`. + [ "$(nvm_echo "${line}" | command cut -c1)" != "#" ] || continue + + # Fail on lines that have multiple space-separated words + case $line in + *\ *) + nvm_err "Only one package per line is allowed in the ${NVM_DIR}/default-packages file. Please remove any lines with multiple space-separated values." + return 1 + ;; + esac + + DEFAULT_PACKAGES="${DEFAULT_PACKAGES}${line} " + done < "${WORK}/default-packages" + echo "${DEFAULT_PACKAGES}" | command xargs + fi +} + +nvm_install_default_packages() { + local DEFAULT_PACKAGES + DEFAULT_PACKAGES="$(nvm_get_default_packages)" + EXIT_CODE=$? + if [ $EXIT_CODE -ne 0 ] || [ -z "${DEFAULT_PACKAGES}" ]; then + return $EXIT_CODE + fi + nvm_echo "Installing default global packages from ${NVM_DIR}/default-packages..." + nvm_echo "npm install -g --quiet ${DEFAULT_PACKAGES}" + + if ! nvm_echo "${DEFAULT_PACKAGES}" | command xargs npm install -g --quiet; then + nvm_err "Failed installing default packages. Please check if your default-packages file or a package in it has problems!" 
+ return 1 + fi +} + +nvm_supports_xz() { + if [ -z "${1-}" ]; then + return 1 + fi + + local NVM_OS + NVM_OS="$(nvm_get_os)" + if [ "_${NVM_OS}" = '_darwin' ]; then + local MACOS_VERSION + MACOS_VERSION="$(sw_vers -productVersion)" + if nvm_version_greater "10.9.0" "${MACOS_VERSION}"; then + # macOS 10.8 and earlier doesn't support extracting xz-compressed tarballs with tar + return 1 + fi + elif [ "_${NVM_OS}" = '_freebsd' ]; then + if ! [ -e '/usr/lib/liblzma.so' ]; then + # FreeBSD without /usr/lib/liblzma.so doesn't support extracting xz-compressed tarballs with tar + return 1 + fi + else + if ! command which xz >/dev/null 2>&1; then + # Most OSes without xz on the PATH don't support extracting xz-compressed tarballs with tar + # (Should correctly handle Linux, SmartOS, maybe more) + return 1 + fi + fi + + # all node versions v4.0.0 and later have xz + if nvm_is_merged_node_version "${1}"; then + return 0 + fi + + # 0.12x: node v0.12.10 and later have xz + if nvm_version_greater_than_or_equal_to "${1}" "0.12.10" && nvm_version_greater "0.13.0" "${1}"; then + return 0 + fi + + # 0.10x: node v0.10.42 and later have xz + if nvm_version_greater_than_or_equal_to "${1}" "0.10.42" && nvm_version_greater "0.11.0" "${1}"; then + return 0 + fi + + case "${NVM_OS}" in + darwin) + # darwin only has xz for io.js v2.3.2 and later + nvm_version_greater_than_or_equal_to "${1}" "2.3.2" + ;; + *) + nvm_version_greater_than_or_equal_to "${1}" "1.0.0" + ;; + esac + return $? 
+} + +nvm_auto() { + local NVM_MODE + NVM_MODE="${1-}" + local VERSION + local NVM_CURRENT + if [ "_${NVM_MODE}" = '_install' ]; then + VERSION="$(nvm_alias default 2>/dev/null || nvm_echo)" + if [ -n "${VERSION}" ]; then + nvm install "${VERSION}" >/dev/null + elif nvm_rc_version >/dev/null 2>&1; then + nvm install >/dev/null + fi + elif [ "_$NVM_MODE" = '_use' ]; then + NVM_CURRENT="$(nvm_ls_current)" + if [ "_${NVM_CURRENT}" = '_none' ] || [ "_${NVM_CURRENT}" = '_system' ]; then + VERSION="$(nvm_resolve_local_alias default 2>/dev/null || nvm_echo)" + if [ -n "${VERSION}" ]; then + nvm use --silent "${VERSION}" >/dev/null + elif nvm_rc_version >/dev/null 2>&1; then + nvm use --silent >/dev/null + fi + else + nvm use --silent "${NVM_CURRENT}" >/dev/null + fi + elif [ "_${NVM_MODE}" != '_none' ]; then + nvm_err 'Invalid auto mode supplied.' + return 1 + fi +} + +nvm_process_parameters() { + local NVM_AUTO_MODE + NVM_AUTO_MODE='use' + while [ "$#" -ne 0 ]; do + case "$1" in + --install) NVM_AUTO_MODE='install' ;; + --no-use) NVM_AUTO_MODE='none' ;; + esac + shift + done + nvm_auto "${NVM_AUTO_MODE}" +} + +nvm_process_parameters "$@" + +} # this ensures the entire script is downloaded # \ No newline at end of file diff --git a/scripts/update_secrets.sh b/scripts/update_secrets.sh new file mode 100755 index 000000000..399d5b9ef --- /dev/null +++ b/scripts/update_secrets.sh @@ -0,0 +1,34 @@ +#!/bin/bash +set -euo pipefail + +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/common-functions.sh" + +require_dev_aws_session + +STREAMING_ENV="$(dirname "$0")/../aoe-streaming-app/.env" +SEMANTIC_ENV="$(dirname "$0")/../aoe-semantic-apis/.env" +WEB_BACKEND_ENV="$(dirname "$0")/../aoe-web-backend/.env" +DATA_ANALYTICS_ENV="$(dirname "$0")/../aoe-data-analytics/.env" +DATA_SERVICES_ENV="$(dirname "$0")/../aoe-data-services/.env" + +upload_env_to_s3() { + local ENV=$1 + local S3_BUCKET_FOLDER=$2 + + if [[ -f "$ENV" ]]; then + aws s3 cp "$ENV" 
"s3://aoe-local-dev/$S3_BUCKET_FOLDER/.env" --profile aoe-dev + echo "Copied $ENV to S3" + else + echo "Error: Missing $ENV file." + exit 1 + fi +} + +echo "Checking and copying .env files to S3." +upload_env_to_s3 "$STREAMING_ENV" "streaming-app" +upload_env_to_s3 "$SEMANTIC_ENV" "semantic-api" +upload_env_to_s3 "$WEB_BACKEND_ENV" "web-backend" +upload_env_to_s3 "$DATA_ANALYTICS_ENV" "data-analytics" +upload_env_to_s3 "$DATA_SERVICES_ENV" "data-services" + + diff --git a/start-local-env.sh b/start-local-env.sh new file mode 100755 index 000000000..64567b0a4 --- /dev/null +++ b/start-local-env.sh @@ -0,0 +1,244 @@ +#!/usr/bin/env bash +set -o errexit -o nounset -o pipefail +source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/scripts/common-functions.sh" +FETCH_SECRETS_SCRIPT="$(dirname "$0")/scripts/fetch_secrets.sh" + + +AOE_WEB_BACKEND_ENV="$(dirname "$0")/aoe-web-backend/.env" +AOE_STREAMING_APP_ENV="$(dirname "$0")/aoe-streaming-app/.env" +AOE_DATA_ANALYTICS_ENV="$(dirname "$0")/aoe-data-analytics/.env" +AOE_SEMANTIC_APIS_ENV="$(dirname "$0")/aoe-semantic-apis/.env" +AOE_DATA_SERVICES_ENV="$(dirname "$0")/aoe-data-services/.env" + +generate_cert() { + CERT_NAME="nginx-selfsigned" + CERT_DIR="$(dirname "$0")/docker/dev" + KEY_FILE="${CERT_DIR}/${CERT_NAME}.key" + CSR_FILE="${CERT_DIR}/${CERT_NAME}.csr" + CERT_FILE="${CERT_DIR}/${CERT_NAME}.crt" + CONFIG_FILE="$(dirname "$0")/docker/nginx/san.cnf" + DAYS_VALID=365 + + if [ -f "$CERT_FILE" ]; then + echo "Certificate already exists at $CERT_FILE. Skipping creation." + return 0 + fi + + # Generate OpenSSL configuration file with SAN + echo "OpenSSL configuration file created at ${CONFIG_FILE}" + + # Generate private key + echo "Generating private key..." + openssl genrsa -out "${KEY_FILE}" 2048 + + # Generate CSR with SAN + echo "Generating CSR with SAN..." 
+ openssl req -new -key "${KEY_FILE}" -out "${CSR_FILE}" -config "${CONFIG_FILE}" + + # Generate self-signed certificate with SAN + echo "Generating self-signed certificate with SAN..." + openssl x509 -req -in "${CSR_FILE}" -signkey "${KEY_FILE}" -out "${CERT_FILE}" -days "${DAYS_VALID}" -extensions req_ext -extfile "${CONFIG_FILE}" + + echo "Certificate and key generated:" + echo "Private Key: ${KEY_FILE}" + echo "Certificate Signing Request: ${CSR_FILE}" + echo "Certificate: ${CERT_FILE}" +} + +generate_cert + +check_env_files() { + missing_files=() + # Check each variable and add to missing_files if the file is missing + for env_var in AOE_WEB_BACKEND_ENV AOE_STREAMING_APP_ENV AOE_DATA_ANALYTICS_ENV AOE_SEMANTIC_APIS_ENV AOE_DATA_SERVICES_ENV; do + file_path="${!env_var}" + if [ ! -f "$file_path" ]; then + missing_files+=("$env_var") + else + echo "Found ${!env_var}" + fi + done + + # Print results and exit if any files are missing + if [ ${#missing_files[@]} -ne 0 ]; then + echo "Missing or non-existent environment files: ${missing_files[*]}" + return 1 # Exit with a non-zero status to indicate failure + else + echo "All required environment files exist." + return 0 + fi +} + + +if ! check_env_files; then + echo "Secrets not found, logging in to AWS SSO.." + require_dev_aws_session + + echo "running fetch-secrets.sh..." + bash "$FETCH_SECRETS_SCRIPT" + + if ! check_env_files; then + echo "Failed to fetch secrets. Please check the fetch_secrets.sh script and your AWS credentials." 
+ exit 1 + fi +fi + +export TRUST_STORE_PASSWORD=myPassword + +export REVISION=${revision} +compose="docker compose -f ./docker-compose.yml" +compose="$compose -f ./docker-compose.local-dev.yml" + +readonly compose + +function stop() { + $compose down --remove-orphans || true +} +trap stop EXIT + +function init { + require_command tmux + require_docker +} + +function rename_infra_panes_to_match_the_script_they_run { + tmux select-pane -t 0.0 -T redis + tmux select-pane -t 0.1 -T localstack + tmux select-pane -t 0.2 -T mongo + tmux select-pane -t 0.3 -T postgres + tmux select-pane -t 0.4 -T oidc + tmux select-pane -t 0.5 -T opensearch +} + +function rename_infra2_panes_to_match_the_script_they_run_window_2 { + tmux select-pane -t 1.0 -T zookeeper + tmux select-pane -t 1.1 -T nginx + tmux select-pane -t 1.2 -T kafka + tmux select-pane -t 1.3 -T kafka2 +} + +function rename_services_panes_to_match_the_script_they_run_window_3 { + tmux select-pane -t 2.0 -T aoe-web-backend + tmux select-pane -t 2.1 -T aoe-data-services + tmux select-pane -t 2.2 -T aoe-streaming-app + tmux select-pane -t 2.3 -T aoe-data-analytics + tmux select-pane -t 2.4 -T aoe-semantic-apis + tmux select-pane -t 2.5 -T aoe-web-frontend +} + +init + +$compose create --build -- aoe-web-frontend aoe-web-backend aoe-data-analytics aoe-semantic-apis aoe-data-services aoe-streaming-app aoe-oidc-server localstack redis mongo postgres zookeeper kafka kafka2 opensearch nginx + +session="aoe" + +tmux kill-session -t $session || true +tmux start-server +tmux new-session -d -s $session -c "$repo" -x "$(tput cols)" -y "$(tput lines)" + +readonly up_cmd="$compose up --no-log-prefix --build" +tmux set -g pane-border-status bottom +tmux rename-window -t $session:0 'infra' +tmux select-pane -t 0 + +tmux split-window -h -p 50 + +# In the first column (Pane 0), split into 3 rows +tmux select-pane -t 0 +tmux split-window -v # Pane 2 +tmux split-window -v # Pane 3 + +# In the second column (Pane 1), split into 3 rows 
+tmux select-pane -t 1 +tmux split-window -v # Pane 4 +tmux split-window -v # Pane 5 + +tmux select-layout tiled + +# Pane 0: Redis +tmux select-pane -t 0 +tmux send-keys "$up_cmd redis" C-m + +# Pane 1: Localstack +tmux select-pane -t 1 +tmux send-keys "$up_cmd localstack" C-m + +# Pane 2: MongoDB +tmux select-pane -t 2 +tmux send-keys "$up_cmd mongo" C-m + +# Pane 3: PostgreSQL +tmux select-pane -t 3 +tmux send-keys "$up_cmd postgres" C-m + +# Pane 4: oidc +tmux select-pane -t 4 +tmux send-keys "$up_cmd aoe-oidc-server" C-m + +# Pane 5: elasticsearch +tmux select-pane -t 5 +tmux send-keys "$up_cmd opensearch" C-m + +rename_infra_panes_to_match_the_script_they_run + +tmux new-window -t $session:1 -n 'infra2' +tmux select-window -t 1 +tmux select-pane -t 1.0 +tmux split-window -h -p 50 + +tmux select-pane -t 1.0 +tmux send-keys "$up_cmd zookeeper" C-m +tmux split-window -v + +tmux select-pane -t 1.2 +tmux send-keys "$up_cmd kafka" C-m +tmux split-window -v + +wait_for_container_to_be_healthy "kafka" + +tmux select-pane -t 1.3 +tmux send-keys "$up_cmd kafka2" C-m + +rename_infra2_panes_to_match_the_script_they_run_window_2 + +tmux new-window -t $session:2 -n 'services' +tmux select-window -t 2 +tmux select-pane -t 2.0 +tmux split-window -h -p 50 + +tmux select-pane -t 2.0 +tmux split-window -v # Pane 2 +tmux split-window -v # Pane 3 + +tmux select-pane -t 2.3 +tmux split-window -v # Pane 4 +tmux split-window -v # Pane 5 + +tmux select-pane -t 2.0 +tmux send-keys "$up_cmd aoe-web-backend" C-m + +wait_for_container_to_be_healthy "aoe-web-backend" + +tmux select-pane -t 2.1 +tmux send-keys "$up_cmd aoe-data-services" C-m + +tmux select-pane -t 2.2 +tmux send-keys "$up_cmd aoe-streaming-app" C-m + +tmux select-pane -t 2.3 +tmux send-keys "$up_cmd aoe-data-analytics" C-m + +tmux select-pane -t 2.4 +tmux send-keys "$up_cmd aoe-semantic-apis" C-m + +tmux select-pane -t 2.5 +tmux send-keys "$up_cmd aoe-web-frontend" C-m + +tmux select-window -t 1 +tmux select-pane -t 1.1 
+tmux send-keys "$up_cmd nginx" C-m + +rename_services_panes_to_match_the_script_they_run_window_3 +tmux select-window -t 2 +tmux select-pane -t 2.0 +tmux attach-session -t $session