From dff50d502cb6755d47d5444ac4c940f30d64f9f0 Mon Sep 17 00:00:00 2001 From: bertrand Date: Fri, 22 Mar 2024 19:38:38 +0100 Subject: [PATCH] wip --- app/logic/cloud_logic.R | 3 +- app/logic/constant.R | 9 +--- app/logic/trisk_button_logic.R | 3 +- scripts/trisk_knative/Dockerfile | 6 --- scripts/trisk_knative/api.R | 33 ++++++-------- scripts/trisk_knative/deploy.sh | 16 +------ scripts/trisk_knative/k8s-trisk-api.yaml | 40 +---------------- scripts/trisk_knative/utils.R | 57 +++++++----------------- 8 files changed, 37 insertions(+), 130 deletions(-) diff --git a/app/logic/cloud_logic.R b/app/logic/cloud_logic.R index 6f6fd7f..fa2862e 100644 --- a/app/logic/cloud_logic.R +++ b/app/logic/cloud_logic.R @@ -1,7 +1,6 @@ -trigger_trisk_api_computation <- function(trisk_run_params, api_endpoint) { +trigger_trisk_api_computation <- function(trisk_run_params, trisk_api_service) { # Define the URL # by defaylt trisk_api_service should be equalt to "trisk-api-service" - trisk_api_service <- Sys.getenv("TRISK_API_SERVICE") url <- paste0("http://", trisk_api_service, ":80/compute_trisk") # Define the body of the request diff --git a/app/logic/constant.R b/app/logic/constant.R index 888a4c9..d4855ca 100644 --- a/app/logic/constant.R +++ b/app/logic/constant.R @@ -1,14 +1,9 @@ -# export constant values from here - -# INFRASTRUCTURE CONSTANTS ==================================== +# PROJECT CONSTANTS ==================================== -api_endpoint <- Sys.getenv("TRISK_API_ENDPOINT") +TRISK_API_SERVICE <- Sys.getenv("TRISK_API_SERVICE") trisk_input_path <- file.path("app", "data", "st_inputs") backend_trisk_run_folder <- file.path("app", "data", "backend_db") - -# PROJECT CONSTANTS ==================================== - # Filter outliers in crispy when generating the analysis data # see stress.test.plot.report:::load_input_plots_data_from_tibble documentation for more details filter_crispy_outliers <- TRUE diff --git a/app/logic/trisk_button_logic.R b/app/logic/trisk_button_logic.R index 1471151..228508b 100644 --- a/app/logic/trisk_button_logic.R +++ b/app/logic/trisk_button_logic.R @@ -1,4 +1,5 @@ box::use( + app/logic/constant[TRISK_API_SERVICE], app / logic / trisk_mgmt[ run_trisk_with_params, format_error_message @@ -75,7 +76,7 @@ trisk_generator <- function( run_id <- NULL } } else if (Sys.getenv("CRISPY_APP_ENV") == "prod") { - run_id <- trigger_trisk_api_computation(trisk_run_params) + run_id <- trigger_trisk_api_computation(trisk_run_params, trisk_api_service=TRISK_API_SERVICE) } else { stop("must set environment variable CRISPY_APP_ENV to 'dev' or 'prod'") } diff --git a/scripts/trisk_knative/Dockerfile b/scripts/trisk_knative/Dockerfile index 3a78e12..7c05a18 100644 --- a/scripts/trisk_knative/Dockerfile +++ b/scripts/trisk_knative/Dockerfile @@ -38,12 +38,6 @@ ENV POSTGRES_PASSWORD=NULL ENV POSTGRES_HOST=NULL ENV POSTGRES_PORT=NULL -ENV S3_URL=NULL -ENV S3_ACCESS_KEY=NULL -ENV S3_SECRET_KEY=NULL -ENV S3_BUCKET=NULL -ENV S3_REGION=NULL - # Command to run the app # Expose the port your app will run on EXPOSE 8080 diff --git a/scripts/trisk_knative/api.R b/scripts/trisk_knative/api.R index d5558f0..e2bfa14 100644 --- a/scripts/trisk_knative/api.R +++ b/scripts/trisk_knative/api.R @@ -1,29 +1,24 @@ # Define an endpoint that accepts POST requests # Assume the JSON payload is directly analogous to the R list structure for trisk_run_param -source("./trisk_compute.R") -source("./utils.R") +source(file.path(".","trisk_compute.R")) +source(file.path(".","utils.R")) # Create a plumber router pr <- 
plumber::Plumber$new() # hardcoded input fp while the data is still part of the docker image -trisk_input_path <- file.path(".", "st_inputs") -s3_folder_path <- "st_inputs/" +TRISK_INPUT_PATH <- file.path(".", "st_inputs") +tables <- c( + "Scenarios_AnalysisInput", + "abcd_stress_test_input", + "ngfs_carbon_price", + "prewrangled_capacity_factors", + "prewrangled_financial_data_stress_test", + "price_data_long" + ) - - -if (!dir.exists(trisk_input_path)) { - download_files_from_s3( - s3_url = Sys.getenv("S3_URL"), - s3_folder_path = s3_folder_path, - local_folder_path = trisk_input_path, - s3_access_key = Sys.getenv("S3_ACCESS_KEY"), - s3_secret_key = Sys.getenv("S3_SECRET_KEY"), - s3_bucket = Sys.getenv("S3_BUCKET"), - s3_region = Sys.getenv("S3_REGION") - ) -} +download_db_tables_postgres(tables=tables, folder_path=TRISK_INPUT_PATH) validate_trisk_run_params <- function(trisk_run_params) { @@ -50,7 +45,7 @@ pr$handle("POST", "/compute_trisk", function(req, res) { run_id <- run_trisk_and_upload_results_to_db_conn( trisk_run_params = trisk_run_params, - trisk_input_path = trisk_input_path, + trisk_input_path = TRISK_INPUT_PATH, postgres_conn = postgres_conn ) @@ -62,7 +57,7 @@ pr$handle("POST", "/compute_trisk", function(req, res) { }) pr$handle("GET", "/get_possible_trisk_combinations", function(req, res) { - possible_trisk_combinations <- r2dii.climate.stress.test::get_scenario_geography_x_ald_sector(trisk_input_path) + possible_trisk_combinations <- r2dii.climate.stress.test::get_scenario_geography_x_ald_sector(TRISK_INPUT_PATH) response <- list(possible_trisk_combinations = possible_trisk_combinations) response <- jsonlite::toJSON(response, auto_unbox = TRUE) return(response) diff --git a/scripts/trisk_knative/deploy.sh b/scripts/trisk_knative/deploy.sh index 3b7fdb1..46345dd 100644 --- a/scripts/trisk_knative/deploy.sh +++ b/scripts/trisk_knative/deploy.sh @@ -2,17 +2,10 @@ # Check for correct number of arguments if [ "$#" -ne 10 ]; then - echo "Usage: $0 S3_URL S3_ACCESS_KEY S3_SECRET_KEY S3_BUCKET S3_REGION POSTGRES_USERNAME POSTGRES_PASSWORD POSTGRES_HOST POSTGRES_PORT POSTGRES_DB" + echo "Usage: $0 POSTGRES_USERNAME POSTGRES_PASSWORD POSTGRES_HOST POSTGRES_PORT POSTGRES_DB" exit 1 fi -# Assigning arguments to variables -S3_URL=$(echo -n "$1" | base64) -S3_ACCESS_KEY=$(echo -n "$2" | base64) -S3_SECRET_KEY=$(echo -n "$3" | base64) -S3_BUCKET=$(echo -n "$4" | base64) -S3_REGION=$(echo -n "$5" | base64) - POSTGRES_USERNAME=$(echo -n "$6" | base64) POSTGRES_PASSWORD=$(echo -n "$7" | base64) POSTGRES_HOST=$(echo -n "$8" | base64) @@ -24,11 +17,6 @@ sed -e "s|\${POSTGRES_USERNAME}|${POSTGRES_USERNAME}|g" \ -e "s|\${POSTGRES_PASSWORD}|${POSTGRES_PASSWORD}|g" \ -e "s|\${POSTGRES_HOST}|${POSTGRES_HOST}|g" \ -e "s|\${POSTGRES_PORT}|${POSTGRES_PORT}|g" \ - -e "s|\${POSTGRES_DB}|${POSTGRES_DB}|g" \ - -e "s|\${S3_URL}|${S3_URL}|g" \ - -e "s|\${S3_ACCESS_KEY}|${S3_ACCESS_KEY}|g" \ - -e "s|\${S3_SECRET_KEY}|${S3_SECRET_KEY}|g" \ - -e "s|\${S3_BUCKET}|${S3_BUCKET}|g" \ - -e "s|\${S3_REGION}|${S3_REGION}|g" k8s-trisk-api.yaml > k8s-trisk-api.yaml + -e "s|\${POSTGRES_DB}|${POSTGRES_DB}|g" k8s-trisk-api.yaml > k8s-trisk-api.yaml kubectl apply -f k8s-trisk-api.yaml diff --git a/scripts/trisk_knative/k8s-trisk-api.yaml b/scripts/trisk_knative/k8s-trisk-api.yaml index c72ce81..80eea8e 100644 --- a/scripts/trisk_knative/k8s-trisk-api.yaml +++ b/scripts/trisk_knative/k8s-trisk-api.yaml @@ -1,17 +1,5 @@ apiVersion: v1 kind: Secret -metadata: - name: s3-credentials -type: Opaque -data: - s3_url: ${S3_URL} 
- s3_access_key: ${S3_ACCESS_KEY} - s3_secret_key: ${S3_SECRET_KEY} - s3_bucket: ${S3_BUCKET} - s3_region: ${S3_REGION} ---- -apiVersion: v1 -kind: Secret metadata: name: db-credentials type: Opaque @@ -71,30 +59,4 @@ spec: valueFrom: secretKeyRef: name: db-credentials - key: postgres_db - - name: S3_URL - valueFrom: - secretKeyRef: - name: s3-credentials - key: crispy_s3_url - - name: S3_ACCESS_KEY - valueFrom: - secretKeyRef: - name: s3-credentials - key: crispy_s3_access_key - - name: S3_SECRET_KEY - valueFrom: - secretKeyRef: - name: s3-credentials - key: crispy_s3_secret_key - - name: S3_BUCKET - valueFrom: - secretKeyRef: - name: s3-credentials - key: crispy_s3_bucket - - name: S3_REGION - valueFrom: - secretKeyRef: - name: s3-credentials - key: crispy_s3_region - + key: postgres_db \ No newline at end of file diff --git a/scripts/trisk_knative/utils.R b/scripts/trisk_knative/utils.R index f66ef95..e02b011 100644 --- a/scripts/trisk_knative/utils.R +++ b/scripts/trisk_knative/utils.R @@ -1,44 +1,17 @@ +download_db_tables_postgres <- function(tables, folder_path) { + # Example function call + conn <- DBI::dbConnect( + RPostgres::Postgres(), + dbname = Sys.getenv("POSTGRES_DB"), + host = Sys.getenv("POSTGRES_HOST"), + port = Sys.getenv("POSTGRES_PORT"), + user = Sys.getenv("ST_POSTGRES_USERNAME"), + password = Sys.getenv("POSTGRES_PASSWORD") + ) - -download_files_from_s3 <- function( - local_folder_path, - s3_url, - s3_bucket, - s3_folder_path, - s3_access_key, - s3_secret_key, - s3_region) { - # Configure the S3 client to use DigitalOcean Spaces - Sys.setenv( - "AWS_ACCESS_KEY_ID" = s3_access_key, - "AWS_SECRET_ACCESS_KEY" = s3_secret_key, - "AWS_S3_ENDPOINT" = s3_url, - "AWS_DEFAULT_REGION" = s3_region - ) - - # Check and create the local directory if it doesn't exist - if (!dir.exists(local_folder_path)) { - dir.create(local_folder_path, recursive = TRUE) - } - - # List all files in the folder - response <- aws.s3::get_bucket( - bucket = s3_bucket, - prefix = s3_folder_path, - delimiter = "/", - parse_response = TRUE - ) - - - for (i in 1:length(response)) { - file_key <- response[i]$Contents$Key - if (grepl("\\.csv$", file_key)) { - # Download file - aws.s3::save_object( - file = paste0(local_folder_path, "/", basename(file_key)), - object = file_key, - bucket = s3_bucket - ) - } - } + lapply(tables, function(table_name) { + query <- sprintf("SELECT * FROM %s", table_name) + data <- DBI::dbGetQuery(conn, query) + readr::write_csv(data, file = file.path(folder_path, paste0(table_name, ".csv"))) + }) }
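
Note: the new download_db_tables_postgres() helper in scripts/trisk_knative/utils.R opens a Postgres connection that is never explicitly closed, and it assumes the destination folder already exists. A minimal hardened sketch is below; it keeps the same environment variable names the patch reads (including ST_POSTGRES_USERNAME, which differs from the POSTGRES_USERNAME used in deploy.sh and the Dockerfile), and the _safe suffix is only for illustration, not part of the patch.

download_db_tables_postgres_safe <- function(tables, folder_path) {
  # Open a Postgres connection from environment variables (same names as in the patch)
  conn <- DBI::dbConnect(
    RPostgres::Postgres(),
    dbname = Sys.getenv("POSTGRES_DB"),
    host = Sys.getenv("POSTGRES_HOST"),
    port = Sys.getenv("POSTGRES_PORT"),
    user = Sys.getenv("ST_POSTGRES_USERNAME"),
    password = Sys.getenv("POSTGRES_PASSWORD")
  )
  # Always release the connection, even if a query fails
  on.exit(DBI::dbDisconnect(conn), add = TRUE)

  # Make sure the destination folder exists before writing CSVs
  if (!dir.exists(folder_path)) {
    dir.create(folder_path, recursive = TRUE)
  }

  for (table_name in tables) {
    # Quote the identifier rather than pasting it raw into the query
    query <- paste0("SELECT * FROM ", DBI::dbQuoteIdentifier(conn, table_name))
    data <- DBI::dbGetQuery(conn, query)
    readr::write_csv(data, file = file.path(folder_path, paste0(table_name, ".csv")))
  }

  invisible(folder_path)
}

Using on.exit() keeps the disconnect tied to the function's exit path, so a failing query or write cannot leak the connection.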
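
On the app side, trigger_trisk_api_computation() in app/logic/cloud_logic.R now receives the Kubernetes service name and posts to http://<service>:80/compute_trisk. The request body is not visible in this hunk, so the payload shape below is an assumption; the sketch (using httr and jsonlite-style JSON encoding) only illustrates the transport, and the _sketch suffix and the "trisk_run_params"/"run_id" field names are hypothetical.

trigger_trisk_api_computation_sketch <- function(trisk_run_params,
                                                 trisk_api_service = "trisk-api-service") {
  # Same URL construction as in the patch
  url <- paste0("http://", trisk_api_service, ":80/compute_trisk")
  # Assumed payload: the run parameters wrapped in a single JSON field
  response <- httr::POST(
    url,
    body = list(trisk_run_params = trisk_run_params),
    encode = "json"
  )
  httr::stop_for_status(response)
  # Assumes the API answers with a JSON object carrying a run_id field
  httr::content(response)$run_id
}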
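
A companion client call for the GET /get_possible_trisk_combinations endpoint added in api.R. It assumes the service name resolves inside the cluster; depending on how plumber serializes the handler's return value (the handler already calls jsonlite::toJSON itself), the body may need a second jsonlite::fromJSON() pass, so this is a sketch rather than a definitive client.

get_possible_trisk_combinations_sketch <- function(trisk_api_service = "trisk-api-service") {
  url <- paste0("http://", trisk_api_service, ":80/get_possible_trisk_combinations")
  response <- httr::GET(url)
  httr::stop_for_status(response)
  parsed <- jsonlite::fromJSON(httr::content(response, as = "text", encoding = "UTF-8"))
  # If plumber double-encoded the handler's JSON string, unwrap it once more
  if (is.character(parsed) && length(parsed) == 1) {
    parsed <- jsonlite::fromJSON(parsed)
  }
  parsed$possible_trisk_combinations
}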