Skip to content

Commit

Permalink
wip
Browse files Browse the repository at this point in the history
  • Loading branch information
bergalli committed Mar 22, 2024
1 parent 2940ceb commit dff50d5
Show file tree
Hide file tree
Showing 8 changed files with 37 additions and 130 deletions.
3 changes: 1 addition & 2 deletions app/logic/cloud_logic.R
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
trigger_trisk_api_computation <- function(trisk_run_params, api_endpoint) {
trigger_trisk_api_computation <- function(trisk_run_params, trisk_api_service) {
# Define the URL
# by default trisk_api_service should be equal to "trisk-api-service"
trisk_api_service <- Sys.getenv("TRISK_API_SERVICE")
url <- paste0("http://", trisk_api_service, ":80/compute_trisk")

# Define the body of the request
Expand Down
9 changes: 2 additions & 7 deletions app/logic/constant.R
Original file line number Diff line number Diff line change
@@ -1,14 +1,9 @@
# export constant values from here

# INFRASTRUCTURE CONSTANTS ====================================
# PROJECT CONSTANTS ====================================

api_endpoint <- Sys.getenv("TRISK_API_ENDPOINT")
TRISK_API_SERVICE <- Sys.getenv("TRISK_API_SERVICE")
trisk_input_path <- file.path("app", "data", "st_inputs")
backend_trisk_run_folder <- file.path("app", "data", "backend_db")


# PROJECT CONSTANTS ====================================

# Filter outliers in crispy when generating the analysis data
# see stress.test.plot.report:::load_input_plots_data_from_tibble documentation for more details
filter_crispy_outliers <- TRUE
Expand Down
3 changes: 2 additions & 1 deletion app/logic/trisk_button_logic.R
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
box::use(
app/logic/constant[TRISK_API_SERVICE],
app / logic / trisk_mgmt[
run_trisk_with_params,
format_error_message
Expand Down Expand Up @@ -75,7 +76,7 @@ trisk_generator <- function(
run_id <- NULL
}
} else if (Sys.getenv("CRISPY_APP_ENV") == "prod") {
run_id <- trigger_trisk_api_computation(trisk_run_params)
run_id <- trigger_trisk_api_computation(trisk_run_params, trisk_api_service=TRISK_API_SERVICE)
} else {
stop("must set environment variable CRISPY_APP_ENV to 'dev' or 'prod'")
}
Expand Down
6 changes: 0 additions & 6 deletions scripts/trisk_knative/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -38,12 +38,6 @@ ENV POSTGRES_PASSWORD=NULL
ENV POSTGRES_HOST=NULL
ENV POSTGRES_PORT=NULL

ENV S3_URL=NULL
ENV S3_ACCESS_KEY=NULL
ENV S3_SECRET_KEY=NULL
ENV S3_BUCKET=NULL
ENV S3_REGION=NULL

# Command to run the app
# Expose the port your app will run on
EXPOSE 8080
Expand Down
33 changes: 14 additions & 19 deletions scripts/trisk_knative/api.R
Original file line number Diff line number Diff line change
@@ -1,29 +1,24 @@
# Define an endpoint that accepts POST requests
# Assume the JSON payload is directly analogous to the R list structure for trisk_run_param

source("./trisk_compute.R")
source("./utils.R")
source(file.path(".","trisk_compute.R"))
source(file.path(".","utils.R"))

# Create a plumber router
pr <- plumber::Plumber$new()

# hardcoded input folder path; input tables are downloaded from Postgres into this folder at startup
trisk_input_path <- file.path(".", "st_inputs")
s3_folder_path <- "st_inputs/"
TRISK_INPUT_PATH <- file.path(".", "st_inputs")
tables <- c(
"Scenarios_AnalysisInput",
"abcd_stress_test_input",
"ngfs_carbon_price",
"prewrangled_capacity_factors",
"prewrangled_financial_data_stress_test",
"price_data_long"
)



if (!dir.exists(trisk_input_path)) {
download_files_from_s3(
s3_url = Sys.getenv("S3_URL"),
s3_folder_path = s3_folder_path,
local_folder_path = trisk_input_path,
s3_access_key = Sys.getenv("S3_ACCESS_KEY"),
s3_secret_key = Sys.getenv("S3_SECRET_KEY"),
s3_bucket = Sys.getenv("S3_BUCKET"),
s3_region = Sys.getenv("S3_REGION")
)
}
download_db_tables_postgres(tables=tables, folder_path=TRISK_INPUT_PATH)


validate_trisk_run_params <- function(trisk_run_params) {
Expand All @@ -50,7 +45,7 @@ pr$handle("POST", "/compute_trisk", function(req, res) {

run_id <- run_trisk_and_upload_results_to_db_conn(
trisk_run_params = trisk_run_params,
trisk_input_path = trisk_input_path,
trisk_input_path = TRISK_INPUT_PATH,
postgres_conn = postgres_conn
)

Expand All @@ -62,7 +57,7 @@ pr$handle("POST", "/compute_trisk", function(req, res) {
})

pr$handle("GET", "/get_possible_trisk_combinations", function(req, res) {
possible_trisk_combinations <- r2dii.climate.stress.test::get_scenario_geography_x_ald_sector(trisk_input_path)
possible_trisk_combinations <- r2dii.climate.stress.test::get_scenario_geography_x_ald_sector(TRISK_INPUT_PATH)
response <- list(possible_trisk_combinations = possible_trisk_combinations)
response <- jsonlite::toJSON(response, auto_unbox = TRUE)
return(response)
Expand Down
16 changes: 2 additions & 14 deletions scripts/trisk_knative/deploy.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,17 +2,10 @@

# Check for correct number of arguments
if [ "$#" -ne 10 ]; then
echo "Usage: $0 S3_URL S3_ACCESS_KEY S3_SECRET_KEY S3_BUCKET S3_REGION POSTGRES_USERNAME POSTGRES_PASSWORD POSTGRES_HOST POSTGRES_PORT POSTGRES_DB"
echo "Usage: $0 POSTGRES_USERNAME POSTGRES_PASSWORD POSTGRES_HOST POSTGRES_PORT POSTGRES_DB"
exit 1
fi

# Assigning arguments to variables
S3_URL=$(echo -n "$1" | base64)
S3_ACCESS_KEY=$(echo -n "$2" | base64)
S3_SECRET_KEY=$(echo -n "$3" | base64)
S3_BUCKET=$(echo -n "$4" | base64)
S3_REGION=$(echo -n "$5" | base64)

POSTGRES_USERNAME=$(echo -n "$6" | base64)
POSTGRES_PASSWORD=$(echo -n "$7" | base64)
POSTGRES_HOST=$(echo -n "$8" | base64)
Expand All @@ -24,11 +17,6 @@ sed -e "s|\${POSTGRES_USERNAME}|${POSTGRES_USERNAME}|g" \
-e "s|\${POSTGRES_PASSWORD}|${POSTGRES_PASSWORD}|g" \
-e "s|\${POSTGRES_HOST}|${POSTGRES_HOST}|g" \
-e "s|\${POSTGRES_PORT}|${POSTGRES_PORT}|g" \
-e "s|\${POSTGRES_DB}|${POSTGRES_DB}|g" \
-e "s|\${S3_URL}|${S3_URL}|g" \
-e "s|\${S3_ACCESS_KEY}|${S3_ACCESS_KEY}|g" \
-e "s|\${S3_SECRET_KEY}|${S3_SECRET_KEY}|g" \
-e "s|\${S3_BUCKET}|${S3_BUCKET}|g" \
-e "s|\${S3_REGION}|${S3_REGION}|g" k8s-trisk-api.yaml > k8s-trisk-api.yaml
-e "s|\${POSTGRES_DB}|${POSTGRES_DB}|g" k8s-trisk-api.yaml > k8s-trisk-api.yaml

kubectl apply -f k8s-trisk-api.yaml
40 changes: 1 addition & 39 deletions scripts/trisk_knative/k8s-trisk-api.yaml
Original file line number Diff line number Diff line change
@@ -1,17 +1,5 @@
apiVersion: v1
kind: Secret
metadata:
name: s3-credentials
type: Opaque
data:
s3_url: ${S3_URL}
s3_access_key: ${S3_ACCESS_KEY}
s3_secret_key: ${S3_SECRET_KEY}
s3_bucket: ${S3_BUCKET}
s3_region: ${S3_REGION}
---
apiVersion: v1
kind: Secret
metadata:
name: db-credentials
type: Opaque
Expand Down Expand Up @@ -71,30 +59,4 @@ spec:
valueFrom:
secretKeyRef:
name: db-credentials
key: postgres_db
- name: S3_URL
valueFrom:
secretKeyRef:
name: s3-credentials
key: crispy_s3_url
- name: S3_ACCESS_KEY
valueFrom:
secretKeyRef:
name: s3-credentials
key: crispy_s3_access_key
- name: S3_SECRET_KEY
valueFrom:
secretKeyRef:
name: s3-credentials
key: crispy_s3_secret_key
- name: S3_BUCKET
valueFrom:
secretKeyRef:
name: s3-credentials
key: crispy_s3_bucket
- name: S3_REGION
valueFrom:
secretKeyRef:
name: s3-credentials
key: crispy_s3_region

key: postgres_db
57 changes: 15 additions & 42 deletions scripts/trisk_knative/utils.R
Original file line number Diff line number Diff line change
@@ -1,44 +1,17 @@
download_db_tables_postgres <- function(tables, folder_path) {
# Open a connection to the Postgres database using credentials from environment variables
conn <- DBI::dbConnect(
RPostgres::Postgres(),
dbname = Sys.getenv("POSTGRES_DB"),
host = Sys.getenv("POSTGRES_HOST"),
port = Sys.getenv("POSTGRES_PORT"),
user = Sys.getenv("ST_POSTGRES_USERNAME"),
password = Sys.getenv("POSTGRES_PASSWORD")
)


download_files_from_s3 <- function(
local_folder_path,
s3_url,
s3_bucket,
s3_folder_path,
s3_access_key,
s3_secret_key,
s3_region) {
# Configure the S3 client to use DigitalOcean Spaces
Sys.setenv(
"AWS_ACCESS_KEY_ID" = s3_access_key,
"AWS_SECRET_ACCESS_KEY" = s3_secret_key,
"AWS_S3_ENDPOINT" = s3_url,
"AWS_DEFAULT_REGION" = s3_region
)

# Check and create the local directory if it doesn't exist
if (!dir.exists(local_folder_path)) {
dir.create(local_folder_path, recursive = TRUE)
}

# List all files in the folder
response <- aws.s3::get_bucket(
bucket = s3_bucket,
prefix = s3_folder_path,
delimiter = "/",
parse_response = TRUE
)


for (i in 1:length(response)) {
file_key <- response[i]$Contents$Key
if (grepl("\\.csv$", file_key)) {
# Download file
aws.s3::save_object(
file = paste0(local_folder_path, "/", basename(file_key)),
object = file_key,
bucket = s3_bucket
)
}
}
lapply(tables, function(table_name) {
query <- sprintf("SELECT * FROM %s", table_name)
data <- DBI::dbGetQuery(conn, query)
readr::write_csv(data, file = file.path(folder_path, paste0(table_name, ".csv")))
})
}

0 comments on commit dff50d5

Please sign in to comment.