diff --git a/.Rbuildignore b/.Rbuildignore new file mode 100644 index 0000000..91114bf --- /dev/null +++ b/.Rbuildignore @@ -0,0 +1,2 @@ +^.*\.Rproj$ +^\.Rproj\.user$ diff --git a/.gitignore b/.gitignore index 32ce999..5b80efb 100644 --- a/.gitignore +++ b/.gitignore @@ -2,24 +2,43 @@ .Rhistory .Rapp.history .RData -plotgdx_witch_*.R -plotgdx_rice_*.R -plotgdx_fidelio_*.R +.Rproj.user/ + # Example code in package build process *-Ex.R + # RStudio files .Rproj.user/ -# produced vignettes + +# Produced vignettes vignettes/*.html vignettes/*.pdf -!plotgdx_witch.R -!plotgdx_rice.R -!plotgdx_fidelio.R + +# OAuth tokens .httr-oauth -.Rproj.user -# data from online deployed gdxcompaR -gdxcompaR/witch/*.RData -gdxcompaR/witch/rsconnect -gdxcompaR/iiasadb/*.RData -gdxcompaR/iiasadb/rsconnect + +# Data from online deployed gdxcompaR +inst/gdxcompaR/witch/*.RData +inst/gdxcompaR/witch/rsconnect +inst/gdxcompaR/iiasadb/*.RData +inst/gdxcompaR/iiasadb/rsconnect iiasa_credentials.yml + +# Package builds +*.tar.gz + +# Other folders +.claude/ +TEACHING/ +binary_packages + +# Development files (optional - remove these lines to track them) +dev_workflow.R +ROXYGEN_WORKFLOW.md +test* + +# Source historical data (not released with package) +# Only processed regional files in data/ are released +data-raw/data_historical_values.gdx + + diff --git a/DESCRIPTION b/DESCRIPTION new file mode 100644 index 0000000..5921f7f --- /dev/null +++ b/DESCRIPTION @@ -0,0 +1,51 @@ +Package: witchplot +Type: Package +Title: Interactive Visualization Toolkit for GAMS IAM Model Results (WITCH, RICE, DICE, FIDELIO) +Version: 1.0.0 +Date: 2025-10-24 +Authors@R: c( + person("Johannes", "Emmerling", role=c("aut", "cre"), email="johannes.emmerling@cmcc.it"), + person("WITCH Team", role="ctb")) +Description: Dynamic comparison and plotting tools for Integrated Assessment Model (IAM) results from GAMS models including WITCH, RICE50+, DICE, FIDELIO, and IIASADB. 
Provides Shiny-based interactive applications for scenario comparison, regional analysis, and comprehensive visualization of energy-climate model outputs. +License: MIT + file LICENSE +Copyright: Copyright (c) 2025 Johannes Emmerling and WITCH Team +Encoding: UTF-8 +LazyData: true +Depends: R (>= 4.0.0) +Imports: + data.table, + stringr, + ggplot2, + dplyr, + tidyr, + shiny, + shinyWidgets, + plotly, + witchtools, + gdxtools, + memoise, + scales, + RColorBrewer, + openxlsx, + countrycode +Suggests: + ggpubr, + purrr, + forcats, + arrow, + rworldmap, + sf, + rnaturalearth, + reldist, + bslib, + shinythemes, + gsubfn, + rlang, + docopt, + tidytidbits, + gdxrrw, + testthat (>= 3.0.0) +Remotes: + witch-team/witchtools, + lolow/gdxtools +RoxygenNote: 7.3.3 diff --git a/INSTALLATION_GUIDE.md b/INSTALLATION_GUIDE.md new file mode 100644 index 0000000..3b4c9d3 --- /dev/null +++ b/INSTALLATION_GUIDE.md @@ -0,0 +1,151 @@ +# witchplot Installation Guide + +This guide provides installation instructions for the `witchplot` package and its dependencies. + +## Two Installation Methods + +### Method 1: Install from Binary Packages (Recommended - No Rtools needed) + +This is the **easiest method** and does not require Rtools or compilation. + +**Steps:** + +1. Download the shared folder containing: + - `gdxtools_x.x.x.zip` + - `witchtools_x.x.x.zip` + - `witchplot_x.x.x.zip` + - `install_witchplot.R` + +2. Open R or RStudio + +3. Set your working directory to the folder containing the files: + ```r + setwd("path/to/downloaded/folder") + ``` + +4. Run the installation script: + ```r + source("install_witchplot.R") + ``` + +5. Once installation completes, load the package: + ```r + library(witchplot) + run_witch() + ``` + +--- + +### Method 2: Install from GitHub (Requires Rtools on Windows) + +This method installs directly from GitHub but **requires Rtools** on Windows because some dependencies need compilation. 
+ +**Prerequisites:** +- For Windows users: Install [Rtools](https://cran.r-project.org/bin/windows/Rtools/) first + +**Steps:** + +1. Download `install_from_github.R` + +2. Open the file and update the `github_repo` variable with the correct GitHub repository path + +3. Run the installation script: + ```r + source("install_from_github.R") + ``` + +4. Once installation completes, load the package: + ```r + library(witchplot) + run_witch() + ``` + +--- + +## Additional Setup for WITCH/RICE Models + +The `witchplot` package can visualize two types of data: + +1. **IIASA Database** (e.g., AR6, AR5) - **No additional setup needed** + ```r + run_iiasadb(iamc_databasename = "iamc15") + ``` + +2. **WITCH/RICE GDX files** - **Requires GAMS installation** + ```r + run_witch() # Requires GAMS + run_rice() # Requires GAMS + ``` + +### Setting up GAMS for GDX files + +If you want to work with WITCH/RICE model GDX files, you need GAMS: + +1. **Download and install GAMS** from: https://www.gams.com/download/ + - A free demo license is available + - Note the installation directory (e.g., `C:/GAMS/47`) + +2. **Initialize the GDX library in R:** + ```r + library(witchplot) + setup_gdx() # Auto-detects GAMS installation + + # Or specify GAMS path manually if needed: + setup_gdx("C:/GAMS/47") + ``` + +3. **Verify it works:** + ```r + run_witch() # Should now work! + ``` + +**Note**: The package will automatically try to initialize GDX when loaded. If you see errors about "GDX library not loaded", follow the steps above. + +--- + +## Troubleshooting + +### GDX Library Errors + +If you see: +``` +Error: GDX library has not been loaded +``` + +**Solution:** +1. Install GAMS from https://www.gams.com/download/ +2. Run `setup_gdx()` in R +3. 
See detailed troubleshooting guide at: `TEACHING/TROUBLESHOOTING_GDX.md` + +**Alternative**: Use IIASA database viewer (no GAMS needed): +```r +run_iiasadb(iamc_databasename = "iamc15", add_historical = FALSE) +``` + +### "Package not found" errors +- **Method 1**: Make sure all .zip files are in the same folder as `install_witchplot.R` +- **Method 2**: Check your internet connection and that the GitHub repository path is correct + +### "Rtools required" messages +- Use **Method 1** (binary packages) instead, or +- Install Rtools from: https://cran.r-project.org/bin/windows/Rtools/ + +### Package loading errors +Try restarting R and loading the package again: +```r +# Restart R session, then: +library(witchplot) +``` + +### Permission denied errors +Close R/RStudio completely and restart, then try installation again. + +--- + +## Package Information + +- **witchplot**: Interactive visualization toolkit for GAMS IAM model results +- **Dependencies from GitHub**: + - `gdxtools` (https://github.com/lolow/gdxtools) - Manipulate GDX files in R + - `witchtools` (https://github.com/witch-team/witchtools) - Data management for IAMs +- **CRAN dependencies**: Automatically installed (data.table, ggplot2, shiny, etc.) diff --git a/LICENSE.md b/LICENSE.md index fcca4a9..d00cb88 100644 --- a/LICENSE.md +++ b/LICENSE.md @@ -179,7 +179,7 @@ recommend that a file or class name and description of purpose be included on the same “printed page” as the copyright notice for easier identification within third-party archives. - Copyright 2019 Laurent Drouet + Copyright 2025 Johannes Emmerling Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
diff --git a/NAMESPACE b/NAMESPACE new file mode 100644 index 0000000..c816381 --- /dev/null +++ b/NAMESPACE @@ -0,0 +1,11 @@ +exportPattern("^[[:alpha:]]+") +import(ggplot2) +import(shiny) +import(gdxtools) +importFrom(data.table, fread, as.data.table, rbindlist, setnames) +importFrom(stringr, str_detect, str_subset, str_glue, str_trunc) +importFrom(plotly, ggplotly) +importFrom(shinyWidgets, pickerInput) +importFrom(memoise, memoise) +# Import dplyr LAST to ensure its functions (filter, select, etc.) take precedence over data.table +import(dplyr) diff --git a/R/RICE50x_plots.R b/R/RICE50x_plots.R deleted file mode 100644 index b0b8e18..0000000 --- a/R/RICE50x_plots.R +++ /dev/null @@ -1,39 +0,0 @@ -#Specific functions for the RICE50+ model - - -plot_macc_fit <- function(yearcheck = 2040){ -#check MACC curves -ax_co2 <- get_witch("ax_co2") -bx_co2 <- get_witch("bx_co2") -emi_bau_co2 <- get_witch("emi_bau_co2") -mx <- get_witch("mx") - -check_macc <- ax_co2 %>% select(t,n,value) %>% rename(ax_co2=value) %>% full_join(bx_co2 %>% select(t,n,value) %>% rename(bx_co2=value)) %>% full_join(emi_bau_co2 %>% select(t,n,value) %>% rename(emi_bau_co2=value)) %>% full_join(mx %>% select(t,n,value) %>% rename(mx=value)) - -plot_check_macc <- check_macc %>% filter(ttoyear(t) == yearcheck) -xmin<-0 -xmax<-1.2 -step<-0.05 -xx<-seq(xmin,xmax,by=step) -miudf<-data.frame(matrix("", ncol = length(plot_check_macc$n), nrow = length(xx))) -names(miudf) <- plot_check_macc$n -miudf$xx<-xx -for(i in 1:length(plot_check_macc$n)){ - miudf[,i] <- plot_check_macc$ax_co2[i]*(miudf$xx^2)/2 + plot_check_macc$bx_co2[i]*(miudf$xx^5)/5 - miudf[,i] <- plot_check_macc$mx[i] * (plot_check_macc$ax_co2[i]*(miudf$xx^2)/2 + plot_check_macc$bx_co2[i]*(miudf$xx^5)/5) - miudf[,i] <- plot_check_macc$mx[i] * (plot_check_macc$ax_co2[i]*(miudf$xx^2)/2 + plot_check_macc$bx_co2[i]*(miudf$xx^5)/5) * plot_check_macc$emi_bau_co2[i] - miudf[,i] <- plot_check_macc$mx[i]*(plot_check_macc$ax_co2[i]*(miudf$xx^1) + 
plot_check_macc$bx_co2[i]*(miudf$xx^4)) - miudf[,i] <- (plot_check_macc$ax_co2[i]*(miudf$xx^1) + plot_check_macc$bx_co2[i]*(miudf$xx^4)) -} -ggplot(miudf %>% pivot_longer(cols = !xx, names_to = "n"), aes(xx,value)) + geom_line(aes(colour = n)) -plotly::ggplotly() -#add enerdata points -enerdata <- fread(file = file.path(witch_folder, "input", "data", "enerdata-enerdata_macc_full.csv")) -enerdata <- enerdata %>% filter(sector=="Total_CO2" & scenario=="Ener-Blue") %>% mutate(t=yeartot(Year), mju=abatement_perc, n=Code) %>% select(t,n,cost,mju) -ggplot(enerdata %>% filter(ttoyear(t)==yearcheck), aes(mju,cost)) + geom_point(aes(colour = n)) - -#plot enerdata and model together -ggplot(miudf %>% pivot_longer(cols = !xx, names_to = "n"), aes(xx,value)) + xlim(0,0.6) + ylim(0,1000) + geom_line(aes(colour = n)) + geom_point(data=enerdata %>% filter(ttoyear(t)==yearcheck), aes(mju,cost, colour = n) -) + xlab("mju") + ylab("Carbon price $/tCO2") + guides(color=FALSE) -saveplot("RICE50+ MACC Curves Fit") -} diff --git a/R/add_historical_values.R b/R/add_historical_values.R index be60ac7..645aade 100644 --- a/R/add_historical_values.R +++ b/R/add_historical_values.R @@ -1,15 +1,49 @@ -add_historical_values <- function(variable, varname=deparse(substitute(variable)), scenplot=scenlist, check_calibration=T, overlap_years="model", verbose=F, iiasadb = F){ - #have to decide what to do with years with both model and historical data: overlap_years = #historical" or "model" - if(!length(list.files(path=file.path(witch_folder, paste0("data_", reg_id)), full.names = TRUE, pattern="^data_historical(.*).gdx$", recursive = FALSE))>0) return(as.data.table(variable)) +add_historical_values <- function(variable, varname=deparse(substitute(variable)), verbose=TRUE, iiasadb = F){ + # Determine reg_id - use "witch17" as default if not set + current_reg_id <- if(exists("reg_id") && !is.null(reg_id) && length(reg_id) > 0) { + reg_id[1] + } else { + "witch17" # Default for WITCH + } + + hist_file 
<- system.file("data", paste0("data_historical_values_", current_reg_id, ".gdx"), package = "witchplot") + if(hist_file == "") { + # If not found in package, check results_dir for data_historical_values.gdx + if(exists("results_dir") && length(results_dir) > 0) { + local_hist_file <- file.path(results_dir[1], "data_historical_values.gdx") + if(file.exists(local_hist_file)) { + hist_file <- local_hist_file + if(verbose) message(sprintf("Using historical data from results_dir: %s", hist_file)) + } else { + if(verbose) message(sprintf("No file data_historical_values.gdx found. Consider storing it from your model data folder in your results_dir, then it will be loaded.")) + return(as.data.table(variable)) + } + } else { + if(verbose) message(sprintf("No historical data file found for reg_id '%s'. Package data should contain: data/data_historical_values_%s.gdx", current_reg_id, current_reg_id)) + return(as.data.table(variable)) + } + } else { + if(verbose) message(sprintf("Using historical data from package: %s", hist_file)) + } + + if(exists("map_var_hist")) if(!(varname %in% map_var_hist$varname_model)) return(as.data.table(variable)) - #from here process the historical data files + #from here process the historical data files variable_loaded_original <- variable - if(iiasadb){ - #for IIASAdb rename relevant set columns - variable <- variable %>% dplyr::rename(n=REGION) %>% mutate(YEAR=yeartot(YEAR)) %>% dplyr::rename(t=YEAR) %>% mutate(VARIABLE=gsub("\\|","_",VARIABLE)) - varname <- gsub("\\|","_",varname) + if(iiasadb){ + #for IIASAdb rename relevant set columns (YEAR column already contains actual years, not time periods) + # Only rename if REGION and YEAR columns exist (they may already be renamed to n and year) + if("REGION" %in% names(variable) && "YEAR" %in% names(variable)) { + variable <- variable %>% dplyr::rename(n=REGION, year=YEAR) %>% mutate(VARIABLE=gsub("\\|","_",VARIABLE)) + } else { + # Already in standard format, just clean VARIABLE name + 
if("VARIABLE" %in% names(variable)) { + variable <- variable %>% mutate(VARIABLE=gsub("\\|","_",VARIABLE)) + } } + varname <- gsub("\\|","_",varname) + } if(exists("map_var_hist")){ map_var_hist$varname_model <- gsub("\\|","_",map_var_hist$varname_model) #just locally in this function @@ -23,12 +57,12 @@ add_historical_values <- function(variable, varname=deparse(substitute(variable) varname <- map_var_hist[varname_model==varname]$var_witch[1] #for now only one witch variable for a model variable can be used } } - + valid_suffix <- "_valid" #if(varname=="Q_EMI"){valid_suffix <- "_valid_primap"} if(varname=="Q"){valid_suffix <- c("_valid_wdi", "_valid_weo")} #if(varname=="SOCECON"){valid_suffix <- "_valid_wdi_sum"} - if(varname=="Q_IN"){valid_suffix <- "_valid_notcompatible"} + if(varname=="Q_IN"){valid_suffix <- "_valid_weo"} if(varname=="sigma"){valid_suffix <- "_valid_notcompatible"} if(varname=="quantiles"){valid_suffix <- "_valid_swiid"} #for quantiles if(varname=="K_EN"){valid_suffix <- c("_valid_platts_tot", "_valid_irena", "_valid_iaea", "_valid_gcpt")} #for quantiles, set it to @@ -36,94 +70,248 @@ add_historical_values <- function(variable, varname=deparse(substitute(variable) #treat special varnames if(str_detect(varname, "MAGICC")) varname <- gsub("MAGICC", "", varname) if(str_detect(varname, "HECTOR")) varname <- gsub("HECTOR", "", varname) - - #check which GDX file to use (all files that start with data_historical*.gdx) - if(!dir.exists(file.path(witch_folder, paste0("data_", reg_id[1])))) return(as.data.table(variable)) - gdxhistlist <- list.files(path=file.path(witch_folder, paste0("data_", reg_id)), full.names = TRUE, pattern="^data_historical(.*).gdx$", recursive = FALSE) - - for(.gdxname in gdxhistlist){ - #print(.gdxname) - .gdx <- gdx(.gdxname) - if(length(grep(paste(paste0("^", tolower(varname), valid_suffix), collapse = '|'), .gdx$parameters$name, value = TRUE))!=0){break} #to find the hist file with the valid data (only one!) 
+ + # Load historical GDX file(s) from package data + # For IIASADB, combine global and r5 historical data + if(iiasadb) { + # Ensure required packages are loaded for reading GDX files + require(gdxtools) + require(data.table) + + # For IIASADB, load ALL available historical data files (global, r5, witch17, ed58, etc.) + # This ensures maximum regional coverage + + # Get all data_historical_values_*.gdx files from package data folder + data_dir <- system.file("data", package = "witchplot") + all_hist_files <- list.files(data_dir, pattern = "^data_historical_values_.*\\.gdx$", full.names = TRUE) + + # Also check results_dir if available + if(exists("results_dir") && length(results_dir) > 0 && dir.exists(results_dir[1])) { + local_hist_files <- list.files(results_dir[1], pattern = "^data_historical_values_.*\\.gdx$", full.names = TRUE) + all_hist_files <- c(all_hist_files, local_hist_files) + } + + # Remove duplicates (prefer package files over local) + all_hist_files <- unique(all_hist_files) + + # Load all available files + gdx_list <- list() + for(hf in all_hist_files) { + if(file.exists(hf)) { + gdx_list[[length(gdx_list) + 1]] <- gdx(hf) + if(verbose) message(sprintf("Loaded historical data from: %s", basename(hf))) + } + } + + if(length(gdx_list) == 0) { + if(verbose) message("No IIASADB historical data files found") + return(as.data.table(variable_loaded_original)) + } + + # Use first gdx for initial checks, will combine data from all files later + .gdx <- gdx_list[[1]] + } else { + # For non-IIASADB, load single file + .gdx <- gdx(hist_file) } - #now checking if for the precise variable historical data is there + + #now checking if for the precise variable historical data is there if(length(grep(paste(paste0("^", tolower(varname), valid_suffix), collapse = '|'), .gdx$parameters$name, value = TRUE))==0) return(as.data.table(variable_loaded_original)) ####### #here continue only if we're sure data will be merged ######## if(verbose) print(paste0("Historical 
values added for '", varname, "'.")) item <- grep(paste(paste0("^", tolower(varname), valid_suffix), collapse = '|'), .gdx$parameters$name, value = TRUE) #use grep with ^ to have them start by varname - if(!check_calibration) item <- item[1] #if not check calibration, just take the first (unique) data source) - for(.item in item){ - for(.reg_id_file in list.files(path=file.path(witch_folder, paste0("data_", reg_id)), full.names = TRUE, pattern=basename(.gdxname), recursive = FALSE)){ - .hist_single_one_reg_id <- as.data.table(gdx(.reg_id_file)[.item]); - if(.reg_id_file==list.files(path=file.path(witch_folder, paste0("data_", reg_id)), full.names = TRUE, pattern=basename(.gdxname), recursive = FALSE)[1]){.hist_single <- .hist_single_one_reg_id}else{.hist_single <- rbind(.hist_single, .hist_single_one_reg_id)} - } - .hist_single$file <- gsub(paste0(tolower(varname), "_"), "", .item); - if(.item==item[1]){.hist <- .hist_single}else{.hist <- rbind(.hist,.hist_single)} - } + + # Remove all valid_xxx where xxx contains "mean" + item <- item[!grepl("valid_.*mean", item)] + + if(length(item) == 0) { + if(verbose) message("No historical data items remaining after filtering (all contained 'mean')") + return(as.data.table(variable_loaded_original)) + } + + # Always use all data sources (check_calibration is always TRUE) + # For IIASADB, combine data from both global and r5 files + if(iiasadb && exists("gdx_list")) { + .hist <- NULL + for(gdx_idx in 1:length(gdx_list)) { + .gdx_temp <- gdx_list[[gdx_idx]] + # Check which items exist in this gdx file + items_in_gdx <- grep(paste(paste0("^", tolower(varname), valid_suffix), collapse = '|'), .gdx_temp$parameters$name, value = TRUE) + items_in_gdx <- items_in_gdx[!grepl("valid_.*mean", items_in_gdx)] + + for(.item in items_in_gdx){ + .hist_single <- as.data.table(.gdx_temp[.item]) + .hist_single$file <- gsub(paste0(tolower(varname), "_"), "", .item) + if(is.null(.hist)){.hist <- .hist_single}else{.hist <- 
data.table::rbindlist(list(.hist, .hist_single), fill=TRUE)} + } + } + } else { + # For non-IIASADB, load from single gdx + for(.item in item){ + .hist_single <- as.data.table(.gdx[.item]) + .hist_single$file <- gsub(paste0(tolower(varname), "_"), "", .item) + if(.item==item[1]){.hist <- .hist_single}else{.hist <- data.table::rbindlist(list(.hist, .hist_single), fill=TRUE)} + } + } - #get set dependency based on /build/ folder - use_build <- F; - if(use_build){ - .gdxiso3 <- gdx(file.path(witch_folder, "input", "build", basename(.gdxname))); - #print(str(.hist)); print(str(variable)); print(str(.gdxiso3[item[1]])) - colnames(.hist) <- c(colnames(.gdxiso3[item[1]]), "file") - #in built global data have set "global", but in input folder it gets converted to iso3, so: - colnames(.hist) <- gsub("global", "iso3", colnames(.hist)) - #add "World" if no country level data but global - if(!("iso3" %in% colnames(.hist))){.hist$n = "World"}else{colnames(.hist) <- gsub("iso3", "n", colnames(.hist))} - setnames(.hist, "year", "t") - #print(.hist) - }else{ - if(!("n" %in% colnames(.hist))) .hist$n = "World" - #try to get dependency from variable itself - setdep <- setdiff(names(variable), c("n", "file", "pathdir", "t", "value")) - if(iiasadb) setdep <- setdiff(names(variable), c("n", "VARIABLE", "UNIT", "SCENARIO", "MODEL", "t", "value")) - setdep <- c(setdep, "t") - setnames(.hist, paste0("V", seq(1:length(setdep))), setdep) - #print(.hist) + # Load set dependencies from RDS file (always use RDS for set dependencies) + # First try package data, then check results_dir + hist_set_dep_file <- system.file("data", "historical_data_set_dependencies.rds", package = "witchplot") + if(hist_set_dep_file == "" || !file.exists(hist_set_dep_file)) { + # If not in package, check results_dir + if(exists("results_dir") && length(results_dir) > 0) { + local_rds_file <- file.path(results_dir[1], "historical_data_set_dependencies.rds") + if(file.exists(local_rds_file)) { + hist_set_dep_file <- 
local_rds_file + if(verbose) message("Using set dependencies from results_dir") + } + } + } + + # Apply set dependencies from RDS file + if(hist_set_dep_file != "" && file.exists(hist_set_dep_file)) { + hist_set_dependencies <- readRDS(hist_set_dep_file) + # Get set dependencies for this historical parameter + if(.item %in% names(hist_set_dependencies)) { + setdep_hist <- hist_set_dependencies[[.item]] + # Rename columns: first columns get the set names from RDS, then value, then file + names(.hist) <- c(setdep_hist, "value", "file") + if(verbose) message(" Applied set dependencies from RDS: ", paste(setdep_hist, collapse=", ")) + } else { + if(verbose) message(" Warning: No set dependencies found for parameter: ", .item) + } + } else { + if(verbose) message(" Warning: No historical_data_set_dependencies.rds file found") } + # Ensure n column exists (for global/world data) + if(!("n" %in% colnames(.hist))) .hist$n = "World" + #adjust time unit to model - .hist$t <- yeartot(.hist$t) - t_historical<-unique(.hist$t) + if(iiasadb) { + # For IIASADB, historical data has 'year' column, just keep as is + t_historical <- unique(.hist$year) + } else { + # For WITCH/RICE, historical data has 'year', convert to time periods 't' + if("year" %in% names(.hist)) { + .hist$t <- yeartot(.hist$year) + .hist <- .hist %>% select(-year) # Remove year column after conversion + } + t_historical <- unique(.hist$t) + } #adjust scenario names if(exists("witch_regions")) .hist$n <- dplyr::recode(.hist$n, !!!setNames(witch_regions, display_regions)) - #change set name and element if map_var_hist has set_model defined - if(exists("map_var_hist")){ - if(!all(map_var_hist[varname_model==varname_original]$set_model=="")){ - .hist <- .hist %>% filter(c_across(map_var_hist[varname_model==varname_original]$set_model[1]) %in% map_var_hist[varname_model==varname_original]$element_witch) #keep only the data that are map_var_hist given for - set_map <- 
map_var_hist[varname_model==varname_original]$element_model; names(set_map) = map_var_hist[varname_model==varname_original]$element_witch - .hist <- .hist %>% mutate(across(all_of(map_var_hist$set_model[map_var_hist$varname_model == varname_original]), ~ dplyr::recode(., !!!set_map))) #map and change element names from witch to model [best way to replace mapvalues] - } - #unit conversion if needed + + #Apply map_var_hist transformations + if(exists("map_var_hist") && exists("varname_original")){ if(varname_original %in% map_var_hist$varname_model){ + # Determine transformation direction based on what columns exist + set_witch <- map_var_hist[varname_model==varname_original]$set_witch[1] + set_model <- map_var_hist[varname_model==varname_original]$set_model[1] + + if(set_model != "" && set_witch != "") { + # Check which direction to transform: + # If variable has set_model column, transform .hist FROM set_witch TO set_model (e.g., RICE: e -> ghg) + # If variable has set_witch column, transform .hist FROM set_model TO set_witch (e.g., WITCH: ghg -> e) + + if(set_model %in% names(variable) && set_witch %in% names(.hist)) { + # Transform historical FROM WITCH TO MODEL format (e.g., RICE: rename e -> ghg) + setnames(.hist, set_witch, set_model) + if(verbose) message(" Renamed set column: ", set_witch, " -> ", set_model) + + # Filter and recode elements (inverse direction: element_witch -> element_model) + if(set_model %in% names(.hist)) { + .hist <- .hist %>% filter(c_across(all_of(set_model)) %in% map_var_hist[varname_model==varname_original]$element_witch) + # Create inverse mapping: element_witch -> element_model + set_map <- map_var_hist[varname_model==varname_original]$element_model + names(set_map) <- map_var_hist[varname_model==varname_original]$element_witch + .hist <- .hist %>% mutate(across(all_of(set_model), ~ dplyr::recode(., !!!set_map))) + if(verbose) message(" Recoded ", nrow(.hist), " rows in set column: ", set_model, " (WITCH->MODEL)") + } + # Store 
which direction we transformed for unit conversion + transform_direction <- "WITCH_TO_MODEL" + } else if(set_witch %in% names(variable) && set_model %in% names(.hist)) { + # Transform historical FROM MODEL TO WITCH format (standard direction: ghg -> e) + setnames(.hist, set_model, set_witch) + if(verbose) message(" Renamed set column: ", set_model, " -> ", set_witch) + + # Filter and recode elements (standard direction: element_model -> element_witch) + if(set_witch %in% names(.hist)) { + .hist <- .hist %>% filter(c_across(all_of(set_witch)) %in% map_var_hist[varname_model==varname_original]$element_witch) + set_map <- map_var_hist[varname_model==varname_original]$element_model + names(set_map) <- map_var_hist[varname_model==varname_original]$element_witch + .hist <- .hist %>% mutate(across(all_of(set_witch), ~ dplyr::recode(., !!!set_map))) + if(verbose) message(" Recoded ", nrow(.hist), " rows in set column: ", set_witch, " (MODEL->WITCH)") + } + # Store which direction we transformed for unit conversion + transform_direction <- "MODEL_TO_WITCH" + } else { + if(verbose) message(" Could not determine transformation direction - set columns not found in expected format") + transform_direction <- "NONE" + } + } + + #unit conversion if needed .conv <- map_var_hist[varname_model==varname_original] %>% select(set_model, element_model, conv) - if(all(.conv$set_model=="")) .hist$conv <- unique(.conv$conv) else { - setnames(.conv, "element_model", unique(.conv$set_model)) - .hist <- .hist %>% left_join(.conv %>% select(-set_model)) + if(all(.conv$set_model=="")) { + # Simple conversion: apply same factor to all rows + .hist$conv <- unique(.conv$conv) + .hist <- .hist %>% mutate(value = value * conv) %>% select(-conv) + if(verbose) message(" Applied unit conversion: ", unique(.conv$conv)) + } else { + # Set-specific conversion: need to join on the set column + # Determine the actual column name based on transformation direction + if(exists("transform_direction") && 
transform_direction == "WITCH_TO_MODEL") { + # For RICE: column is now set_model (e.g., "ghg") + actual_col_name <- set_model + # Use element_model as the key for joining + join_col <- "element_model" + } else { + # Standard: column is set_witch (e.g., "e") + actual_col_name <- set_witch + # Use element_model as the key for joining + join_col <- "element_model" + } + + if(actual_col_name %in% names(.hist)) { + # Rename element_model to match the actual column name in .hist for joining + .conv_copy <- .conv + setnames(.conv_copy, join_col, actual_col_name) + .conv_select <- .conv_copy %>% select(-set_model) + # Join on the set column + .hist <- .hist %>% left_join(.conv_select, by = actual_col_name) + .hist <- .hist %>% mutate(value = value * conv) %>% select(-conv) + if(verbose) message(" Applied set-specific unit conversion on column: ", actual_col_name) + } else { + if(verbose) warning(paste0("Cannot apply unit conversion for ", varname_original, " - column '", actual_col_name, "' not found in historical data")) + } } - .hist <- .hist %>% mutate(value = value * conv) %>% select(-conv) } } - - #if check_calibration, add validation as data points! 
- if(check_calibration){ - .gdx_validation <- gdx(file.path(witch_folder, paste0("data_", reg_id[1]), "data_validation.gdx")) #only first reg_id - for(.item in intersect(item, .gdx_validation$parameters$name)){.hist_validation_single <- as.data.table(.gdx_validation[.item]); .hist_validation_single$file <- gsub(paste0(tolower(varname), "_"), "", .item); if(.item==item[1]){.hist_validation <- .hist_validation_single}else{.hist_validation <- rbind(.hist_validation,.hist_validation_single)} } - if(exists(".hist_validation") & !exists("map_var_hist")){ - if(!("n" %in% colnames(.hist_validation))){.hist_validation$n = "World"} - colnames(.hist_validation) <- colnames(.hist) - .hist$file <- gsub("valid", "historical", .hist$file) #for the historical set, use "historical" - .hist <- rbind(.hist,.hist_validation) - }else{.hist$file <- gsub("valid", "historical", .hist$file)} - } - else{ - #if not check_calibration and historical files are added to the scenarios, compute the mean in case multiple historical sources for one sub-item (e.g., elhydro) and drop the file column - .hist$file <- NULL - .hist <- .hist %>% group_by_at(setdiff(names(.hist), "value")) %>% summarize(value=mean(value), .groups = "drop") %>% as.data.table() + + # ALWAYS filter historical data to only include elements present in variable + # Do this AFTER map_var_hist transformation so column names match + # Find set columns (exclude standard columns: n, t, year, value, file, pathdir, tlen) + standard_cols <- c("n", "t", "year", "value", "file", "pathdir", "tlen") + set_cols_in_hist <- setdiff(names(.hist), standard_cols) + set_cols_in_var <- setdiff(names(variable), standard_cols) + common_set_cols <- intersect(set_cols_in_hist, set_cols_in_var) + + # Filter .hist to only include elements that exist in variable for each set column + for(set_col in common_set_cols) { + if(set_col %in% names(.hist) && set_col %in% names(variable)) { + elements_in_var <- unique(variable[[set_col]]) + .hist <- .hist %>% 
filter(c_across(all_of(set_col)) %in% elements_in_var) + if(verbose) message(sprintf(" Filtered historical data for '%s' to elements in variable: %s", + set_col, paste(elements_in_var, collapse=", "))) + } } + #rename "valid" to "historical" in file names + # Always keep historical data sources separate (check_calibration is always TRUE) + .hist$file <- gsub("valid", "historical", .hist$file) + #special case where categories do not match exactly if("q_in_valid_weo" %in% item) #add fuel column { @@ -133,57 +321,155 @@ add_historical_values <- function(variable, varname=deparse(substitute(variable) .hist[jfed=="elpb"]$fuel <- "wbio" .hist[jfed=="elnuclear"]$fuel <- "uranium" } - + #merge with variable - if(check_calibration){ - #just multiply by the pathdir so it appears for each pathdir - .hist_temp <- .hist - for(pd in basename(fullpathdir)) - { - .hist_temp$pathdir <- pd - if(pd==basename(fullpathdir[1])){.hist=.hist_temp}else{.hist <-rbind(.hist,.hist_temp)} - } - }else{ - #first multiply by scenplot, add missing columns here add historical data to results + # Always keep historical data sources separate (check_calibration is always TRUE) + # Multiply by pathdir so it appears for each pathdir .hist_temp <- .hist - - for(scen in scenplot) - { - .hist_temp$file <- scen - if(scen==scenplot[1]){.hist=.hist_temp}else{.hist <-rbind(.hist,.hist_temp)} - } - .hist_temp <- .hist - for(pd in basename(fullpathdir)) - { - .hist_temp$pathdir <- pd - if(pd==basename(fullpathdir)[1]){.hist=.hist_temp}else{.hist <-rbind(.hist,.hist_temp)} - } - if(overlap_years=="model"){ - #display model data for overlapping years, delete historical data - .hist <- subset(.hist, !(t %in% seq(1,10))) - }else{ - #or display historical data years, delete model data for 2005 and 2010 - #variable <- subset(variable, !(t %in% unique(.hist$t))) - #variable <- subset(variable, !(t %in% unique(.hist$t))) - variable <- subset(variable, !(t %in% t_historical)) - } + for(pd in basename(results_dir)) + { + 
.hist_temp$pathdir <- pd + if(pd==basename(results_dir[1])){.hist=.hist_temp}else{.hist <- data.table::rbindlist(list(.hist, .hist_temp), fill=TRUE)} } - + if(iiasadb){ #adjusting region names #creating same data format as iiasadb - .hist <- .hist %>% mutate(VARIABLE=varname_original, UNIT=unique(variable$UNIT)[1], SCENARIO="historical", MODEL=file) - .hist <- .hist %>% select(-file, -pathdir) - #keep only historical, no valid data points - .hist <- .hist %>% filter(!str_detect(MODEL, "valid")) + .hist <- .hist %>% + mutate(VARIABLE=varname_original, UNIT=unique(variable$UNIT)[1], SCENARIO="historical", MODEL=file) %>% + select(-file, -pathdir) %>% + #keep only historical, no valid data points + filter(!str_detect(MODEL, "valid")) + + # Ensure .hist has same columns as variable in same order + common_cols <- intersect(names(variable), names(.hist)) + variable <- variable %>% select(all_of(common_cols)) + .hist <- .hist %>% select(all_of(common_cols)) + } else { + # For WITCH/RICE: Keep all columns from variable, add .hist columns that exist + # DO NOT remove columns from variable that aren't in .hist! 
+ # rbindlist with fill=TRUE will handle missing columns by filling with NA + # NOTE: Set column renaming (e.g., e <-> ghg) is now handled by map_var_hist above + + # Keep all variable columns as-is + # Only reorder .hist to have common columns first (helps with rbindlist) + common_cols <- intersect(names(variable), names(.hist)) + hist_only_cols <- setdiff(names(.hist), names(variable)) + + # Reorder .hist: common columns first (in variable's order), then hist-only columns + .hist <- .hist %>% select(all_of(c(common_cols, hist_only_cols))) + # variable stays as-is with all its columns + } + + # Debug: Check if .hist has data + if(verbose) message(sprintf(" Historical data rows before merge: %d, columns: %s", nrow(.hist), paste(names(.hist), collapse=", "))) + + # CASE-INSENSITIVE REGION MATCHING: Match .hist region names to variable region names + # Get region column name (for IIASADB it's still "n" at this point, renamed to REGION later) + region_col <- "n" + if(region_col %in% names(variable) && region_col %in% names(.hist)) { + # Get unique regions from both (case-sensitive) + var_regions <- unique(variable[[region_col]]) + hist_regions <- unique(.hist[[region_col]]) + + # Create case-insensitive mapping: lowercase -> actual variable case + var_regions_lower <- tolower(var_regions) + names(var_regions_lower) <- var_regions + + # For each historical region, find matching variable region (case-insensitive) + region_map <- setNames(character(0), character(0)) # empty named vector + for(hr in hist_regions) { + hr_lower <- tolower(hr) + # Find matching variable region + matching_var <- names(var_regions_lower)[var_regions_lower == hr_lower] + if(length(matching_var) > 0) { + region_map[hr] <- matching_var[1] # Use first match + } else { + region_map[hr] <- hr # No match, keep original + } + } + + # Apply the mapping to .hist + if(length(region_map) > 0) { + .hist <- .hist %>% mutate(!!region_col := dplyr::recode(.data[[region_col]], !!!region_map)) + if(verbose) { + 
changed <- sum(names(region_map) != region_map) + if(changed > 0) { + message(sprintf(" Adjusted %d region names in historical data to match variable case", changed)) + } + } + } + } + + # Use data.table rbindlist with fill=TRUE for safety + merged_variable <- data.table::rbindlist(list(as.data.table(variable), as.data.table(.hist)), fill=TRUE) + + # For RICE E variable: if variable has "ghg" column but historical data doesn't, + # assume historical data is CO2 equivalent (matching get_witch.R line 25 logic) + var_to_check <- if(exists("varname_original")) varname_original else varname + if(verbose) message(sprintf(" Checking if variable '%s' needs ghg fill", var_to_check)) + if(var_to_check %in% c("E", "EIND", "MIU", "ABATEDEMI", "ABATECOST")) { + if("ghg" %in% names(merged_variable)) { + # Count how many NA ghg values in historical data before filling + na_count <- sum(is.na(merged_variable$ghg) & str_detect(merged_variable$file, "historical")) + if(verbose) message(sprintf(" Found %d historical rows with NA ghg values", na_count)) + # Fill NA values in ghg column with "co2" for historical data + merged_variable <- merged_variable %>% + mutate(ghg = ifelse(is.na(ghg) & str_detect(file, "historical"), "co2", ghg)) + if(verbose) message(" Filled NA ghg values with 'co2' for historical data") + } } - merged_variable <- rbind(variable, .hist) - merged_variable$t <- as.numeric(merged_variable$t) - if(iiasadb) merged_variable <- merged_variable %>% dplyr::rename(REGION=n) %>% mutate(t=ttoyear(t)) %>% dplyr::rename(YEAR=t) %>% mutate(VARIABLE=gsub("_","|",VARIABLE)) - if(iiasadb) merged_variable <- merged_variable %>% mutate(REGION=toupper(REGION)) #for now use upper case for all regions + if(iiasadb) { + # For IIASADB, keep in WITCH format (n, year) but restore pipe in VARIABLE + merged_variable <- merged_variable %>% + mutate(VARIABLE=gsub("_","|",VARIABLE)) + # Ensure year is numeric + merged_variable$year <- as.numeric(merged_variable$year) + } else { + # For 
WITCH/RICE, t is in time periods + merged_variable$t <- as.numeric(merged_variable$t) + } #remove additional columns and set elements if no set_model given but set-witch is not empty - if(exists("map_var_hist")) if(map_var_hist[varname_model==varname_original]$set_witch[1]!="" & map_var_hist[varname_model==varname_original]$set_model[1]=="") merged_variable <- merged_variable %>% filter(get(map_var_hist[varname_model==varname_original]$set_witch)==map_var_hist[varname_model==varname_original]$element_witch) %>% select(-one_of(map_var_hist[varname_model==varname_original]$set_witch)) + if(exists("map_var_hist") && exists("varname_original")) { + if(varname_original %in% map_var_hist$varname_model) { + if(map_var_hist[varname_model==varname_original]$set_witch[1]!="" & map_var_hist[varname_model==varname_original]$set_model[1]=="") { + set_col <- map_var_hist[varname_model==varname_original]$set_witch[1] + target_val <- map_var_hist[varname_model==varname_original]$element_witch[1] + if(set_col %in% names(merged_variable)) { + merged_variable <- merged_variable %>% + filter(.data[[set_col]] == target_val) %>% + select(-all_of(set_col)) + } + } + } + } + + # Diagnostic: Check if historical data has matching regions with model data + if(verbose) { + # Get region column name (n for WITCH/RICE, REGION for IIASADB) + region_col <- if(iiasadb) "REGION" else "n" + + if(region_col %in% names(merged_variable)) { + hist_regions <- unique(merged_variable[str_detect(merged_variable$file, "historical"), ][[region_col]]) + model_regions <- unique(merged_variable[!str_detect(merged_variable$file, "historical"), ][[region_col]]) + + common_regions <- intersect(hist_regions, model_regions) + hist_only <- setdiff(hist_regions, model_regions) + model_only <- setdiff(model_regions, hist_regions) + + if(length(common_regions) == 0) { + warning(sprintf("No matching regions between historical data and model data!\n Historical regions: %s\n Model regions: %s", + paste(hist_regions, 
collapse=", "), paste(model_regions, collapse=", "))) + } else if(length(hist_only) > 0 || length(model_only) > 0) { + message(sprintf("Region mismatch detected:\n Common regions: %s\n Historical only: %s\n Model only: %s", + paste(common_regions, collapse=", "), + if(length(hist_only) > 0) paste(hist_only, collapse=", ") else "none", + if(length(model_only) > 0) paste(model_only, collapse=", ") else "none")) + } + } + } + return(merged_variable) } diff --git a/R/auxiliary_functions.R b/R/auxiliary_functions.R index 1024c0d..b0db5b3 100644 --- a/R/auxiliary_functions.R +++ b/R/auxiliary_functions.R @@ -8,8 +8,22 @@ ttoyear <- function(t, tlen = NULL){ tlen <- tstep } } - year = ((as.numeric(t)-1) * tlen + year0) - return(year) + + # If tlen is a vector (variable timesteps), use proper cumulative sum + if(length(tlen) > 1 && length(tlen) == length(t)) { + # Variable timesteps: calculate year using cumulative sum + t_numeric <- as.numeric(t) + t_order <- order(t_numeric) + tlen_ordered <- tlen[t_order] + year_ordered <- year0 + c(0, cumsum(tlen_ordered[-length(tlen_ordered)])) + year <- year_ordered[order(t_order)] # Restore original order + return(year) + } else { + # Constant timestep: use simple formula + if(length(tlen) > 1) tlen <- tlen[1] # If tlen is vector of same values, use first + year = ((as.numeric(t)-1) * tlen + year0) + return(year) + } } yeartot <- function(year, tlen = NULL){ @@ -32,8 +46,8 @@ saveplot <- function(plotname, width=7, height=5, text_size=16, suffix="", trans if(figure_format=="pdf"){plot_device=cairo_pdf}else{plot_device=figure_format} if(figure_format=="eps"){plot_device=cairo_ps} #device=cairo_pdf makes PDFs work with greek symbols etc. 
- if(!exists("legend_position")){legend_position = "bottom"} - if(legend_position=="bottom"){legend_direction="horizontal"}else{legend_direction="vertical"} + legend_position <- "bottom" + legend_direction <- "horizontal" if(transparent){transparent_background <- theme(legend.background = element_blank(), panel.background = element_blank(), plot.background = element_rect(fill = "transparent",colour = NA))}else{transparent_background = NULL} print(ggplot2::last_plot()) if(!deploy_online){ diff --git a/R/climate_plots.R b/R/climate_plots.R index ab01da8..c0c5813 100644 --- a/R/climate_plots.R +++ b/R/climate_plots.R @@ -1,31 +1,31 @@ #Plots for WITCH runs with climate damages or climate_datas climate_plot <- function(scenplot=scenlist, regions = "World"){ - Q_EMI <- get_witch("Q_EMI", check_calibration = T) + Q_EMI <- get_witch("Q_EMI") ghg <- get_witch("ghg") # to get GHGs for non-co2 sets ghgs <- unique(ghg$e) Q_EMI <- Q_EMI %>% filter(e %in% ghgs) %>% group_by(pathdir, file, n, t) %>% summarize(emiall = sum(value), emico2=sum(value[e=="co2"])) %>% mutate(nonco2=emiall-emico2) #get also BAU values - BAU_Q_EMI <- get_witch("BAU_Q_EMI", check_calibration = T) + BAU_Q_EMI <- get_witch("BAU_Q_EMI") BAU_Q_EMI <- BAU_Q_EMI %>% filter(e %in% ghgs) %>% group_by(pathdir, file, n, t) %>% summarize(emi_bau = sum(value), co2_bau=sum(value[e=="co2"])) climate_data <- Q_EMI %>% rename(emi=emiall, co2=emico2) climate_data <- merge(climate_data, BAU_Q_EMI, by = c("pathdir", "file", "n", "t"), all = T) - TRF <- get_witch("TRF", check_calibration = T) + TRF <- get_witch("TRF") climate_data <- merge(climate_data, TRF, by = c("pathdir", "file", "n", "t"), all = T); setnames(climate_data, "value", "trf") #add external climate modules in case - MAGICCTRF <- get_witch("MAGICCTRF", check_calibration = T) + MAGICCTRF <- get_witch("MAGICCTRF") if(length(MAGICCTRF)>0) {setnames(MAGICCTRF, "value", "trf_magicc6"); climate_data <- merge(climate_data, MAGICCTRF, by = c("pathdir", "file", "n", 
"t"), all = T)} - HECTORTRF <- get_witch("HECTORTRF", check_calibration = T) + HECTORTRF <- get_witch("HECTORTRF") if(length(HECTORTRF)>0) {setnames(HECTORTRF, "value", "trf_hector"); climate_data <- merge(climate_data, HECTORTRF, by = c("pathdir", "file", "n", "t"), all = T)} - TEMP <- get_witch("TEMP", check_calibration = T) + TEMP <- get_witch("TEMP") climate_data <- merge(climate_data, TEMP %>% filter(m=="atm") %>% select(-m), by = c("pathdir", "file", "n", "t"), all = T); setnames(climate_data, "value", "temp") #add external climate modules in case - MAGICCTEMP <- get_witch("MAGICCTEMP", check_calibration = T) + MAGICCTEMP <- get_witch("MAGICCTEMP") if(length(MAGICCTEMP)>0) {setnames(MAGICCTEMP, "value", "temp_magicc6"); climate_data <- merge(climate_data, MAGICCTEMP %>% filter(m=="atm") %>% select(-m), by = c("pathdir", "file", "n", "t"), all = T)} - HECTORTEMP <- get_witch("HECTORTEMP", check_calibration = T) + HECTORTEMP <- get_witch("HECTORTEMP") if(length(HECTORTEMP)>0) {setnames(HECTORTEMP, "value", "temp_hector"); climate_data <- merge(climate_data, HECTORTEMP %>% filter(m=="atm") %>% select(-m), by = c("pathdir", "file", "n", "t"), all = T); } #PLOTS: diff --git a/R/diagnostics.R b/R/diagnostics.R index b94811e..344a5dc 100644 --- a/R/diagnostics.R +++ b/R/diagnostics.R @@ -24,16 +24,16 @@ diagnostics_plots <- function(scenplot=scenlist){ paste0(formatC(t %/% 60 %% 60, width = 2, format = "d", flag = "0"),":",formatC(t %% 60, width = 2, format = "d", flag = "0")) )} plot_time <- ggplot(scenarios) + geom_bar(aes(file, total_time/60), stat = "identity") + ylab("Total duration (minutes)") + xlab("") + facet_grid(. 
~ pathdir) + geom_text(aes(x=file, y=total_time/60+10, label=time_dhms(total_time))) - + iterations <- iterations %>% mutate(ONI=ifelse(!is.na(optimal), "Optimal", ifelse(is.na(feasible), "Infeasible", "Nonoptimal")), one=1) %>% mutate(siter=as.numeric(gsub("i","", siter))) %>% arrange(pathdir, file, siter) - - plot_iterations <- ggplot(iterations) + geom_tile(aes(file, one, fill=ONI, group=one), stat = "identity", position = "stack") + ylab("Iterations") + scale_fill_manual(values = c("Optimal"="darkgreen", "Nonoptimal"="yellow", "Infeasible"="red")) + xlab("") + theme(legend.position="none") + facet_grid(. ~ pathdir) + geom_text(data=iterations %>% group_by(file, pathdir) %>% summarize(numiter=max(siter)), aes(x=file, numiter+5, label=numiter)) - - + + plot_iterations <- ggplot(iterations) + geom_tile(aes(file, one, fill=ONI, group=one), stat = "identity", position = "stack") + ylab("Iterations") + scale_fill_manual(values = c("Optimal"="darkgreen", "Nonoptimal"="yellow", "Infeasible"="red")) + xlab("") + theme(legend.position="none") + facet_grid(. ~ pathdir) + geom_text(data=iterations %>% group_by(file, pathdir) %>% summarize(numiter=max(siter), .groups="drop"), aes(x=file, numiter+5, label=numiter)) + + #aggregate ALLERR over runs allerr <- allerr %>% group_by_at(c("pathdir", file_group_columns)) %>% mutate(siter=as.numeric(gsub("i","", siter))) %>% arrange(run,siter) %>% mutate(siter=1:n()) %>% select(-run) plot_convergence <- ggplot(allerr) + geom_line(aes(siter, pmax(value,0.001), color=V3)) + facet_grid(. 
~ pathdir + file) + ylab("Convergence") + xlab("Iteration") + scale_y_log10(breaks = c(0.005, 0.1, 0.5, 1, 100), labels = c(0.005, 0.1, 0.5, 1, 100)) + geom_hline(yintercept = c(0.005), color="grey") + theme(legend.position = c(0.1,0.5), legend.title = element_blank(), legend.background = element_blank(), legend.key = element_blank()) - + #aggregate price_iter over runs price_iter <- price_iter %>% group_by_at(c("pathdir", file_group_columns, "V3", "t")) %>% mutate(siter=as.numeric(gsub("i","", siter))) %>% arrange(run,siter) %>% mutate(siter=1:n()) %>% select(-run) price_iter <- price_iter %>% group_by_at(c("siter", "pathdir", file_group_columns, "V3")) %>% mutate(value = value / mean(value[1])) %>% as.data.frame() #convert in starting all from 1 diff --git a/R/emission_plots.R b/R/emission_plots.R index 49d25f6..e9fd50e 100644 --- a/R/emission_plots.R +++ b/R/emission_plots.R @@ -8,26 +8,38 @@ Intensity_Plot <- function(years=c(2050, 2100), regions="World", year0=2010, sce tpes <- get_witch("tpes"); tpes_IP <- tpes %>% mutate(value=value*0.0036) %>% rename(PES=value) Q_EMI <- get_witch("Q_EMI"); Q_EMI_IP <- Q_EMI %>% mutate(value=value*3.667) %>% filter(e=="co2") %>% select(-e) %>% rename(CO2=value) Q <- get_witch("Q"); Q_IP <- Q %>% mutate(value=value*1e3) %>% filter(iq=="y") %>% select(-iq) %>% rename(GDP=value) - Intensity <- merge(tpes_IP, Q_EMI_IP, by=c("t", file_group_columns, "pathdir", "n")) - Intensity <- merge(Intensity, Q_IP, by=c("t", file_group_columns, "pathdir", "n")) + Intensity <- merge(tpes_IP, Q_EMI_IP, by=c("t", file_group_columns, "pathdir", "n", "tlen")) + Intensity <- merge(Intensity, Q_IP, by=c("t", file_group_columns, "pathdir", "n", "tlen")) Intensity_World <- Intensity; Intensity_World$n <- NULL - Intensity_World <- as.data.table(Intensity_World)[, lapply(.SD, sum), by=c("t", file_group_columns, "pathdir")] + Intensity_World <- as.data.table(Intensity_World)[, .(PES=sum(PES), CO2=sum(CO2), GDP=sum(GDP), tlen=first(tlen)), by=c("t", 
file_group_columns, "pathdir")] Intensity_World$n <- "World" Intensity <- rbind(Intensity, Intensity_World) + # Add EU aggregate if requested + if("EU" %in% regions) { + eu <- get_witch("eu") + eu_regions <- if(!exists("eu") || nrow(eu)==0) c("europe") else unique(eu$n) + Intensity_EU <- subset(Intensity, n %in% eu_regions); Intensity_EU$n <- NULL + Intensity_EU <- as.data.table(Intensity_EU)[, .(PES=sum(PES), CO2=sum(CO2), GDP=sum(GDP), tlen=first(tlen)), by=c("t", file_group_columns, "pathdir")] + Intensity_EU$n <- "EU" + Intensity <- rbind(Intensity, Intensity_EU) + } Intensity <- subset(Intensity, n %in% regions) Intensity$CI=Intensity$CO2/Intensity$PES *1e3 #gCO2/MJ (from GTCO2eq/EJ) Intensity$EI=Intensity$PES/Intensity$GDP *1e3 #MJ/$ (from EJ/billion $) Intensity_t <- subset(Intensity, t %in% yeartot(c(years, year0))) Intensity_t <- Intensity_t %>% group_by_at(c("pathdir", file_group_columns, "n")) %>% mutate(CI_change=(((CI/CI[t==yeartot(year0)])**(1/(ttoyear(t)-year0)))-1), EI_change=(((EI/EI[t==yeartot(year0)])**(1/(ttoyear(t)-year0)))-1)) %>% as.data.frame() - Intensity_t <- subset(Intensity_t, file %in% scenplot) + Intensity_t <- subset(Intensity_t, file %in% scenplot | grepl("historical", file, ignore.case=TRUE)) if(regions[1]=="World"){ - p_imp <- ggplot() + geom_point(data=subset(Intensity_t, ttoyear(t)!=year0+1e3), mapping=aes(x=CI_change, y=EI_change, color=file, shape=as.character(ttoyear(t))), size=6) + geom_hline(size=1,aes(yintercept=-.011), linetype="dashed") + geom_vline(size=1,aes(xintercept=-.003), linetype="dashed") + xlab(paste0("Carbon Intensity Change")) + ylab(paste0("Energy Intensity Change")) + guides(color=guide_legend(title=NULL), shape=guide_legend(title=NULL)) + theme(legend.position="bottom", legend.direction = "horizontal", legend.box = "horizontal") + scale_x_continuous(labels=scales::percent) + scale_y_continuous(labels=scales::percent) + p_imp <- ggplot() + geom_point(data=subset(Intensity_t, ttoyear(t)!=year0+1e3), 
mapping=aes(x=CI_change, y=EI_change, color=file, shape=as.character(ttoyear(t))), size=6) + geom_hline(linewidth=1,aes(yintercept=-.011), linetype="dashed") + geom_vline(linewidth=1,aes(xintercept=-.003), linetype="dashed") + xlab(paste0("Carbon Intensity Change")) + ylab(paste0("Energy Intensity Change")) + guides(color=guide_legend(title=NULL), shape=guide_legend(title=NULL)) + theme(legend.position="bottom", legend.direction = "horizontal", legend.box = "horizontal") + scale_x_continuous(labels=scales::percent) + scale_y_continuous(labels=scales::percent) p_ciei <- ggplot() + geom_point(data=subset(Intensity_t), mapping=aes(x=CI, y=EI, color=file, shape=as.character(ttoyear(t))), size=6) + xlab(paste0("Carbon Intensity [gCO2/MJ]")) + ylab(paste0("Energy Intensity [MJ/$]")) + guides(color=guide_legend(title=NULL), shape=guide_legend(title=NULL)) + theme(legend.position="bottom", legend.direction = "horizontal", legend.box = "horizontal") if(animate_plot) p_ciei <- ggplot() + geom_point(data=Intensity_t %>% select(CI,EI,file,t) %>% mutate(year=ttoyear(as.numeric(t))) %>% select(-t), mapping=aes(x=CI, y=EI, color=file), size=6) + xlab(paste0("Carbon Intensity [gCO2/MJ]")) + ylab(paste0("Energy Intensity [MJ/$]")) + guides(color=guide_legend(title=NULL), shape=guide_legend(title=NULL)) + theme(legend.position="bottom", legend.direction = "horizontal", legend.box = "horizontal") + labs(title = 'Year: {frame_time}') + transition_time(year) + ease_aes('linear') }else{ Intensity_t <- subset(Intensity_t, t==yeartot(years[1])) #for regional results only first year! 
- p_imp <- ggplot() + geom_point(data=Intensity_t, mapping=aes(x=CI_change, y=EI_change, colour=n, shape=file), size=6) + geom_hline(size=1,aes(yintercept=-.011), linetype="dashed") + geom_vline(size=1,aes(xintercept=-0.003), linetype="dashed") + xlab(paste0("Carbon Intensity Change p.a.")) + ylab(paste0("Energy Intensity Change p.a.")) + guides(color=guide_legend(title=NULL, nrow = 2)) + theme(legend.position="bottom", legend.direction = "horizontal", legend.box = "horizontal") + scale_x_continuous(labels=scales::percent) + scale_y_continuous(labels=scales::percent) + scale_color_manual(values = region_palette[restrict_regions]) - p_ciei <- ggplot() + geom_point(data=subset(Intensity_t), mapping=aes(x=CI, y=EI, color=n, shape=file), size=6) + xlab(paste0("Carbon Intensity [gCO2eq/MJ]")) + ylab(paste0("Energy Intensity [MJ/$]")) + guides(color=guide_legend(title=NULL), shape=guide_legend(title=NULL)) + theme(legend.position="bottom", legend.direction = "horizontal", legend.box = "horizontal") + scale_color_manual(values = region_palette[restrict_regions]) + # Create region-specific palette that only includes regions in the data + regions_in_data <- unique(Intensity_t$n) + region_palette_filtered <- get_region_palette(regions_in_data, reg_id = if(exists("reg_id")) reg_id else NULL) + p_imp <- ggplot() + geom_point(data=Intensity_t, mapping=aes(x=CI_change, y=EI_change, colour=n, shape=file), size=6) + geom_hline(linewidth=1,aes(yintercept=-.011), linetype="dashed") + geom_vline(linewidth=1,aes(xintercept=-0.003), linetype="dashed") + xlab(paste0("Carbon Intensity Change p.a.")) + ylab(paste0("Energy Intensity Change p.a.")) + guides(color=guide_legend(title=NULL, nrow = 2)) + theme(legend.position="bottom", legend.direction = "horizontal", legend.box = "horizontal") + scale_x_continuous(labels=scales::percent) + scale_y_continuous(labels=scales::percent) + scale_color_manual(values = region_palette_filtered) + p_ciei <- ggplot() + 
geom_point(data=subset(Intensity_t), mapping=aes(x=CI, y=EI, color=n, shape=file), size=6) + xlab(paste0("Carbon Intensity [gCO2eq/MJ]")) + ylab(paste0("Energy Intensity [MJ/$]")) + guides(color=guide_legend(title=NULL), shape=guide_legend(title=NULL)) + theme(legend.position="bottom", legend.direction = "horizontal", legend.box = "horizontal") + scale_color_manual(values = region_palette_filtered) } if(animate_plot) print(animate(p_ciei, nframes = 20, duration = 10, rewind = FALSE)) if(!animate_plot) { @@ -52,9 +64,9 @@ Q_EMI_SECTORS <- Q_EMI_SECTORS %>% filter(ttoyear(t) >= 2000 & ttoyear(t) <= 210 #Stacked Regions Plot ggplot(subset(Q_EMI_SECTORS, file %in% scenplot),aes(ttoyear(t),value, fill=n)) + geom_area(stat="identity") + facet_grid(sector ~ file, scales = "free") + ylab("GtCO2") + xlab("") + guides(fill=guide_legend(title=NULL, nrow = 1)) + theme(legend.position="bottom") + scale_fill_manual(values = region_palette[regions]) + scale_x_continuous(breaks = seq(2000,2100,25)) + theme(axis.text.x = element_text(angle = 90, vjust = 0.5, hjust=1)) saveplot("Sectoral CO2 Emissions Regions", add_title=F) -ggplot(subset(Q_EMI_SECTORS, t<=10 & n %in% regions & sector=="Fossil Fuels and Industrial" & file %in% scenplot)) + geom_line(stat="identity", size=1.2, aes(ttoyear(t),value, color=file)) + facet_wrap( ~ n, scales = "free", switch=NULL, ncol=length(regions)) + ylab("GtCO2") + xlab("") + guides(color=guide_legend(title=NULL, nrow = 2)) + theme(legend.position="bottom") +ggplot(subset(Q_EMI_SECTORS, t<=10 & n %in% regions & sector=="Fossil Fuels and Industrial" & file %in% scenplot)) + geom_line(stat="identity", linewidth=1.2, aes(ttoyear(t),value, color=file)) + facet_wrap( ~ n, scales = "free", switch=NULL, ncol=length(regions)) + ylab("GtCO2") + xlab("") + guides(color=guide_legend(title=NULL, nrow = 2)) + theme(legend.position="bottom") saveplot("Sectoral CO2 Emissions FFI") -ggplot(subset(Q_EMI_SECTORS, t<=10 & n %in% regions & sector=="Land Use" & file %in% 
scenplot)) + geom_line(stat="identity", size=1.2, aes(ttoyear(t),value, color=file)) + facet_wrap( ~ n, scales = "free", switch=NULL, ncol=length(regions)) + ylab("GtCO2") + xlab("") + guides(color=guide_legend(title=NULL, nrow = 2)) + theme(legend.position="bottom") +ggplot(subset(Q_EMI_SECTORS, t<=10 & n %in% regions & sector=="Land Use" & file %in% scenplot)) + geom_line(stat="identity", linewidth=1.2, aes(ttoyear(t),value, color=file)) + facet_wrap( ~ n, scales = "free", switch=NULL, ncol=length(regions)) + ylab("GtCO2") + xlab("") + guides(color=guide_legend(title=NULL, nrow = 2)) + theme(legend.position="bottom") saveplot("Sectoral CO2 Emissions LU") } @@ -90,7 +102,7 @@ Mitigation_Sources <- function(regions=witch_regions, scenario_stringency_order, MITIGATION_SOURCES <- subset(MITIGATION_SOURCES, file!=scenario_stringency_order[1]) MITIGATION_SOURCES <- MITIGATION_SOURCES[order(match(MITIGATION_SOURCES$file,scenario_stringency_order)),] MITIGATION_SOURCES <- MITIGATION_SOURCES[order(match(MITIGATION_SOURCES$file,scenario_stringency_order),match(MITIGATION_SOURCES$source,emi_sources)) ,] - MITIGATION_SOURCES$pathdir <- basename(fullpathdir[1]) #to avoid issues when saving data as EXCEL + MITIGATION_SOURCES$pathdir <- basename(results_dir[1]) #to avoid issues when saving data as EXCEL #to set minimal negative vales to zero MITIGATION_SOURCES$value <- pmax(MITIGATION_SOURCES$value, 0) #Stacked Regions Plot diff --git a/R/energy_plots.R b/R/energy_plots.R index 2e7dc0d..e7b67b8 100644 --- a/R/energy_plots.R +++ b/R/energy_plots.R @@ -1,17 +1,23 @@ Primary_Energy_Mix <- function(PES_y="value", regions="World", years=seq(yearmin, yearmax), plot_type="area", scenplot=scenlist, plot_name="Primary Energy Mix", add_total_tpes = F){ - if(length(fullpathdir)!=1){print("PES mix REGIONAL only for one directory at a time!");break} - Q_FUEL <- get_witch("Q_FUEL"); Q_FUEL_pes <- Q_FUEL %>% mutate(value=value*0.0036) %>% rename(j=fuel) + if(length(results_dir)!=1){stop("PES 
mix REGIONAL only for one directory at a time!"); return(invisible(NULL))} + Q_FUEL <- get_witch("Q_FUEL", add_historical=FALSE); Q_FUEL_pes <- Q_FUEL %>% mutate(value=value*0.0036) %>% rename(j=fuel) #if fuel==uranium multiply by the efficiency of 0.3333 Q_FUEL_pes <- Q_FUEL_pes %>% mutate(value=ifelse(j=="uranium", value*0.3333, value)) Q_FUEL_pes - Q_EN <- get_witch("Q_EN") %>% filter(j %in% c("elhydro", "elwindon", "elwindoff", "elpv", "elcsp")); Q_EN_pes <- Q_EN %>% mutate(value=value*0.0036) + # Include both elwind (historical) and elwindon/elwindoff (model) + Q_EN <- get_witch("Q_EN", add_historical=FALSE) %>% filter(j %in% c("elhydro", "elwind", "elwindon", "elwindoff", "elpv", "elcsp")); Q_EN_pes <- Q_EN %>% mutate(value=value*0.0036) #add bunkers - BUNK_FUEL <- get_witch("BUNK_FUEL") %>% select(-jbunk) %>% mutate(value=value*0.0036) %>% rename(j=fuel) + BUNK_FUEL <- get_witch("BUNK_FUEL", add_historical=FALSE) %>% mutate(value=value*0.0036) %>% rename(j=fuel) + # Remove jbunk column if it exists + if("jbunk" %in% names(BUNK_FUEL)) BUNK_FUEL <- BUNK_FUEL %>% select(-jbunk) #aggregate sub-categories - TPES <- rbind(Q_FUEL_pes, Q_EN_pes, BUNK_FUEL) - TPES <- subset(TPES, j %in% c("oil", "coal", "gas", "uranium", "trbiofuel", "wbio", "advbio", "trbiomass") | j %in% c("elpv", "elcsp", "elhydro", "elback", "elwindon", "elwindoff")) + TPES <- data.table::rbindlist(list(Q_FUEL_pes, Q_EN_pes, BUNK_FUEL), fill=TRUE) + # Ensure tlen column exists and has valid values + if(!"tlen" %in% names(TPES)) TPES$tlen <- tstep + TPES$tlen[is.na(TPES$tlen)] <- tstep + TPES <- subset(TPES, j %in% c("oil", "coal", "gas", "uranium", "trbiofuel", "wbio", "advbio", "trbiomass") | j %in% c("elpv", "elcsp", "elhydro", "elback", "elwind", "elwindon", "elwindoff")) TPES$category[TPES$j %in% c("oil")] = "Oil" TPES$category[TPES$j %in% c("gas")] = "Natural Gas" TPES$category[TPES$j %in% c("coal")] = "Coal" @@ -19,14 +25,14 @@ Primary_Energy_Mix <- function(PES_y="value", regions="World", 
years=seq(yearmin TPES$category[TPES$j %in% c("trbiofuel", "wbio", "advbio", "trbiomass")] = "Biomass" TPES$category[TPES$j %in% c("elpv", "elcsp")] = "Solar" TPES$category[TPES$j %in% c("elhydro")] = "Hydro" - TPES$category[TPES$j %in% c("elwindon", "elwindoff")] = "Wind" + TPES$category[TPES$j %in% c("elwind", "elwindon", "elwindoff")] = "Wind" #order categories for plots PES_Categories <- c("Oil", "Coal", "Natural Gas", "Nuclear", "Biomass", "Hydro", "Wind", "Solar") TPES <- TPES[order(match(TPES$category,PES_Categories)),] TPES$j <- NULL - TPES <- as.data.table(TPES)[, lapply(.SD, sum), by=c("t", file_group_columns, "pathdir", "n", "category")] + TPES <- as.data.table(TPES)[, .(value=sum(value), tlen=first(tlen)), by=c("t", file_group_columns, "pathdir", "n", "category")] if(regions[1]=="World"){ - TPES$n <- NULL; TPES <- TPES[, lapply(.SD, sum), by=c("t", file_group_columns, "pathdir", "category")]; TPES$n <- "World" + TPES$n <- NULL; TPES <- TPES[, .(value=sum(value), tlen=first(tlen)), by=c("t", file_group_columns, "pathdir", "category")]; TPES$n <- "World" }else{ TPES <- subset(TPES, n %in% regions) } @@ -45,9 +51,9 @@ Primary_Energy_Mix <- function(PES_y="value", regions="World", years=seq(yearmin if(PES_y=="share"){p <- p + ylab("%")}else{p <- p + ylab("EJ")} if(add_total_tpes & PES_y=="value"){ - total_tpes <- get_witch("tpes") %>% mutate(value=value*0.0036) - if(regions[1]=="World"){total_tpes$n <- NULL; total_tpes <- total_tpes[, lapply(.SD, sum), by=c("t", file_group_columns, "pathdir")]; total_tpes$n <- "World"}else{total_tpes <- subset(total_tpes, n %in% regions)} - p <- p + geom_line(data = subset(total_tpes, ttoyear(t)<=yearmax & n %in% regions & ttoyear(t) %in% years & file %in% scenplot), aes(ttoyear(t),value), color="darkgrey", linetype="dashed") + total_tpes <- get_witch("tpes", add_historical=FALSE) %>% mutate(value=value*0.0036) + if(regions[1]=="World"){total_tpes$n <- NULL; total_tpes <- total_tpes[, .(value=sum(value), tlen=first(tlen)), 
by=c("t", file_group_columns, "pathdir")]; total_tpes$n <- "World"}else{total_tpes <- subset(total_tpes, n %in% regions)} + p <- p + geom_line(data = subset(total_tpes, ttoyear(t)<=yearmax & n %in% regions & ttoyear(t) %in% years & file %in% scenplot), aes(ttoyear(t),value), color="darkgrey", linetype="dashed") } saveplot(plot_name) } @@ -57,10 +63,13 @@ Primary_Energy_Mix <- function(PES_y="value", regions="World", years=seq(yearmin Electricity_Mix <- function(Electricity_y="value", regions="World", years=seq(yearmin, yearmax), plot_type="area", plot_name="Electricity Mix", scenplot=scenlist, add_total_elec=F){ - if(length(fullpathdir)!=1){print("Electricity mix only for one directory at a time!");break} - Q_IN <- get_witch("Q_IN"); Q_IN_el <- Q_IN %>% mutate(value=value * 0.0036) - csi_el <- get_witch("csi") %>% rename(csi=value) %>% mutate(jfed=gsub("_new", "", jfed)) %>% filter(jfed %in% c("eloil", "elpb", "elpc", "elgastr", "elbigcc", "elcigcc", "elgasccs", "elpc_ccs", "elpc_oxy")) - JFED <- merge(Q_IN_el, csi_el, by = c("t", "n", file_group_columns, "pathdir", "fuel", "jfed"), all=TRUE) + if(length(results_dir)!=1){stop("Electricity mix only for one directory at a time!"); return(invisible(NULL))} + Q_IN <- get_witch("Q_IN", add_historical=FALSE); Q_IN_el <- Q_IN %>% mutate(value=value * 0.0036) + csi_el <- get_witch("csi", add_historical=FALSE) %>% rename(csi=value) %>% mutate(jfed=gsub("_new", "", jfed)) %>% filter(jfed %in% c("eloil", "elpb", "elpc", "elgastr", "elbigcc", "elcigcc", "elgasccs", "elpc_ccs", "elpc_oxy")) + # Merge - handle cases where Q_IN_el might not have 'fuel' column (historical data) + merge_cols <- intersect(c("t", "n", file_group_columns, "pathdir", "fuel", "jfed"), + intersect(names(Q_IN_el), names(csi_el))) + JFED <- merge(Q_IN_el, csi_el, by = merge_cols, all=TRUE) JFED <- JFED %>% filter(jfed %in% c("eloil", "elpb", "elpc", "elgastr", "elbigcc", "elcigcc", "elgasccs", "elpc_ccs", "elpc_oxy")) #take efficiency for EL into account 
#add csi for historical (seems to be 1!) @@ -71,17 +80,21 @@ Electricity_Mix <- function(Electricity_y="value", regions="World", years=seq(ye JFED$csi[is.na(JFED$csi)] <- 1 JFED$value <- JFED$value * JFED$csi JFED$csi <- NULL - JFED$fuel <- NULL + if("fuel" %in% names(JFED)) JFED$fuel <- NULL setnames(JFED, "jfed", "j") - Q_EN_pes <- get_witch("Q_EN") %>% mutate(value=value*0.0036) - Q_EN_pes <- subset(Q_EN_pes, j %in% c("elpv", "elcsp", "elnuclear", "elwind", "elhydro")) - ELEC <- rbind(Q_EN_pes, JFED) - ELEC[is.na(ELEC)] <- 0 #get rid of NAs to avoid sums not being correct, mainly from historical data! + Q_EN_pes <- get_witch("Q_EN", add_historical=FALSE) %>% mutate(value=value*0.0036) + # Include both elwind (historical) and elwindon/elwindoff (model) + Q_EN_pes <- subset(Q_EN_pes, j %in% c("elpv", "elcsp", "elnuclear", "elwind", "elwindon", "elwindoff", "elhydro")) + ELEC <- data.table::rbindlist(list(Q_EN_pes, JFED), fill=TRUE) + ELEC$value[is.na(ELEC$value)] <- 0 #get rid of NAs in value column only + # Ensure tlen column exists and has valid values + if(!"tlen" %in% names(ELEC)) ELEC$tlen <- tstep + ELEC$tlen[is.na(ELEC$tlen)] <- tstep #set default tstep if tlen is NA #aggregate sub-categories1 ELEC$category[ELEC$j %in% c("elnuclear")] = "Nuclear" ELEC$category[ELEC$j %in% c("elpv", "elcsp")] = "Solar" ELEC$category[ELEC$j %in% c("elhydro")] = "Hydro" - ELEC$category[ELEC$j %in% c("elwind")] = "Wind" + ELEC$category[ELEC$j %in% c("elwind", "elwindon", "elwindoff")] = "Wind" ELEC$category[ELEC$j %in% c("elpb")] = "Biomass w/o CCS" ELEC$category[ELEC$j %in% c("elbigcc")] = "Biomass w/ CCS" ELEC$category[ELEC$j %in% c("elpc")] = "Coal w/o CCS" @@ -95,16 +108,16 @@ Electricity_Mix <- function(Electricity_y="value", regions="World", years=seq(ye Electricity_Categories <- c("Coal w/o CCS", "Coal w/ CCS", "Gas w/o CCS", "Gas w/ CCS", "Oil", "Nuclear", "Biomass w/o CCS", "Biomass w/ CCS", "Hydro", "Wind", "Solar") ELEC <- 
ELEC[order(match(ELEC$category,Electricity_Categories)),] ELEC$j <- NULL - ELEC <- as.data.table(ELEC)[, lapply(.SD, sum), by=c("t", file_group_columns, "pathdir", "n", "category")] + ELEC <- as.data.table(ELEC)[, .(value=sum(value), tlen=first(tlen)), by=c("t", file_group_columns, "pathdir", "n", "category")] if(regions[1]=="World"){ - ELEC$n <- NULL; ELEC <- ELEC[, lapply(.SD, sum), by=c("t", file_group_columns, "pathdir", "category")]; ELEC$n <- "World" + ELEC$n <- NULL; ELEC <- ELEC[, .(value=sum(value), tlen=first(tlen)), by=c("t", file_group_columns, "pathdir", "category")]; ELEC$n <- "World" }else{ ELEC <- subset(ELEC, n %in% regions) } assign("ELEC_MIX",ELEC,envir = .GlobalEnv) if(Electricity_y=="share"){ELEC <- ELEC %>% group_by_at(c("t", file_group_columns, "n", "pathdir")) %>% mutate(value=value/(sum(value))*100)} - p <- ggplot(data=subset(ELEC, ttoyear(t) %in% years & file %in% scenplot)) + p <- ggplot(data=subset(ELEC, ttoyear(t) %in% years & (file %in% scenplot | grepl("historical", file, ignore.case=TRUE)))) p <- p + xlab("") + guides(fill=guide_legend(title=NULL, nrow = 2)) + theme(legend.position="bottom") if(plot_type=="area"){ p <- p + geom_area(aes(ttoyear(t),value, fill=category), stat="identity") + scale_fill_manual(values=c("Solar"="yellow", "Hydro"="blue", "Nuclear"="cyan", "Wind"="orange", "Coal w/ CCS"="dimgrey", "Coal w/o CCS"="black", "Gas w/ CCS"="brown2", "Gas w/o CCS"="brown", "Oil"="darkorchid4", "Biomass w/ CCS"="green", "Biomass w/o CCS"="darkgreen")) @@ -118,8 +131,8 @@ Electricity_Mix <- function(Electricity_y="value", regions="World", years=seq(ye if(add_total_elec & Electricity_y=="value"){ total_elec <- get_witch("Q_EN") %>% filter(j=="el") %>% mutate(value=value*0.0036) - if(regions[1]=="World"){total_elec$n <- NULL; total_elec <- total_elec[, lapply(.SD, sum), by=c("t", file_group_columns, "pathdir")]; total_elec$n <- "World"}else{total_elec <- subset(total_elec, n %in% regions)} - p <- p + geom_line(data = 
subset(total_elec, ttoyear(t)<=yearmax & n %in% regions & ttoyear(t) %in% years & file %in% scenplot), aes(ttoyear(t),value), color="darkgrey", linetype="dashed") + if(regions[1]=="World"){total_elec$n <- NULL; total_elec <- total_elec[, .(value=sum(value), tlen=first(tlen)), by=c("t", file_group_columns, "pathdir")]; total_elec$n <- "World"}else{total_elec <- subset(total_elec, n %in% regions)} + p <- p + geom_line(data = subset(total_elec, ttoyear(t)<=yearmax & n %in% regions & ttoyear(t) %in% years & (file %in% scenplot | grepl("historical", file, ignore.case=TRUE))), aes(ttoyear(t),value), color="darkgrey", linetype="dashed") } saveplot(plot_name) } @@ -159,8 +172,8 @@ Energy_Trade <- function(fuelplot=c("oil", "coal", "gas"), scenplot=scenlist, ad Investment_Plot <- function(regions=witch_regions, scenplot=scenlist, match_hist_inv = F){ if(regions[1]=="World") regions <- witch_regions - I_EN <- get_witch("I_EN", check_calibration = T) - I_RD_inv = get_witch("I_RD", check_calibration = T) + I_EN <- get_witch("I_EN") + I_RD_inv = get_witch("I_RD") I_EN_sum <- I_EN %>% group_by_at(c("pathdir", file_group_columns, "n", "t")) %>% summarize(value=sum(value)) I_RD_inv <- I_RD_inv %>% mutate(rd = dplyr::recode(rd, !!!c("en"="Energy Efficiency","neladvbio"="Advanced Biofuels","battery"="Batteries", "fuelcell"="Hydrogen"))) I_RD_inv <- I_RD_inv %>% rename(category="rd") %>% mutate(sector="Energy RnD") @@ -171,16 +184,16 @@ Investment_Plot <- function(regions=witch_regions, scenplot=scenlist, match_hist I_EN_CCS <- I_EN %>% group_by_at(c("pathdir", file_group_columns, "n", "t")) %>% filter(jinv %in% c("elcigcc", "elgasccs", "elbigcc", "nelcoalccs")) %>% summarize(value=sum(value)) %>% mutate(category="Fossils with CCS") j_stor <- unique(get_witch("j_stor")$j) I_EN_Storage <- I_EN %>% group_by_at(c("pathdir", file_group_columns, "n", "t")) %>% filter(jinv %in% j_stor | jinv=="str_storage") %>% summarize(value=sum(value)) %>% mutate(category="Storage") - I_EN_GRID <- 
get_witch("I_EN_GRID", check_calibration = T) + I_EN_GRID <- get_witch("I_EN_GRID") I_EN_GRID$category <- "Grid" - I_EN_categorized <- rbind(I_EN_Renewables, I_EN_CCS, I_EN_FossilFuels, I_EN_Nuclear, I_EN_Hydrogen, I_EN_GRID, I_EN_Storage) + I_EN_categorized <- data.table::rbindlist(list(I_EN_Renewables, I_EN_CCS, I_EN_FossilFuels, I_EN_Nuclear, I_EN_Hydrogen, I_EN_GRID, I_EN_Storage), fill=TRUE) I_EN_categorized$sector <- "Power supply" I_TRANSPORT_trad <- I_EN %>% group_by_at(c("pathdir", file_group_columns, "n", "t")) %>% filter(jinv %in% c("trad_cars", "hybrid", "trad_stfr", "hbd_stfr")) %>% summarize(value=sum(value)) %>% mutate(category="ICE/Hybrid") I_TRANSPORT_lowcarbon <- I_EN %>% group_by_at(c("pathdir", file_group_columns, "n", "t")) %>% filter(jinv %in% c("edv", "edv_stfr", "plg_hybrid", "plg_hbd_stfr")) %>% summarize(value=sum(value)) %>% mutate(category="Electric Vehicles") - I_TRANSPORT <- rbind(I_TRANSPORT_trad, I_TRANSPORT_lowcarbon); + I_TRANSPORT <- data.table::rbindlist(list(I_TRANSPORT_trad, I_TRANSPORT_lowcarbon), fill=TRUE); I_inv <- get_witch("I") %>% dplyr::rename(category=g) %>% mutate(sector="Final Good") %>% filter(category=="fg") - I_OUT_inv <- get_witch("I_OUT", check_calibration = T) %>% rename(category=f) %>% filter(category=="oil") %>% mutate(category="Oil Extraction") %>% mutate(sector="Fuel supply") - Investment_Energy <- rbind(as.data.frame(I_EN_categorized), I_RD_inv, I_OUT_inv, I_inv) + I_OUT_inv <- get_witch("I_OUT") %>% rename(category=f) %>% filter(category=="oil") %>% mutate(category="Oil Extraction") %>% mutate(sector="Fuel supply") + Investment_Energy <- data.table::rbindlist(list(as.data.frame(I_EN_categorized), I_RD_inv, I_OUT_inv, I_inv), fill=TRUE) Investment_Energy_historical <- Investment_Energy %>% filter(file=="historical_iea") %>% filter(ttoyear(t)==2020) %>% mutate(file="IEA (2020)", value_annualized=value * 1e3) #align 2020 value to IEA to account for missing parts of reporting (as we only count modules etc.) 
@@ -190,7 +203,7 @@ Investment_Plot <- function(regions=witch_regions, scenplot=scenlist, match_hist Investment_Energy <- Investment_Energy %>% mutate(value=value*min(max(0.8,value_annualized), 1.5)) %>% select(-value_annualized) } - Investment_Energy <- Investment_Energy %>% filter(t>=4 & t<=10 & (file %in% scenplot)) %>% group_by_at(c("category", "sector", "pathdir", file_group_columns, "n")) %>% mutate(value_annualized=value/(10-4+1) * 1e3) + Investment_Energy <- Investment_Energy %>% filter(t>=4 & t<=10 & (file %in% scenplot | grepl("historical", file, ignore.case=TRUE))) %>% group_by_at(c("category", "sector", "pathdir", file_group_columns, "n")) %>% mutate(value_annualized=value/(10-4+1) * 1e3) Investment_Energy_global <- rbind(Investment_Energy, Investment_Energy_historical) %>% group_by_at(c("category", "sector", "pathdir", file_group_columns, "t")) %>% filter(n %in% regions) %>% summarize(value=sum(value), value_annualized=sum(value_annualized)) assign("Investment_Energy_regional",rbind(Investment_Energy, Investment_Energy_historical),envir = .GlobalEnv) Investment_Energy_global <- Investment_Energy_global %>% filter(category!="fg") @@ -203,7 +216,7 @@ Investment_Plot <- function(regions=witch_regions, scenplot=scenlist, match_hist Power_capacity <- function(regions="World", years=seq(yearmin, yearmax), plot_name="Power Capacity", scenplot=scenlist){ - if(length(fullpathdir)!=1){print("Electricity mix only for one directory at a time!")}else{ + if(length(results_dir)!=1){print("Electricity mix only for one directory at a time!")}else{ K_EN <- get_witch("K_EN") K_EN <- K_EN %>% filter(jreal %in% c("eloil", "elpb", "elpc", "elgastr", "elbigcc", "elcigcc", "elgasccs", "elpc_ccs", "elpv", "elcsp", "elnuclear", "elwindon", "elwindoff", "elhydro")) K_EN <- K_EN %>% mutate(category = dplyr::recode(jreal, !!!c("elpc" = "Coal w/o CCS", "elpc_ccs" = "Coal w/ CCS", "elgastr" = "Gas w/o CCS", "elgasccs" = "Gas w/ CCS", "eloil" = "Oil", "elnuclear" = "Nuclear", "elpb" 
= "Biomass w/o CCS", "elbigcc" = "Biomass w/ CCS", "elhydro" = "Hydro", "elwindon" = "Wind Onshore", "elwindoff" = "Wind Offshore", "elpv" = "Solar PV", "elcsp" = "Solar CSP"))) @@ -215,7 +228,7 @@ Power_capacity <- function(regions="World", years=seq(yearmin, yearmax), plot_na ELEC <- subset(ELEC, n %in% regions) ELEC <- ELEC %>% group_by_at(c("t", file_group_columns, "pathdir", "category")) %>% summarize(value=sum(value)) %>% mutate(n="World") } - p <- ggplot(data=subset(ELEC, ttoyear(t) %in% years & file %in% scenplot)) + p <- ggplot(data=subset(ELEC, ttoyear(t) %in% years & (file %in% scenplot | grepl("historical", file, ignore.case=TRUE)))) p <- p + xlab("") + guides(color=guide_legend(title=NULL, nrow = 3)) + theme(legend.position="bottom") #p <- p + geom_line(aes(ttoyear(t),value*1e3, color=category), stat="identity") + scale_color_manual(values=c("Solar PV"="yellow","Solar CSP"="gold2", "Hydro"="blue", "Nuclear"="cyan", "Wind Onshore"="orange", "Wind Offshore"="coral2", "Coal w/ CCS"="dimgrey", "Coal w/o CCS"="black", "Gas w/ CCS"="brown2", "Gas w/o CCS"="brown", "Oil"="darkorchid4", "Biomass w/ CCS"="green", "Biomass w/o CCS"="darkgreen")) + ylab("GW") p <- p + geom_area(aes(ttoyear(t),value*1e3, fill=category), stat="identity", position = "stack") + scale_fill_manual(values=c("Solar PV"="yellow","Solar CSP"="gold2", "Hydro"="blue", "Nuclear"="cyan", "Wind Onshore"="orange", "Wind Offshore"="coral2", "Coal w/ CCS"="dimgrey", "Coal w/o CCS"="black", "Gas w/ CCS"="brown2", "Gas w/o CCS"="brown", "Oil"="darkorchid4", "Biomass w/ CCS"="green", "Biomass w/o CCS"="darkgreen")) + ylab("GW") diff --git a/R/gdx_file_loader.R b/R/gdx_file_loader.R new file mode 100644 index 0000000..47e9dc9 --- /dev/null +++ b/R/gdx_file_loader.R @@ -0,0 +1,212 @@ +## GDX File Discovery and Loading +## This file contains functions for discovering and loading GDX files + +#' Load GDX files from results directory +#' +#' Discovers GDX files in the results directory, applies filters, 
and sets up +#' scenario list and region information. This function is called internally +#' during session initialization for WITCH/RICE models. +#' +#' @return NULL (modifies global environment with filelist, scenlist, region info) +#' @keywords internal +.load_gdx_files <- function() { + # Check if we should load GDX files + if(!exists("results_dir", envir=.GlobalEnv) || + is.null(get("results_dir", envir=.GlobalEnv)) || + exists("iamc_filename", envir=.GlobalEnv) || + exists("iamc_databasename", envir=.GlobalEnv)) { + return(invisible(NULL)) + } + + results_dir <- get("results_dir", envir=.GlobalEnv) + + # Discover GDX files + filelist <- gsub(".gdx", "", list.files( + path = results_dir[1], + full.names = FALSE, + pattern = "*.gdx", + recursive = FALSE + )) + + # Apply file filters + if(!exists("restrict_files", envir=.GlobalEnv)) { + assign("restrict_files", "results_", envir=.GlobalEnv) + } + restrict_files <- get("restrict_files", envir=.GlobalEnv) + + if(restrict_files[1] != "") { + for(i in 1:length(restrict_files)) { + .filelist_res <- filelist[apply(outer(filelist, restrict_files[i], stringr::str_detect), 1, all)] + if(i == 1) { + .filelist_res_all <- .filelist_res + } else { + .filelist_res_all <- c(.filelist_res_all, .filelist_res) + } + } + filelist <- unique(.filelist_res_all) + } + + # Apply exclusion filters + if(exists("exclude_files", envir=.GlobalEnv)) { + exclude_files <- get("exclude_files", envir=.GlobalEnv) + if(exclude_files[1] != "") { + filelist <- filelist[!stringr::str_detect(filelist, paste(exclude_files, collapse = '|'))] + } + } + + if(length(filelist) == 0) { + stop("No GDX files found.") + } + + # Assign filelist early so other functions can access it + assign("filelist", filelist, envir=.GlobalEnv) + + # Set up scenario list + if(exists("scenlist", envir=.GlobalEnv)) { + scenlist <- get("scenlist", envir=.GlobalEnv) + # Check for missing scenarios + if(length(names(scenlist[!(names(scenlist) %in% filelist)])) > 0) { + 
print("Missing Scenarios:") + print(cat(names(scenlist[!(names(scenlist) %in% filelist)]), sep = ", ")) + } + filelist <- intersect(names(scenlist), filelist) + scenlist <- scenlist[filelist] + assign("scenlist", scenlist, envir=.GlobalEnv) + assign("filelist", filelist, envir=.GlobalEnv) # Update after filtering + } else { + if(!exists("removepattern", envir=.GlobalEnv)) { + assign("removepattern", "results_", envir=.GlobalEnv) + } + removepattern <- get("removepattern", envir=.GlobalEnv) + scenlist <- gsub(paste(removepattern, collapse = "|"), "", filelist) + names(scenlist) <- filelist + assign("scenlist", scenlist, envir=.GlobalEnv) + } + + print(data.frame(scenlist = scenlist)) + + # Set up file grouping + if(exists("file_separate", envir=.GlobalEnv)) { + file_separate <- get("file_separate", envir=.GlobalEnv) + file_group_columns <- c("file", unname(file_separate[3:length(file_separate)])) + } else { + file_group_columns <- "file" + } + assign("file_group_columns", file_group_columns, envir=.GlobalEnv) + + # Note: We always assume flexible timestep - tlen will be loaded in get_witch() + # No need to detect or set flexible_timestep anymore + + # Check for stochastic runs + tset <- get_witch("t") + if("t" %in% names(tset)) { + if(any(stringr::str_detect((tset %>% dplyr::select(t) %>% unique())$t, "_"))) { + stochastic_files <- tset %>% + dplyr::filter(stringr::str_detect(t, "_")) %>% + dplyr::mutate(numeric_t = as.numeric(sub(".*_(\\d+)$", "\\1", t))) %>% + dplyr::group_by(file) %>% + dplyr::summarise(num_branches = max(numeric_t, na.rm = TRUE)) + assign("stochastic_files", stochastic_files, envir=.GlobalEnv) + } else { + assign("stochastic_files", NULL, envir=.GlobalEnv) + } + } else { + assign("stochastic_files", NULL, envir=.GlobalEnv) + } + + # Get variable descriptions from first file + mygdx <- gdxtools::gdx(file.path(results_dir[1], paste0(filelist[1], ".gdx"))) + all_var_descriptions <- rbind( + data.frame(name = mygdx$variables$name, description = 
mygdx$variables$text), + data.frame(name = mygdx$parameters$name, description = mygdx$parameters$text) + ) + assign("all_var_descriptions", all_var_descriptions, envir=.GlobalEnv) + + # Set up region information + .setup_region_info(filelist, results_dir) + + # filelist already assigned earlier + invisible(NULL) +} + +#' Setup region information +#' +#' @param filelist Character vector of GDX filenames +#' @param results_dir Path to results directory +#' @keywords internal +.setup_region_info <- function(filelist, results_dir) { + # Get region ID + if(!exists("reg_id", envir=.GlobalEnv)) { + conf <- get_witch("conf") + if(!exists("conf")) { + stop("No conf set found. Please specify reg_id manually!") + } + if(length(unique(subset(conf, V1 == "regions")$V2)) > 1) { + print("Be careful: not all results files were run with the same regional aggregation!") + } + scenlist <- get("scenlist", envir=.GlobalEnv) + reg_id <- subset(conf, file == scenlist[1] & pathdir == basename(results_dir[1]) & V1 == "regions")$V2 + assign("reg_id", reg_id, envir=.GlobalEnv) + } else { + reg_id <- get("reg_id", envir=.GlobalEnv) + } + + # Get regions list + if (requireNamespace("gdxtools", quietly = TRUE) && + exists("batch_extract", where = asNamespace("gdxtools"), mode = "function")) { + n <- suppressWarnings(gdxtools::batch_extract("n", files = file.path(results_dir, paste0(filelist, ".gdx")))) + if(is.null(n$n)) { + witch_regions <- "World" + } else { + witch_regions <- unique(n$n$V1) + } + } else { + # Fallback: try to get regions from first file + tryCatch({ + first_gdx <- gdxtools::gdx(file.path(results_dir[1], paste0(filelist[1], ".gdx"))) + if("n" %in% names(first_gdx$sets)) { + witch_regions <- first_gdx$sets$n$V1 + } else { + witch_regions <- "World" + } + }, error = function(e) { + witch_regions <- "World" + }) + } + + # Apply nice region names if they exist + if(exists("nice_region_names", envir=.GlobalEnv)) { + nice_region_names <- get("nice_region_names", envir=.GlobalEnv) + 
witch_regions <- dplyr::recode(witch_regions, !!!nice_region_names) + } + + display_regions <- witch_regions + assign("display_regions", display_regions, envir=.GlobalEnv) + assign("witch_regions", witch_regions, envir=.GlobalEnv) + + # Set up color palettes + region_palette <- get_region_palette(witch_regions, reg_id) + if(exists("restrict_regions", envir=.GlobalEnv)) { + restrict_regions <- get("restrict_regions", envir=.GlobalEnv) + region_palette <- region_palette[restrict_regions] + } + assign("region_palette", region_palette, envir=.GlobalEnv) + + # Short names palette + region_palette_specific_short <- region_palette + names(region_palette_specific_short) <- witch_name_short(names(region_palette)) + assign("region_palette_specific_short", region_palette_specific_short, envir=.GlobalEnv) + + # Long names palette + region_palette_longnames <- region_palette + names(region_palette_longnames) <- dplyr::recode( + names(region_palette), + !!!setNames( + paste0(witch_region_longnames[names(witch_region_longnames)], " (", names(witch_region_longnames), ")"), + names(witch_region_longnames) + ) + ) + assign("region_palette_longnames", region_palette_longnames, envir=.GlobalEnv) + + print(paste(length(get("scenlist", envir=.GlobalEnv)), "Scenarios and", length(witch_regions), "regions considered.")) +} diff --git a/R/gdx_loader_new.R b/R/gdx_loader_new.R new file mode 100644 index 0000000..8c1e812 --- /dev/null +++ b/R/gdx_loader_new.R @@ -0,0 +1,320 @@ +## Modern GDX File Loading +## Refactored to avoid global variables and improve clarity + +#' Discover GDX files in directory +#' +#' @param results_dir Path to results directory +#' @param restrict_files Pattern to filter files (e.g., "results_") +#' @param exclude_files Pattern to exclude files +#' @return Character vector of GDX filenames (without .gdx extension) +#' @keywords internal +.discover_gdx_files <- function(results_dir, restrict_files = "results_", exclude_files = "") { + message("Searching for GDX files 
in: ", results_dir) + + # Find all GDX files + all_files <- gsub("\\.gdx$", "", list.files( + path = results_dir, + pattern = "\\.gdx$", + full.names = FALSE, + recursive = FALSE + )) + + if (length(all_files) == 0) { + stop("No GDX files found in: ", results_dir) + } + + # ALWAYS filter to files starting with "results_" first + all_files <- all_files[stringr::str_starts(all_files, "results_")] + + if (length(all_files) == 0) { + stop("No GDX files starting with 'results_' found in: ", results_dir) + } + + # Apply additional inclusion filters (if restrict_files is not "results_") + if (!is.null(restrict_files) && restrict_files != "" && restrict_files != "results_") { + patterns <- if (is.character(restrict_files)) restrict_files else unlist(restrict_files) + filtered <- all_files + for (pattern in patterns) { + filtered <- filtered[stringr::str_detect(filtered, pattern)] + } + all_files <- unique(filtered) + } + + # Apply exclusion filter + if (!is.null(exclude_files) && exclude_files != "") { + all_files <- all_files[!stringr::str_detect(all_files, paste(exclude_files, collapse = "|"))] + } + + if (length(all_files) == 0) { + stop("No GDX files found after applying filters") + } + + message("Found ", length(all_files), " GDX files") + all_files +} + +#' Create scenario list from filenames +#' +#' @param filelist Character vector of GDX filenames +#' @param removepattern Pattern to remove from scenario names +#' @param scenlist_custom Optional pre-defined scenario list +#' @return Named character vector (names=filenames, values=scenario names) +#' @keywords internal +.create_scenlist <- function(filelist, removepattern = "results_", scenlist_custom = NULL) { + if (!is.null(scenlist_custom)) { + # User provided scenario list - validate and filter + missing <- setdiff(names(scenlist_custom), filelist) + if (length(missing) > 0) { + warning("Missing scenarios in GDX files: ", paste(missing, collapse = ", ")) + } + + # Keep only scenarios that exist + valid_files <- 
intersect(names(scenlist_custom), filelist) + scenlist <- scenlist_custom[valid_files] + } else { + # Auto-generate scenario names from filenames + scenario_names <- filelist + if (!is.null(removepattern) && removepattern != "") { + scenario_names <- gsub(paste(removepattern, collapse = "|"), "", filelist) + } + scenlist <- setNames(scenario_names, filelist) + } + + scenlist +} + +#' Load GDX session data +#' +#' Main function that discovers files, creates scenario list, and loads metadata. +#' Returns a list with all session data instead of using global variables. +#' +#' @param results_dir Path to results directory +#' @param restrict_files Pattern to filter GDX files +#' @param exclude_files Pattern to exclude GDX files +#' @param removepattern Pattern to remove from scenario names +#' @param scenlist_custom Optional custom scenario list +#' @param reg_id Regional aggregation ID +#' @return List containing: filelist, scenlist, regions, palettes, metadata +#' @keywords internal +.load_gdx_session <- function(results_dir, + restrict_files = "results_", + exclude_files = "", + removepattern = "results_", + scenlist_custom = NULL, + reg_id = NULL) { + + # Discover GDX files + filelist <- .discover_gdx_files(results_dir, restrict_files, exclude_files) + + # Create scenario list + scenlist <- .create_scenlist(filelist, removepattern, scenlist_custom) + + # Update filelist to only include scenarios in scenlist + filelist <- names(scenlist) + + # Check if any files remain after filtering + if (length(filelist) == 0) { + stop("No GDX files found after applying filters and scenario list matching.\n", + "Check restrict_files, exclude_files, and scenlist_custom parameters.") + } + + # Set up file grouping columns + file_group_columns <- if(exists("file_separate", envir=.GlobalEnv)) { + file_separate <- get("file_separate", envir=.GlobalEnv) + c("file", unname(file_separate[3:length(file_separate)])) + } else { + "file" + } + + # Print summary + print(data.frame( + file = 
filelist, + scenario = as.character(scenlist), + row.names = NULL + )) + + # Set filelist globally early so get_witch() can access it + # This is needed because get_witch() depends on this being global + # Note: results_dir is already assigned by the caller (run_witch, etc.) + assign("filelist", filelist, envir = .GlobalEnv) + + # Get metadata from first file + first_gdx_path <- file.path(results_dir, paste0(filelist[1], ".gdx")) + metadata <- .extract_gdx_metadata(first_gdx_path, filelist, results_dir) + + # Get region information (suppress join messages) + region_info <- suppressMessages(.extract_region_info(filelist, results_dir, reg_id)) + + # Return everything as a list + list( + filelist = filelist, + scenlist = scenlist, + results_dir = results_dir, + file_group_columns = file_group_columns, + regions = region_info$regions, + reg_id = region_info$reg_id, + region_palette = region_info$palette, + region_palette_short = region_info$palette_short, + region_palette_long = region_info$palette_long, + stochastic_files = metadata$stochastic_files, + var_descriptions = metadata$var_descriptions + ) +} + +#' Extract metadata from GDX files +#' +#' @param first_gdx_path Path to first GDX file +#' @param filelist All GDX filenames +#' @param results_dir Results directory +#' @return List with metadata +#' @keywords internal +.extract_gdx_metadata <- function(first_gdx_path, filelist, results_dir) { + # Variable descriptions + mygdx <- gdxtools::gdx(first_gdx_path) + var_descriptions <- rbind( + data.frame(name = mygdx$variables$name, description = mygdx$variables$text), + data.frame(name = mygdx$parameters$name, description = mygdx$parameters$text) + ) + + # Note: We always assume flexible timestep - tlen will be loaded in get_witch() + # No longer need to detect flexible_timestep + + # Check for stochastic runs + stochastic_files <- NULL + tset <- tryCatch({ + get_witch("t") + }, error = function(e) NULL) + + if (!is.null(tset) && "t" %in% names(tset)) { + if 
(any(stringr::str_detect((tset %>% dplyr::select(t) %>% unique())$t, "_"))) { + stochastic_files <- tset %>% + dplyr::filter(stringr::str_detect(t, "_")) %>% + dplyr::mutate(numeric_t = as.numeric(sub(".*_(\\d+)$", "\\1", t))) %>% + dplyr::group_by(file) %>% + dplyr::summarise(num_branches = max(numeric_t, na.rm = TRUE)) + } + } + + list( + var_descriptions = var_descriptions, + stochastic_files = stochastic_files + ) +} + +#' Extract region information +#' +#' @param filelist GDX filenames +#' @param results_dir Results directory +#' @param reg_id Regional aggregation ID +#' @return List with region info and palettes +#' @keywords internal +.extract_region_info <- function(filelist, results_dir, reg_id = NULL) { + # Determine reg_id if not provided + if (is.null(reg_id)) { + # Try to read conf directly from the first GDX file + conf <- tryCatch({ + first_gdx_path <- file.path(results_dir[1], paste0(filelist[1], ".gdx")) + if (file.exists(first_gdx_path)) { + mygdx <- gdxtools::gdx(first_gdx_path) + if ("conf" %in% gdxtools::all_items(mygdx)$sets) { + conf_data <- data.table::data.table(mygdx["conf"]) + conf_data + } else { + NULL + } + } else { + NULL + } + }, error = function(e) NULL) + + if (!is.null(conf)) { + reg_id <- subset(conf, V1 == "regions")$V2 + if (length(reg_id) == 0) reg_id <- "default" + if (length(unique(subset(conf, V1 == "regions")$V2)) > 1) { + message("Note: Multiple regional aggregations detected in files") + } + } else { + reg_id <- "default" + } + } + + # Get regions from GDX files + regions <- .get_regions_from_gdx(filelist, results_dir) + + + # Create color palettes + palette <- get_region_palette(regions, reg_id) + palette_short <- palette + names(palette_short) <- witch_name_short(names(palette)) + + palette_long <- palette + names(palette_long) <- dplyr::recode( + names(palette), + !!!setNames( + paste0(witch_region_longnames[names(witch_region_longnames)], " (", names(witch_region_longnames), ")"), + names(witch_region_longnames) + ) 
+ ) + + message(length(filelist), " scenarios and ", length(regions), " regions loaded") + + list( + regions = regions, + reg_id = reg_id, + palette = palette, + palette_short = palette_short, + palette_long = palette_long + ) +} + +#' Get regions from GDX files +#' +#' @param filelist GDX filenames +#' @param results_dir Results directory +#' @return Character vector of region names +#' @keywords internal +.get_regions_from_gdx <- function(filelist, results_dir) { + if (requireNamespace("gdxtools", quietly = TRUE) && + exists("batch_extract", where = asNamespace("gdxtools"), mode = "function")) { + n <- suppressWarnings(gdxtools::batch_extract( + "n", + files = file.path(results_dir, paste0(filelist, ".gdx")) + )) + if (!is.null(n$n)) { + return(unique(n$n$V1)) + } + } + + # Fallback: get from first file + tryCatch({ + first_gdx <- gdxtools::gdx(file.path(results_dir, paste0(filelist[1], ".gdx"))) + if ("n" %in% names(first_gdx$sets)) { + return(unique(first_gdx$sets$n$V1)) + } + }, error = function(e) {}) + + # Default + "World" +} + +#' Set global environment variables from session data +#' +#' This is a compatibility function for legacy code that expects global variables. +#' New code should use the session list directly. 
+#' +#' @param session_data List returned by .load_gdx_session() +#' @keywords internal +.set_global_session_vars <- function(session_data) { + assign("filelist", session_data$filelist, envir = .GlobalEnv) + assign("scenlist", session_data$scenlist, envir = .GlobalEnv) + assign("file_group_columns", session_data$file_group_columns, envir = .GlobalEnv) + assign("reg_id", session_data$reg_id, envir = .GlobalEnv) + assign("witch_regions", session_data$regions, envir = .GlobalEnv) + assign("display_regions", session_data$regions, envir = .GlobalEnv) + assign("region_palette", session_data$region_palette, envir = .GlobalEnv) + assign("region_palette_specific_short", session_data$region_palette_short, envir = .GlobalEnv) + assign("region_palette_longnames", session_data$region_palette_long, envir = .GlobalEnv) + assign("stochastic_files", session_data$stochastic_files, envir = .GlobalEnv) + assign("all_var_descriptions", session_data$var_descriptions, envir = .GlobalEnv) + invisible(NULL) +} diff --git a/R/get_iiasadb.R b/R/get_iiasadb.R index 38b3d36..31ba659 100644 --- a/R/get_iiasadb.R +++ b/R/get_iiasadb.R @@ -3,23 +3,33 @@ #require(reticulate) #require(yaml) -#Function to get IIASAdb variable -get_iiasadb <- function(database="ar6-public", varlist="Emissions|CO2", varname=NULL, modlist="*", scenlist="*", region="World", show_variables = F, add_metadata = T){ +#Function to download data from IIASA database +download_iiasadb <- function(database="iamc15", varlist="Emissions|CO2", varname=NULL, modlist="*", scenlist="*", region="World", show_variables = F, add_metadata = T){ require(reticulate) pyam <- import("pyam", convert = FALSE) - #if private databases provide credentials - if(file.exists("iiasa_credentials.yml")){ - require(yaml) - iiasa_credentials <- yaml::read_yaml("iiasa_credentials.yml") # two lines with username: and password: - pyam$iiasa$set_config(iiasa_credentials$username, iiasa_credentials$password) + + # --- IIASA Authentication Check 
------------------------------------------- + + # Check if IIASA login configuration exists + has_iiasa_login <- dir.exists("~/.config/ixmp4") || dir.exists("~/.local/share/ixmp4") + + if (!has_iiasa_login) { + warning( + "No IIASA login detected. You can still access *public* databases.\n\n", + "If you need to access *private* IIASA databases, please run the following command\n", + "in your system console (not in R):\n\n", + " ixmp4 login \n\n", + "You will be prompted for your password. After that, restart R and rerun this script.\n" + ) } + #show variables in case if(show_variables) print(py_to_r(pyam$iiasa$Connection(database)$variables())) assign("iiasadb_variables_available", as.data.frame(py_to_r(pyam$iiasa$Connection(database)$variables())), envir = .GlobalEnv) - print("Available_Connections:"); print(py_to_r(pyam$iiasa$Connection(database)$valid_connections)) + print("Available Platforms:"); print(py_to_r(pyam$iiasa$platforms())) iiasadb_data <- pyam$read_iiasa(database, model=modlist, scenario=scenlist, variable=varlist, region=region, meta=1) #If AR6, also add meta categories and other meta data - if(database == "ar6_public" & add_metadata){ + if(database == "ar6-public" & add_metadata){ #as_pandas concatenates data and meta into a pandas DF (meta_cols = TRUE adds all meta data) iiasadb_df <- iiasadb_data$as_pandas(meta_cols = c("Ssp_family", "Policy_category", "Policy_category_name", "Category", "IMP_marker")) #pandas to R data frame @@ -39,3 +49,68 @@ get_iiasadb <- function(database="ar6-public", varlist="Emissions|CO2", varname= if(!is.null(varname)) iiasadb_df <- iiasadb_df %>% mutate(variable = dplyr::recode(variable, !!!setNames(varname, varlist))) return(iiasadb_df) } + +#' Load IIASADB Variable from Files +#' +#' Similar to get_witch(), loads IAMC format data from CSV/XLSX files and returns a dataframe +#' in standard WITCH format (n, year, value, file). +#' Data should be loaded at startup using run_iiasadb(launch=FALSE). 
+#' +#' @param variable_name Name of the IAMC variable to load (e.g., "Population", "GDP|PPP") +#' @param scenplot Vector of scenarios to include (default: all loaded scenarios) +#' @param add_historical Whether to add historical data (default: from global add_historical setting) +#' +#' @return Data frame with columns: n, year, value, file, MODEL, SCENARIO, VARIABLE, UNIT +#' @export +get_iiasadb <- function(variable_name, scenplot = NULL, add_historical = NULL) { + # Check if IIASADB data is loaded + if(!exists("iiasadb_data", envir = .GlobalEnv)) { + stop("IIASADB data not loaded. Please run run_iiasadb(launch=FALSE) first to load data.") + } + + # Get add_historical setting + if(is.null(add_historical)) { + add_historical <- if(exists("add_historical", envir = .GlobalEnv)) get("add_historical", envir = .GlobalEnv) else getOption("add_historical", TRUE) + } + + # Get data from global environment + all_data <- get("iiasadb_data", envir = .GlobalEnv) + + # Filter by variable name (exact match, case-insensitive) + variable_data <- all_data %>% + filter(toupper(VARIABLE) == toupper(variable_name)) + + if(nrow(variable_data) == 0) { + warning(sprintf("Variable '%s' not found in loaded data", variable_name)) + return(data.frame()) + } + + # Filter by scenarios if specified (before format conversion) + if(!is.null(scenplot)) { + variable_data <- variable_data %>% filter(SCENARIO %in% scenplot) + } + + # Convert from IAMC format to standard WITCH format + # IAMC: MODEL, SCENARIO, REGION, VARIABLE, UNIT, YEAR, value + # WITCH: n, year, value, file, (keep MODEL, SCENARIO, VARIABLE, UNIT for reference) + variable_data <- variable_data %>% + rename(n = REGION, year = YEAR) %>% + mutate( + n = tolower(n), # Ensure lowercase for consistency + file = paste(MODEL, SCENARIO, sep = "_") # Combine MODEL and SCENARIO + ) %>% + select(n, year, value, file, MODEL, SCENARIO, VARIABLE, UNIT, everything()) + + # Add historical data if requested + if(add_historical) { + variable_data <- 
add_historical_values(variable_data, + varname = variable_name, + verbose = FALSE, + iiasadb = TRUE) + } + + # Ensure result is a standard data.frame (not tibble or data.table) + variable_data <- as.data.frame(variable_data) + + return(variable_data) +} diff --git a/R/get_witch.R b/R/get_witch.R index a57493f..e03f3b1 100644 --- a/R/get_witch.R +++ b/R/get_witch.R @@ -1,39 +1,50 @@ -# Load GDX of all scenarios and basic pre-processing -get_witch <- function(variable_name, - scenplot = scenlist, - check_calibration = TRUE, - field = "l", - postprocesssuffix = NULL, - skip_restrict_regions = FALSE){ - for (current_pathdir in fullpathdir){ +# Load GDX of all scenarios and basic pre-processing +get_witch <- function(variable_name, + scenplot = scenlist, + field = "l", + postprocesssuffix = NULL, + skip_restrict_regions = FALSE, + add_historical = TRUE){ + + for (current_pathdir in results_dir){ for (file in filelist){ if(file.exists(file.path(current_pathdir, paste0(file,".gdx")))){ - mygdx <- gdx(file.path(current_pathdir, paste0(file,".gdx"))) + mygdx <- gdxtools::gdx(file.path(current_pathdir, paste0(file,".gdx"))) if(!is.null(postprocesssuffix)) { - mygdx <- gdx(file.path(current_pathdir, postprocesssuffix, + mygdx <- gdxtools::gdx(file.path(current_pathdir, postprocesssuffix, paste0(paste0(file, "_", postprocesssuffix), ".gdx")))} - if(is.element(variable_name, all_items(mygdx)$variables) | - is.element(variable_name, all_items(mygdx)$parameters) | - is.element(variable_name, all_items(mygdx)$sets) | - is.element(variable_name, all_items(mygdx)$variables) | - is.element(variable_name, all_items(mygdx)$equations)){ - tempdata <- data.table(mygdx[variable_name, field = field]) - if(is.element(variable_name, all_items(mygdx)$equations)) names(tempdata)[1:2] <- c("t", "n") + if(is.element(variable_name, gdxtools::all_items(mygdx)$variables) | + is.element(variable_name, gdxtools::all_items(mygdx)$parameters) | + is.element(variable_name, 
gdxtools::all_items(mygdx)$sets) |
+ is.element(variable_name, gdxtools::all_items(mygdx)$variables) |
+ is.element(variable_name, gdxtools::all_items(mygdx)$equations)){
+ tempdata <- data.table::data.table(mygdx[variable_name, field = field])
+ if(is.element(variable_name, gdxtools::all_items(mygdx)$equations)) names(tempdata)[1:2] <- c("t", "n")
 if(variable_name %in% c("E", "EIND", "MIU", "ABATEDEMI", "ABATECOST") & !("ghg" %in% names(tempdata))) tempdata$ghg <- "co2"
 if(!("n" %in% names(tempdata))) tempdata$n <- "World"
 tempdata$file <- as.character(file)
- #add time step
- if("t" %in% names(tempdata)){
- if(flexible_timestep) if("tlen" %in% all_items(mygdx)$parameters) tempdata <- tempdata %>% left_join(mygdx["tlen"] %>% rename(tlen=value)) else tempdata$tlen = tstep
+ #add time step for all time-indexed variables (not just those without extra dimensions)
+ if("t" %in% names(tempdata) && !is.element(variable_name, gdxtools::all_items(mygdx)$sets)){
+ # Always try to add tlen from the GDX file; fall back to tstep if not available
+ if("tlen" %in% gdxtools::all_items(mygdx)$parameters) {
+ tempdata <- tempdata %>% dplyr::left_join(mygdx["tlen"] %>% dplyr::rename(tlen=value), by = "t")
+ } else {
+ tempdata$tlen <- tstep
+ }
+ # Also try to add year parameter from GDX file if available
+ if("year" %in% gdxtools::all_items(mygdx)$parameters) {
+ tempdata <- tempdata %>% dplyr::left_join(mygdx["year"] %>% dplyr::rename(year=value), by = "t")
+ }
 }
- if(length(fullpathdir)>=1){
+ if(length(results_dir)>=1){
 tempdata$pathdir <- basename(current_pathdir)
 }
 if(!exists("allfilesdata")) {
 allfilesdata <- tempdata
 } else {
- allfilesdata <- rbind(allfilesdata,tempdata)
+ # Use rbindlist with fill=TRUE to handle different columns (e.g., model vs historical data)
+ allfilesdata <- data.table::rbindlist(list(allfilesdata, tempdata), fill=TRUE)
 }
 remove(tempdata)
 }
@@ -76,14 +87,20 @@ get_witch <- function(variable_name,
 }}
 #try adding historical values
- if(historical & 
!(is.element(variable_name, all_items(mygdx)$sets))) { - allfilesdata <- add_historical_values(allfilesdata, - varname = variable_name, - scenplot = scenplot, - check_calibration = check_calibration, - verbose = FALSE)} + if(add_historical) { + # Check if variable_name is a set (sets should not get historical data) + is_set <- FALSE + if(exists("mygdx")) { + is_set <- is.element(variable_name, gdxtools::all_items(mygdx)$sets) + } + if(!is_set) { + allfilesdata <- add_historical_values(allfilesdata, + varname = variable_name, + verbose = FALSE) + } + } # also save as data.table - allfilesdata <- as.data.table(allfilesdata) + allfilesdata <- data.table::as.data.table(allfilesdata) #in case nice_region_names exist map region names for those with a nice name if(exists("nice_region_names") & !unique(allfilesdata$n)[1]=="World") allfilesdata <- allfilesdata %>% mutate(n = dplyr::recode(n, !!!nice_region_names)) #in case restrict_regions exists keep only these regions diff --git a/R/impact_plots.R b/R/impact_plots.R index 6447584..2cc8b06 100644 --- a/R/impact_plots.R +++ b/R/impact_plots.R @@ -4,18 +4,18 @@ SCC_plot <- function(scenplot=scenlist, regions = "World", normalization_region gdp_measure <- "y" #"cc" #for consumption or "y" for GDP emi_sum <- "ghg" #or "co2" for only CO2 or ghg for all gases #Impacts and Damages computation - OMEGA <- get_witch("OMEGA", check_calibration = T) - Q <- get_witch("Q", check_calibration = T) + OMEGA <- get_witch("OMEGA") + Q <- get_witch("Q") Q <- Q %>% filter(iq == gdp_measure) %>% select(-iq) - l <- get_witch("l", check_calibration = T) - Q_EMI <- get_witch("Q_EMI", check_calibration = T) # nolint + l <- get_witch("l") + Q_EMI <- get_witch("Q_EMI") # nolint ghg <- get_witch("ghg") # to get GHGs for non-co2 sets if(emi_sum=="ghg") ghg_used <- unique(ghg$e) else if(emi_sum=="co2") ghg_used = c("co2") Q_EMI <- Q_EMI %>% filter(e %in% ghg_used) %>% group_by(pathdir, file, n, t) %>% summarize(emiall = sum(value)) #get also BAU values - 
BAU_Q <- get_witch("BAU_Q", check_calibration = T) + BAU_Q <- get_witch("BAU_Q") BAU_Q <- BAU_Q %>% filter(iq == gdp_measure) %>% select(-iq) - BAU_Q_EMI <- get_witch("BAU_Q_EMI", check_calibration = T) + BAU_Q_EMI <- get_witch("BAU_Q_EMI") BAU_Q_EMI <- BAU_Q_EMI %>% filter(e %in% ghg_used) %>% group_by(pathdir, file, n, t) %>% summarize(emiall = sum(value)) impact <- Q %>% rename(gdp=value) impact <- merge(impact, BAU_Q, by = c("pathdir", "file", "n", "t")); setnames(impact, "value", "gdp_bau") @@ -23,10 +23,10 @@ SCC_plot <- function(scenplot=scenlist, regions = "World", normalization_region impact <- merge(impact, BAU_Q_EMI, by = c("pathdir", "file", "n", "t")); setnames(impact, "emiall", "emi_bau") impact <- merge(impact, l, by = c("pathdir", "file", "n", "t")); setnames(impact, "value", "pop") - TEMP <- get_witch("TEMP", check_calibration = T) + TEMP <- get_witch("TEMP") impact <- merge(impact, TEMP %>% filter(m=="atm") %>% select(-m), by = c("pathdir", "file", "n", "t")); setnames(impact, "value", "temp") #add external climate modules in case - MAGICCTEMP <- get_witch("MAGICCTEMP", check_calibration = T) + MAGICCTEMP <- get_witch("MAGICCTEMP") if(length(MAGICCTEMP)>0) {impact <- merge(impact, MAGICCTEMP %>% filter(m=="atm") %>% select(-m), by = c("pathdir", "file", "n", "t"), all.x = T); setnames(impact, "value", "temp_magicc6")} @@ -39,8 +39,8 @@ SCC_plot <- function(scenplot=scenlist, regions = "World", normalization_region #Temperature #temp_plot <- plot_witch(impact, varname = "temp", scenplot = scenplot_nopulse, regions = "World", ylab = "Temperature increase [deg C]", conv_factor=1, nagg="mean") - temp_plot <- ggplot() + geom_line(data = impact %>% filter(file %in% scenplot_nopulse & ttoyear(t) <= yearmax & ttoyear(t) >= yearmin) %>% group_by(pathdir, file, t) %>% summarise_at(., .vars=vars(str_subset(names(impact), "temp")), funs(mean)), aes(ttoyear(t),temp,colour=file), stat="identity", size=1.5, linetype = "solid") + xlab("") + ylab("Temperature [deg 
C]") - if(length(MAGICCTEMP)>0){temp_plot <- temp_plot + geom_line(data = impact %>% filter(file %in% scenplot_nopulse & ttoyear(t) <= yearmax & ttoyear(t) >= yearmin & !is.na(temp_magicc6)) %>% group_by(pathdir, file, t) %>% summarise_at(., .vars=vars(str_subset(names(impact), "temp_magicc6")), funs(mean)), aes(ttoyear(t),temp_magicc6,colour=file), stat="identity", size=1.5, linetype = "dashed") + ylab("Temp., MAGICC dashed")} + temp_plot <- ggplot() + geom_line(data = impact %>% filter(file %in% scenplot_nopulse & ttoyear(t) <= yearmax & ttoyear(t) >= yearmin) %>% group_by(pathdir, file, t) %>% summarise_at(., .vars=vars(str_subset(names(impact), "temp")), funs(mean)), aes(ttoyear(t),temp,colour=file), stat="identity", linewidth=1.5, linetype = "solid") + xlab("") + ylab("Temperature [deg C]") + if(length(MAGICCTEMP)>0){temp_plot <- temp_plot + geom_line(data = impact %>% filter(file %in% scenplot_nopulse & ttoyear(t) <= yearmax & ttoyear(t) >= yearmin & !is.na(temp_magicc6)) %>% group_by(pathdir, file, t) %>% summarise_at(., .vars=vars(str_subset(names(impact), "temp_magicc6")), funs(mean)), aes(ttoyear(t),temp_magicc6,colour=file), stat="identity", linewidth=1.5, linetype = "dashed") + ylab("Temp., MAGICC dashed")} diff --git a/R/inequality_plots.R b/R/inequality_plots.R index 8393d59..a4d0745 100644 --- a/R/inequality_plots.R +++ b/R/inequality_plots.R @@ -172,9 +172,19 @@ compute_global_inequality <- function(Y_DIST="Y_DIST", Y="Y", l="l", scenplot=sc inequality_dataset_model <- get_witch(Y_DIST) %>% full_join(get_witch(Y, scenplot = scenplot) %>% dplyr::rename(Y=value)) %>% full_join(get_witch(l, scenplot = scenplot) %>% dplyr::rename(pop=value)) %>% mutate(year=ttoyear(t), gdppcppp=Y*1e6/pop, value=value/Y) #now get historical data on ed57 aggregation - data_hist_quantiles <- gdx(file.path(witch_folder, paste0("data_", reg_id), "data_historical_values_inequality_converted.gdx")) + # Try to load from results_dir first + hist_quantiles_file <- 
file.path(results_dir, "data_historical_values_inequality_converted.gdx")
+ hist_values_file <- file.path(results_dir, "data_historical_values.gdx")
+
+ if(!file.exists(hist_quantiles_file) || !file.exists(hist_values_file)) {
+ warning(sprintf("Inequality historical data files not found in results_dir.\nPlease place the following files in '%s':\n - data_historical_values_inequality_converted.gdx\n - data_historical_values.gdx\nSkipping historical inequality data.", results_dir))
+ # Return only the model data, without historical data
+ return(inequality_dataset_model)
+ }
+
+ data_hist_quantiles <- gdx(hist_quantiles_file)
 data_hist_quantiles <- data_hist_quantiles["quantiles"] %>% mutate(year=as.numeric(year))
- data_historical <- gdx(file.path(witch_folder, paste0("data_", reg_id), "data_historical_values.gdx"))
+ data_historical <- gdx(hist_values_file)
 gdp_historical <- data_historical["ykali_valid_wdi"] %>% dplyr::rename(Y=value) %>% full_join(data_historical["l_valid_wdi"] %>% dplyr::rename(pop=value)) %>% full_join(data_historical["mer2ppp_valid_wdi"] %>% dplyr::rename(mer2ppp=value))
 #Extrapolate fix PPP2MER rates
 gdp_historical <- gdp_historical %>% group_by(n) %>% tidyr::fill(mer2ppp, mer2ppp, .direction = "updown")
diff --git a/R/map_functions.R b/R/map_functions.R
index 7d63cda..8dbadbd 100644
--- a/R/map_functions.R
+++ b/R/map_functions.R
@@ -29,19 +29,23 @@ witchmap <- function(variable_report, file_report=scenlist[1], t_report=20, scal
 #now get WITCH regions
 conf <- get_witch("conf", scenplot = file_report)
- reg_id_map <- subset(conf, file==scenlist[1] & pathdir==basename(fullpathdir[1]) & V1=="regions")$V2
- mod.countries.filename = file.path(witch_folder, paste0("data_", reg_id_map, "/regions.inc"))
- # Read mod_countries
- mod.countries = readLines(mod.countries.filename)
- mod.countries = mod.countries[mod.countries!=""] # Remove empty lines
- mod.countries = mod.countries[!str_detect(mod.countries,"^\\*")] # Remove * comments
- mod.countries = 
str_trim(str_split_fixed(mod.countries,"#",2)[,1]) # Remove # comments - set.begin = grep("map_n_iso3(n,iso3)*",tolower(mod.countries))[1] - set.end = set.begin + grep(";",mod.countries[set.begin:length(mod.countries)])[1] - mod.countries = mod.countries[(set.begin+1):(set.end-2)] # Keep mapping data - mod.countries = str_split(mod.countries,"\\.") - mod.countries <- data.table(matrix(unlist(mod.countries), ncol=2, byrow=T)) - setnames(mod.countries,c("n","ISO3")) + reg_id_map <- subset(conf, file==scenlist[1] & pathdir==basename(results_dir[1]) & V1=="regions")$V2 + + # Get region mapping from witchtools + if(!requireNamespace("witchtools", quietly = TRUE)){ + warning("Cannot create map: witchtools package is not available. Please install it from GitHub: remotes::install_github('witch-team/witchtools')") + return(invisible(NULL)) + } + + if(!(reg_id_map %in% names(witchtools::region_mappings))){ + warning(sprintf("Cannot create map: Region mapping '%s' not found in witchtools::region_mappings.\nAvailable mappings: %s", + reg_id_map, paste(names(witchtools::region_mappings), collapse=", "))) + return(invisible(NULL)) + } + + # Get the mapping from witchtools and convert to the format expected by the map code + mod.countries <- as.data.table(witchtools::region_mappings[[reg_id_map]]) + setnames(mod.countries, c("iso3", reg_id_map), c("ISO3", "n")) # create mod.countries to map regions #add for displaying center mod.countries$center <- (mod.countries$ISO3%in%c("USA","BRA","CAN","AUS","NER","SAU","FRA","POL","RUS","AFG","IND","CHN","IDN")) @@ -190,23 +194,30 @@ map_simple <- function(data, yearmap=2100, title="", scenplot=scenlist, legend_t sf::sf_use_s2(FALSE) #to avoid errors world <- ne_countries(scale = "medium", returnclass = "sf") #add geometry - world <- suppressWarnings(cbind(world, st_coordinates(st_centroid(world$geometry)))) - #get model iso3 mapping - mod.countries = readLines(file.path(witch_folder, paste0("data_", reg_id, "/regions.inc"))) - 
mod.countries = mod.countries[mod.countries != ""] # Remove empty lines - mod.countries = mod.countries[!str_detect(mod.countries, "^\\*")] # Remove * comments - mod.countries = str_trim(str_split_fixed(mod.countries, "#", 2)[, 1]) # Remove # comments - mod.countries = mod.countries[(grep("map_n_iso3(n,iso3)*", tolower(mod.countries))[1] + 1):(grep("map_n_iso3(n,iso3)*", tolower(mod.countries))[1] + grep(";", mod.countries[grep("map_n_iso3(n,iso3)*", tolower(mod.countries))[1]:length(mod.countries)])[1] -2)] # Keep mapping data - mod.countries = str_split(mod.countries, "\\.") - mod.countries <- data.table(matrix(unlist(mod.countries), ncol = 2, byrow = T)) - setnames(mod.countries, c("n", "iso_a3")) + world <- suppressWarnings(cbind(world, sf::st_coordinates(sf::st_centroid(world$geometry)))) + + # Get region mapping from witchtools + if(!requireNamespace("witchtools", quietly = TRUE)){ + warning("Cannot create map: witchtools package is not available. Please install it from GitHub: remotes::install_github('witch-team/witchtools')") + return(invisible(NULL)) + } + + if(!(reg_id %in% names(witchtools::region_mappings))){ + warning(sprintf("Cannot create map: Region mapping '%s' not found in witchtools::region_mappings.\nAvailable mappings: %s", + reg_id, paste(names(witchtools::region_mappings), collapse=", "))) + return(invisible(NULL)) + } + + # Get the mapping from witchtools and convert to the format expected by the map code + mod.countries <- as.data.table(witchtools::region_mappings[[reg_id]]) + setnames(mod.countries, c("iso3", reg_id), c("iso_a3", "n")) #Add data to iso3 list data <- data %>% filter(t == yeartot(yearmap) & file %in% scenplot) - data <- data %>% full_join(mod.countries, relationship = "many-to-many") + data <- suppressMessages(data %>% full_join(mod.countries, by = "n", relationship = "many-to-many")) #Add data to world polygon - data_map <- data %>% select(t, file, pathdir, value, iso_a3) %>% full_join(world, relationship = "many-to-many") 
%>% filter(!is.na(value) & !is.na(iso_a3) & !is.na(file)) %>% as.data.frame() + data_map <- suppressMessages(data %>% select(t, file, pathdir, value, iso_a3) %>% full_join(world, by = "iso_a3", relationship = "many-to-many") %>% filter(!is.na(value) & !is.na(iso_a3) & !is.na(file)) %>% as.data.frame()) p_map <- ggplot(data = data_map) + geom_sf(aes(fill = value, geometry = geometry)) + scale_fill_viridis_c(option = "plasma", direction = -1) + ggtitle(title) + labs(fill = legend_title) + theme_bw() + theme(strip.background = element_rect(fill = "white")) - if(length(scenplot)>1) p_map <- p_map + facet_wrap(file ~ .) + if(length(scenplot)>1) p_map <- p_map + facet_wrap(file ~ ., ncol = 1) #remove Antarctica p_map <- p_map + coord_sf(ylim = c(-50, 90)) saveplot("Map", width = 12, height = 10) @@ -220,16 +231,23 @@ plot_map_region_definition <- function(regional_focus="World") { #regional_focus = "Europe" or "World" world <- ne_countries(scale = "medium", returnclass = "sf") #add geometry - world <- suppressWarnings(cbind(world, st_coordinates(st_centroid(world$geometry)))) - #get model iso3 mapping - mod.countries = readLines(file.path(witch_folder, paste0("data_", reg_id, "/regions.inc"))) - mod.countries = mod.countries[mod.countries != ""] # Remove empty lines - mod.countries = mod.countries[!str_detect(mod.countries, "^\\*")] # Remove * comments - mod.countries = str_trim(str_split_fixed(mod.countries, "#", 2)[, 1]) # Remove # comments - mod.countries = mod.countries[(grep("map_n_iso3(n,iso3)*", tolower(mod.countries))[1] + 1):(grep("map_n_iso3(n,iso3)*", tolower(mod.countries))[1] + grep(";", mod.countries[grep("map_n_iso3(n,iso3)*", tolower(mod.countries))[1]:length(mod.countries)])[1] -2)] # Keep mapping data - mod.countries = str_split(mod.countries, "\\.") - mod.countries <- data.table(matrix(unlist(mod.countries), ncol = 2, byrow = T)) - setnames(mod.countries, c("n", "iso_a3")) + world <- suppressWarnings(cbind(world, 
sf::st_coordinates(sf::st_centroid(world$geometry)))) + + # Get region mapping from witchtools + if(!requireNamespace("witchtools", quietly = TRUE)){ + warning("Cannot create map: witchtools package is not available. Please install it from GitHub: remotes::install_github('witch-team/witchtools')") + return(invisible(NULL)) + } + + if(!(reg_id %in% names(witchtools::region_mappings))){ + warning(sprintf("Cannot create map: Region mapping '%s' not found in witchtools::region_mappings.\nAvailable mappings: %s", + reg_id, paste(names(witchtools::region_mappings), collapse=", "))) + return(invisible(NULL)) + } + + # Get the mapping from witchtools and convert to the format expected by the map code + mod.countries <- as.data.table(witchtools::region_mappings[[reg_id]]) + setnames(mod.countries, c("iso3", reg_id), c("iso_a3", "n")) p_map <- ggplot(data = mod.countries %>% full_join(world) %>% filter(!is.na(n) & !is.na(iso_a3))) + geom_sf(aes(fill = n, geometry = geometry)) + scale_fill_manual(values = region_palette) + xlab("") + ylab("") + ggtitle(str_glue("Regional aggregation: {reg_id}")) + theme_bw() + theme(strip.background = element_rect(fill = "white"), legend.position="bottom") + guides(fill = guide_legend(nrow = 3)) if(regional_focus=="Europe") p_map <- p_map + coord_sf(xlim = c(-10,33), ylim = c(36,73), expand = FALSE) saveplot(str_glue("region_definition_{reg_id}"), width = 12, height = 8, add_title = F) @@ -245,19 +263,22 @@ tatm <- subset(tatm_data, file %in% scenplot & ttoyear(t)==yearplot) df <- read_parquet(file.path(pathadj, "data/coefficients_modmean.parquet")) df <- tatm %>% cross_join(df) df <- df %>% mutate(TATM = gmt * value) +# Get world map data for borders +world_map <- map_data("world") ggplot() + geom_tile(data = df, aes(x = lon, y = lat, fill = TATM)) + + geom_polygon(data = world_map, aes(x = long, y = lat, group = group), + colour = "black", fill = NA, linewidth = 0.25) + ggtitle(paste("Downscaled temperature change in", yearplot)) + 
scale_fill_steps( low = "white", # mid = "white", high = "red", - # midpoint = 1, + # midpoint = 1, n.break = 7 ) + - borders("world", colour = "black", size = .25) + theme_minimal() + - facet_wrap(file ~ .) + + facet_wrap(file ~ ., ncol = 1) + labs(x="", y="", fill="°C") } diff --git a/R/plotly.R b/R/plotly.R deleted file mode 100644 index 9da8959..0000000 --- a/R/plotly.R +++ /dev/null @@ -1,36 +0,0 @@ -#Plotly graphs -library(plotly) - - - -#Create plotly Panel -# Create a shareable link to your chart -Sys.setenv("plotly_username"="johannes.emm") -Sys.setenv("plotly_api_key"="Gn54gpjB6p4p5wJEO9O7") -#plotly() - - - - - -#get dynamic ggplotly plots -ggplotly() #just execute after each plot - - - - - - - - - - - - -p <- ggplot2::last_plot() -p_plotly <- ggplotly(p) -# Set up API credentials: https://plot.ly/r/getting-started -plot_for_web = api_create(p_plotly, filename="plotly_test", fileopt = "overwrite") -plot_for_web - - diff --git a/R/policy_cost.R b/R/policy_cost.R index 47d1fb2..bc09da6 100644 --- a/R/policy_cost.R +++ b/R/policy_cost.R @@ -15,12 +15,12 @@ Policy_Cost <- function(discount_rate=5, tmin=4, tmax=20, bauscen="ssp2_bau", re GDP$GDP_Loss_discounted = (GDP$"GDP Loss")*(1+discount_rate/100)^(-(5*(GDP$t-3))) GDP$GDP_MER_discounted = (GDP$"value")*(1+discount_rate/100)^(-(5*(GDP$t-3))) GDP <- subset(GDP, file %in% scenplot) - GDP_WORLD <- GDP %>% select(-n) %>% group_by_at(c("t", file_group_columns, "pathdir")) %>% summarize_all(.funs = sum, na.rm=T) %>% mutate(n="World") %>% as.data.frame() + GDP_WORLD <- GDP %>% select(-n) %>% group_by_at(c("t", file_group_columns, "pathdir")) %>% summarize(across(where(is.numeric) & !matches("tlen"), ~sum(.x, na.rm=T)), across(matches("tlen"), ~first(.x))) %>% mutate(n="World") %>% as.data.frame() GDP <- rbind(GDP, GDP_WORLD) #PC over time plot (NOT discounted!) 
PC_annual_relative <- subset(GDP, t<=tmax&t>=tmin); PC_annual_relative$rel_cost <- PC_annual_relative$"GDP Loss"/PC_annual_relative$"bau"; p <- ggplot(subset(PC_annual_relative, n %in% regions & file!=bauscen)) + geom_line(aes(ttoyear(t), rel_cost*100, color=file), show.legend = TRUE) +ylab(paste("% of", measure)) + xlab("") + theme(legend.position="bottom",legend.direction="horizontal") + guides(fill=guide_legend(title=NULL, nrow = 1)) - if(length(fullpathdir) > 1){p <- p + facet_grid(. ~ pathdir)} + if(length(results_dir) > 1){p <- p + facet_grid(. ~ pathdir)} if(regions[1] != "World" & (length(regions)>1)){p <- p + facet_grid(. ~ n)} #now compute also discounted NPV value Policy_Cost <- GDP %>% filter(t<=tmax & t>=tmin) %>% group_by_at(c("n", file_group_columns, "pathdir")) %>% summarize(GDP_Loss_discounted=sum(GDP_Loss_discounted), GDP_MER_discounted=sum(GDP_MER_discounted)) @@ -29,7 +29,7 @@ Policy_Cost <- function(discount_rate=5, tmin=4, tmax=20, bauscen="ssp2_bau", re #Policy_Cost$PC <- pmax(Policy_Cost$PC, 0) assign("POLCOST", Policy_Cost, envir = .GlobalEnv) p <- ggplot(subset(Policy_Cost, n %in% regions & file!=bauscen)) + geom_bar(position=position_dodge(), stat="identity",aes(file, PC, fill=file), show.legend = TRUE) +ylab(paste("% of", measure, "(NPV)")) + xlab("") + theme(legend.position="bottom",legend.direction="horizontal") + guides(fill=guide_legend(title=NULL, nrow = 1)) - if(length(fullpathdir) > 1){p <- p + facet_grid(. ~ pathdir)} + if(length(results_dir) > 1){p <- p + facet_grid(. ~ pathdir)} if(regions[1] != "World"){p <- p + facet_grid(. 
~ n)} if(show_numbers){p <- p + geom_text(data=subset(Policy_Cost, n %in% regions & file!=bauscen), aes(x=file, y=PC+0.1, label=paste0(round(PC, 1),"%")), size=3)} p <- p + theme(axis.ticks = element_blank(), axis.text.x = element_blank()) @@ -121,7 +121,7 @@ Policy_Cost_Decomposition <- function(discount_rate=5, tmin=4, tmax=20, bauscen= POLCOSTDECOMP <- POLCOSTDECOMP %>% filter(file %in% scenplot) #Line plot p <- ggplot(POLCOSTDECOMP %>% filter(n %in% regions & t>= tmin & t <= tmax)) + geom_line(aes(ttoyear(t), 100*(value-value_bau)/gdp_bau, color=file), show.legend = TRUE) +ylab(paste("Change in % of", measure)) + xlab("") + theme(legend.position="bottom",legend.direction="horizontal") + guides(fill=guide_legend(title=NULL, nrow = 1)) + facet_grid(variable ~ n) - if(length(fullpathdir) > 1){p <- p + facet_grid(variable ~ pathdir)} + if(length(results_dir) > 1){p <- p + facet_grid(variable ~ pathdir)} #now aggregate to NPV discounted values (PC) DAM_DECOMP_NPV <- POLCOSTDECOMP %>% mutate(diff=(value-value_bau), diff_disc=diff*(1+discount_rate/100)^(-(ttoyear(t)-ttoyear(tmin))), gdp_disc=gdp_bau*(1+discount_rate/100)^(-(ttoyear(t)-ttoyear(tmin)))) %>% filter(t >= tmin & t <= tmax) %>% group_by_at(c(file_group_columns, "pathdir", "n", "variable")) %>% summarize(NPV=sum(diff_disc)/sum(gdp_disc)) #keep only relevant data and good naming @@ -134,7 +134,7 @@ Policy_Cost_Decomposition <- function(discount_rate=5, tmin=4, tmax=20, bauscen= assign("DAM_DECOMP_NPV", DAM_DECOMP_NPV, envir = .GlobalEnv) #Bar chart p_bar <- ggplot(subset(DAM_DECOMP_NPV, n %in% regions & file!=bauscen & variable!="GDP")) + geom_bar(position=position_stack(), stat="identity",aes(file, NPV, fill=variable), show.legend = TRUE) +ylab(paste("% of", measure, "(NPV)")) + xlab("") + theme(legend.position="bottom",legend.direction="horizontal") + guides(fill=guide_legend(title=NULL, nrow = 1)) + facet_grid(. 
~ n) + theme(axis.text.x=element_text(angle=90,hjust=1)) + scale_y_continuous(labels = scales::percent) + geom_point(data = subset(DAM_DECOMP_NPV, n %in% regions & file!=bauscen & variable=="GDP"), aes(file, NPV), color="black", shape=16) - if(length(fullpathdir) > 1){p_bar <- p_bar + facet_grid(. ~ pathdir)} + if(length(results_dir) > 1){p_bar <- p_bar + facet_grid(. ~ pathdir)} if(show_numbers){p_bar <- p_bar + geom_text(data = subset(DAM_DECOMP_NPV, n %in% regions & file!=bauscen & variable=="GDP"), aes(file, NPV*1.1, label=paste0(round(NPV*100, 1),"%")), size=3)} saveplot(paste0(measure, " loss decomposition")) } @@ -155,7 +155,7 @@ Carbon_Price <- function(scenplot=scenlist){ carbonprice <- subset(carbonprice, file %in% scenplot) #carbonprice$value <- carbonprice$value * usd_deflator #Apply deflator p <- ggplot(subset(carbonprice, t==20 & n=="usa")) + geom_bar(position=position_dodge(), stat="identity",aes(file, value*1e3/(44/12), fill=file), show.legend = TRUE) +ylab("$/tCO2") + theme(legend.position="bottom",legend.direction="horizontal")+ guides(fill=guide_legend(title=NULL, nrow = 1)) - if(length(fullpathdir) > 1){p <- p + facet_grid(. ~ pathdir)} + if(length(results_dir) > 1){p <- p + facet_grid(. ~ pathdir)} saveplot("Global Carbon Price 2100") } @@ -169,8 +169,8 @@ Social_Cost_of_Carbon <- function(regions=witch_regions, scenplot=scenlist){ SCC$SCC <- (-1) * (m_eqq_emi_tree$value / m_eqq_y$value) * 1000 / (44/12) SCC$value <- NULL; #SCC$pathdir <- NULL - p <- ggplot(subset(SCC, n %in% regions & ttoyear(t) <= yearmax & ttoyear(t)>=2015 & file %in% scenplot),aes(ttoyear(t),SCC,colour=file)) + geom_line(stat="identity", size=1.2) + xlab("year") +ylab("$/tCO2") + p <- ggplot(subset(SCC, n %in% regions & ttoyear(t) <= yearmax & ttoyear(t)>=2015 & file %in% scenplot),aes(ttoyear(t),SCC,colour=file)) + geom_line(stat="identity", linewidth=1.2) + xlab("year") +ylab("$/tCO2") if(length(regions)>1){p <- p + facet_grid(. 
~ n, scales="free")} - if(length(fullpathdir)!=1){p <- p + facet_grid(pathdir ~ .)} + if(length(results_dir)!=1){p <- p + facet_grid(pathdir ~ .)} saveplot("Social Cost of Carbon") } diff --git a/R/region_palettes.R b/R/region_palettes.R new file mode 100644 index 0000000..13e280a --- /dev/null +++ b/R/region_palettes.R @@ -0,0 +1,122 @@ +## Region Color Palettes and Mappings +## This file contains region-specific color palettes and name mappings for WITCH models + +#' Get witch region color palette +#' +#' Returns a named vector of colors for WITCH regions +#' +#' @param regions Character vector of region names to include in palette +#' @param reg_id Regional aggregation identifier (e.g., "witch17", "ed57") +#' @return Named vector of hex colors +#' @export +get_region_palette <- function(regions, reg_id = NULL) { + # Base palette with specific colors for common regions + region_palette_specific <- setNames(rainbow(length(regions)), regions) + + # WITCH standard regions + region_palette_witch <- c( + usa = "darkblue", Usa = "darkblue", + oldeuro = "blue", neweuro = "cornflowerblue", + kosau = "darkgreen", Kosau = "darkgreen", + cajaz = "chartreuse4", Cajaz = "chartreuse4", + te = "gold2", Te = "gold2", + mena = "darkgoldenrod4", Mena = "darkgoldenrod4", + ssa = "goldenrod", Ssa = "goldenrod", + sasia = "darkorange2", "South Asia" = "darkorange2", + china = "deeppink3", PRC = "deeppink3", + easia = "orangered", ESEAP = "orangered", + laca = "#fbb714", Laca = "#fbb714", + india = "#fbf003", India = "#fbf003", + europe = "blue", Europe = "blue", + indonesia = "lightsalmon3", Indonesia = "lightsalmon3", + Rest_of_World = "grey48", + chinaw = "darkorange", chinac = "darkorange2", chinae = "darkorange4", + italy = "green", mexico = "slateblue2", brazil = "tomato4", + canada = "blueviolet", jpnkor = "darkseagreen", oceania = "forestgreen", + southafrica = "indianred3", seasia = "orangered", + World = "black", "Global Pool" = "black" + ) + + # RICE50+ ed57 regional 
aggregation + region_palette_ed57 <- c( + "arg" = "#000000", "aus" = "#48d1cc", "aut" = "#ae8000", "bel" = "#800000", + "bgr" = "#003366", "blt" = "#bf4040", "bra" = "#ffd633", "can" = "#6600cc", + "chl" = "#ffece6", "chn" = "#ff531a", "cor" = "#adebad", "cro" = "#808080", + "dnk" = "#ff9933", "egy" = "#0044cc", "esp" = "#ffd6cc", "fin" = "#00cccc", + "fra" = "#cc0000", "gbr" = "#ffffdd", "golf57" = "#33d6ff", "grc" = "#00ffcc", + "hun" = "#9999ff", "idn" = "#996633", "irl" = "#ff4dff", "ita" = "#ffff00", + "jpn" = "#006600", "meme" = "#b32d00", "mex" = "#ccff33", "mys" = "#145252", + "nde" = "#00d900", "nld" = "#c309bd", "noan" = "#ffff99", "noap" = "#ecf2f9", + "nor" = "#ff3399", "oeu" = "#ffb3ff", "osea" = "#008fb3", "pol" = "#d6f5d6", + "prt" = "#003300", "rcam" = "#4d1919", "rcz" = "#00ffff", "rfa" = "#deb887", + "ris" = "#000080", "rjan57" = "#bf00ff", "rom" = "#ff00ff", "rsaf" = "#ff8000", + "rsam" = "#0000ff", "rsas" = "#ccd6dd", "rsl" = "#00ff00", "rus" = "#66757f", + "slo" = "#ff3091", "sui" = "#61a62f", "swe" = "#cb1942", "tha" = "#efff14", + "tur" = "#4b0082", "ukr" = "#c198ff", "usa" = "#ffcc00", "vnm" = "#3377ff", + "zaf" = "#b3ccff" + ) + + # WITCH34 regional aggregation + region_palette_witch34 <- c( + "bnl" = "#800000", "northeu" = "#bf4040", "balkan" = "#808080", + "easteu" = "#9999ff", "che" = "#61a62f", "deu" = "#deb887", + "rou" = "#ff00ff", "cze" = "#00ffff", "japan" = "green", "korea" = "red" + ) + + # Combine palettes + region_palette <- replace(region_palette_specific, names(region_palette_witch), region_palette_witch) + region_palette <- replace(region_palette, names(region_palette_ed57), region_palette_ed57) + region_palette <- replace(region_palette, names(region_palette_witch34), region_palette_witch34) + + # Keep only palette for regions actually used + region_palette <- region_palette[regions] + + return(region_palette) +} + +#' Get WITCH region short names +#' +#' Converts WITCH region names to 3-letter ISO-like codes +#' +#' @param 
witch_name Character vector of WITCH region names +#' @return Character vector of shortened names +#' @export +witch_name_short <- function(witch_name) { + witch_name <- gsub("indonesia", "IDN", witch_name) + witch_name_shortened <- substr(toupper(witch_name), 1, 3) + witch_name_shortened <- gsub("MEN", "MEA", witch_name_shortened) + witch_name_shortened <- gsub("SOU", "ZAF", witch_name_shortened) + witch_name_shortened <- gsub("CHI", "CHN", witch_name_shortened) + witch_name_shortened <- gsub("TE", "TEC", witch_name_shortened) + return(witch_name_shortened) +} + +#' WITCH region long names mapping +#' +#' Named vector mapping region codes to full names +#' +#' @export +witch_region_longnames <- c( + "canada" = "Canada", + "jpnkor" = "Japan-Korea", + "oceania" = "Oceania", + "indonesia" = "Indonesia", + "southafrica" = "South Africa", + "brazil" = "Brazil", + "mexico" = "Mexico", + "china" = "China", + "india" = "India", + "te" = "Transition Economies", + "ssa" = "Sub-Saharan Africa", + "laca" = "Latin America-Caribbean", + "sasia" = "South Asia", + "seasia" = "South East Asia", + "mena" = "Middle East-North Africa", + "europe" = "Europe", + "usa" = "United States of America", + "easia" = "East Asia", + "kosau" = "South-Korea and Australia", + "cajaz" = "Canada Japan New-Zealand", + "neweuro" = "Eastern Europe", + "oldeuro" = "Western Europe" +) diff --git a/R/session_init.R b/R/session_init.R new file mode 100644 index 0000000..e00c6cd --- /dev/null +++ b/R/session_init.R @@ -0,0 +1,102 @@ +## Session Initialization +## This file contains the initialization function called by run_*() functions + +#' Initialize witchplot session +#' +#' Sets up the session environment with default options and validates paths. +#' Called internally by run_witch(), run_rice(), run_fidelio(), and run_iiasadb(). 
+#' +#' @return NULL (modifies global environment) +#' @keywords internal +.initialize_witchplot_session <- function() { + ## Set default theme ## + ggplot2::theme_set(ggplot2::theme_bw()) + + ## Set variables for combining old/new j technologies ## + varlist_combine_old_new_j <<- c("Q_EN", "K_EN", "I_EN", "Q_IN") + + # Set default time parameters + if(!exists("year0", envir=.GlobalEnv)) { + assign("year0", 2005, envir=.GlobalEnv) + assign("tstep", 5, envir=.GlobalEnv) + } + if(!exists("yearmin", envir=.GlobalEnv)) { + assign("yearmin", getOption("yearmin", 1980), envir=.GlobalEnv) + } + if(!exists("yearmax", envir=.GlobalEnv)) { + assign("yearmax", getOption("yearmax", 2100), envir=.GlobalEnv) + } + + ## Set up graphs directory ## + # Always use first results_dir for graphs + if(exists("results_dir", envir=.GlobalEnv) && !is.null(get("results_dir", envir=.GlobalEnv))) { + results_dir <- get("results_dir", envir=.GlobalEnv) + + # Always use first results_dir for graphs + graphdir_val <- file.path(results_dir[1], "graphs") + assign("graphdir", graphdir_val, envir=.GlobalEnv) + + # Validate that directory exists + if(any(!dir.exists(results_dir))) { + stop(sprintf("Results directory does not exist: '%s'\nPlease check the results_dir parameter.", + results_dir[!dir.exists(results_dir)][1])) + } + } else { + assign("graphdir", NULL, envir=.GlobalEnv) + } + + # Load GDX files if applicable (only for GDX-based models, not IIASADB) + if (exists("results_dir", envir = .GlobalEnv) && + !is.null(get("results_dir", envir = .GlobalEnv)) && + !exists("iamc_filename", envir = .GlobalEnv) && + !exists("iamc_databasename", envir = .GlobalEnv)) { + + results_dir <- get("results_dir", envir = .GlobalEnv) + + # Get parameters + restrict_files <- if (exists("restrict_files", envir = .GlobalEnv)) { + get("restrict_files", envir = .GlobalEnv) + } else { + "results_" + } + + exclude_files <- if (exists("exclude_files", envir = .GlobalEnv)) { + get("exclude_files", envir = .GlobalEnv) + } 
else { + "" + } + + removepattern <- if (exists("removepattern", envir = .GlobalEnv)) { + get("removepattern", envir = .GlobalEnv) + } else { + "results_" + } + + # Don't use global scenlist as custom - it's created by previous runs + # Only use scenlist_custom if explicitly passed via ... argument + scenlist_custom <- NULL + + reg_id <- if (exists("reg_id", envir = .GlobalEnv)) { + get("reg_id", envir = .GlobalEnv) + } else { + NULL + } + + # Load GDX session data using new clean approach + # Use only first directory for discovering files and metadata + # The actual loading from all directories happens in get_witch() + session_data <- .load_gdx_session( + results_dir = results_dir[1], + restrict_files = restrict_files, + exclude_files = exclude_files, + removepattern = removepattern, + scenlist_custom = scenlist_custom, + reg_id = reg_id + ) + + # Set global variables for backward compatibility with existing code + .set_global_session_vars(session_data) + } + + invisible(NULL) +} diff --git a/R/setup_gdx.R b/R/setup_gdx.R new file mode 100644 index 0000000..0203abe --- /dev/null +++ b/R/setup_gdx.R @@ -0,0 +1,68 @@ +#' Setup GDX Library for Reading GAMS Files +#' +#' Initializes the GAMS GDX library required for reading GDX files from WITCH/RICE models. +#' This function provides helpful diagnostics if GAMS is not installed or cannot be found. +#' +#' @param gams_path Optional path to GAMS installation directory. If NULL (default), +#' gdxtools will try to auto-detect GAMS installation. +#' +#' @return TRUE if GDX library was loaded successfully, FALSE otherwise +#' +#' @details +#' The gdxtools package requires the GAMS GDX library to read GDX files. +#' If you see errors about "GDX library not loaded", you need to: +#' +#' 1. Install GAMS from https://www.gams.com/download/ +#' 2. 
Run this function to initialize the library: \code{setup_gdx()} +#' +#' If GAMS is installed but not auto-detected, specify the path manually: +#' \code{setup_gdx("C:/GAMS/XX")} where XX is your GAMS version +#' +#' @examples +#' \dontrun{ +#' # Auto-detect GAMS installation +#' setup_gdx() +#' +#' # Manually specify GAMS path +#' setup_gdx("C:/GAMS/47") +#' } +#' +#' @export +setup_gdx <- function(gams_path = NULL) { + # Check if gdxtools is installed + if (!requireNamespace("gdxtools", quietly = TRUE)) { + message("✗ gdxtools package is not installed!") + message(" Install it with: install.packages('gdxtools') or devtools::install_github('lolow/gdxtools')") + return(FALSE) + } + + # Try to initialize GDX library + tryCatch({ + if (is.null(gams_path)) { + message("Attempting to auto-detect GAMS installation...") + result <- gdxtools::igdx() + } else { + message("Loading GDX library from: ", gams_path) + result <- gdxtools::igdx(gams_path) + } + + if (!is.null(result) && result) { + message("✓ GDX library loaded successfully!") + message(" You can now use run_witch() and run_rice()") + return(TRUE) + } else { + message("✗ GDX library could not be loaded") + message(" Please install GAMS from: https://www.gams.com/download/") + message(" After installing GAMS, run: setup_gdx()") + return(FALSE) + } + }, error = function(e) { + message("✗ Error loading GDX library: ", conditionMessage(e)) + message("\nTroubleshooting steps:") + message(" 1. Install GAMS from: https://www.gams.com/download/") + message(" 2. After installation, try: setup_gdx()") + message(" 3. 
If auto-detection fails, specify path: setup_gdx('C:/GAMS/XX')") + message("\nNote: IIASA database viewer (run_iiasadb) does not require GAMS") + return(FALSE) + }) +} diff --git a/R/shiny_modules.R b/R/shiny_modules.R new file mode 100644 index 0000000..2b15854 --- /dev/null +++ b/R/shiny_modules.R @@ -0,0 +1,231 @@ +extract_additional_sets <- function(afd, file_group_columns) { +additional_sets <- setdiff(names(afd), c(file_group_columns, "pathdir", "t", "n", "value", "tlen", "year")) +if(length(additional_sets)==0) { +list(additional_set_id="na", set_elements="na", additional_set_id2="na", set_elements2="na") +} else if(length(additional_sets)==1) { +# Get sorted elements (keep alphabetical order) +elements <- sort(unique(tolower(afd[[additional_sets[1]]]))) +list( +additional_set_id=additional_sets[1], +set_elements=elements, +additional_set_id2="na", +set_elements2="na" +) +} else { +# Get sorted elements for first set (keep alphabetical order) +elements <- sort(unique(tolower(afd[[additional_sets[1]]]))) +list( +additional_set_id=additional_sets[1], +set_elements=elements, +additional_set_id2=additional_sets[2], +set_elements2=sort(unique(tolower(afd[[additional_sets[2]]]))) +) +} +} +compute_regional_aggregates <- function(afd, variable, regions_to_aggregate=c("World", "EU")) { +if(nrow(afd)==0) return(afd) +for(region_name in regions_to_aggregate) { +if(region_name=="World") { +afd_agg <- afd +afd_agg$n <- NULL +} else if(region_name=="EU") { +eu <- get_witch("eu") +eu_regions <- if(!exists("eu")) c("europe") else unique(eu$n) +afd_agg <- dplyr::filter(afd, n %in% eu_regions) +afd_agg$n <- NULL +} else { +next +} +if(nrow(afd_agg)==0) next +# Check if tlen exists +has_tlen <- "tlen" %in% names(afd_agg) +if(variable %in% default_meta_param()$parameter) { +agg_type <- default_meta_param()[parameter==variable & type=="nagg"]$value +if(agg_type=="sum") { +if(has_tlen) { + afd_agg <- dplyr::group_by_at(afd_agg, setdiff(names(afd_agg), c("value", "tlen"))) %>% 
dplyr::summarize(value=sum(value), tlen=dplyr::first(tlen), .groups='drop') +} else { + afd_agg <- dplyr::group_by_at(afd_agg, setdiff(names(afd_agg), "value")) %>% dplyr::summarize(value=sum(value), .groups='drop') +} +} else if(agg_type=="mean") { +if(has_tlen) { + afd_agg <- dplyr::group_by_at(afd_agg, setdiff(names(afd_agg), c("value", "tlen"))) %>% dplyr::summarize(value=mean(value), tlen=dplyr::first(tlen), .groups='drop') +} else { + afd_agg <- dplyr::group_by_at(afd_agg, setdiff(names(afd_agg), "value")) %>% dplyr::summarize(value=mean(value), .groups='drop') +} +} else { +if(has_tlen) { + afd_agg <- dplyr::group_by_at(afd_agg, setdiff(names(afd_agg), c("value", "tlen"))) %>% dplyr::summarize(value=sum(value), tlen=dplyr::first(tlen), .groups='drop') +} else { + afd_agg <- dplyr::group_by_at(afd_agg, setdiff(names(afd_agg), "value")) %>% dplyr::summarize(value=sum(value), .groups='drop') +} +} +} else { +if(has_tlen) { + afd_agg <- dplyr::group_by_at(afd_agg, setdiff(names(afd_agg), c("value", "tlen"))) %>% dplyr::summarize(value=sum(value), tlen=dplyr::first(tlen), .groups='drop') +} else { + afd_agg <- dplyr::group_by_at(afd_agg, setdiff(names(afd_agg), "value")) %>% dplyr::summarize(value=sum(value), .groups='drop') +} +} +afd_agg <- dplyr::mutate(afd_agg, n=region_name) %>% as.data.frame() +afd <- rbind(afd, afd_agg[, names(afd)]) +} +afd +} +subset_by_additional_sets <- function(afd, additional_set_id, additional_set_selected, additional_set_id2=NULL, additional_set_selected2=NULL) { +if(additional_set_id!="na") { +afd[[additional_set_id]] <- tolower(afd[[additional_set_id]]) +afd <- subset(afd, get(additional_set_id) %in% additional_set_selected) +afd[[additional_set_id]] <- NULL +if(length(additional_set_selected)>1) { +afd <- dplyr::group_by_at(afd, setdiff(names(afd), "value")) %>% dplyr::summarize(value=sum(value), .groups='drop') +} +} +if(!is.null(additional_set_id2) && additional_set_id2!="na") { +afd[[additional_set_id2]] <- 
tolower(afd[[additional_set_id2]]) +afd <- subset(afd, get(additional_set_id2) %in% additional_set_selected2) +afd[[additional_set_id2]] <- NULL +if(length(additional_set_selected2)>1) { +afd <- dplyr::group_by_at(afd, setdiff(names(afd), "value")) %>% dplyr::summarize(value=sum(value), .groups='drop') +} +} +afd +} +apply_growth_rate <- function(afd, growth_rate_flag) { +if(!growth_rate_flag) return(afd) +# Expect year column to already exist (added in prepare_plot_data with proper tlen handling) +if(!"year" %in% names(afd)) { + # Fallback: add year if it doesn't exist + if("tlen" %in% names(afd)) { + afd$year <- ttoyear(afd$t, afd$tlen) + } else { + afd$year <- ttoyear(afd$t) + } +} +dplyr::group_by_at(afd, setdiff(names(afd), c("t", "value", "year"))) %>% +dplyr::arrange(t) %>% +dplyr::mutate(growthrate=((value/dplyr::lag(value))^(1/(year-dplyr::lag(year)))-1)*100) %>% +dplyr::select(-value) %>% +dplyr::rename(value=growthrate) %>% +dplyr::mutate(value=ifelse(is.na(value), 0, value)) %>% +dplyr::ungroup() +} +prepare_plot_data <- function(variable, field_show, yearlim, scenarios, additional_set_id, additional_set_selected, additional_set_id2=NULL, additional_set_selected2=NULL, regions, growth_rate_flag=FALSE, time_filter=TRUE, compute_aggregates=TRUE, verbose=FALSE) { +afd <- get_witch(variable, , field=field_show) +if(verbose) print(stringr::str_glue("Variable {variable} loaded.")) + +# Check if variable has time dimension +has_time_dim <- "t" %in% names(afd) + +# Order pathdir factor according to results_dir vector +if("pathdir" %in% names(afd) && exists("results_dir", envir=.GlobalEnv)) { + results_dir <- get("results_dir", envir=.GlobalEnv) + if(length(results_dir) > 1) { + pathdir_levels <- basename(results_dir) + afd$pathdir <- factor(afd$pathdir, levels=pathdir_levels) + } +} + +afd <- subset_by_additional_sets(afd, additional_set_id, additional_set_selected, additional_set_id2, additional_set_selected2) + +if(has_time_dim) { + # Process time-series data 
+ # Add year column if not already present + if(!"year" %in% names(afd)) { + # Year column not present, need to calculate it + if("tlen" %in% names(afd)) { + # When tlen varies over time, we need to calculate year properly + # Create a unique mapping from t to year using cumsum + tlen_mapping <- afd %>% + dplyr::select(t, tlen) %>% + dplyr::distinct() %>% + dplyr::arrange(as.numeric(t)) %>% + dplyr::mutate( + tlen = ifelse(is.na(tlen), tstep, tlen), + # Calculate cumulative time: start year + sum of all previous tlen values + year = year0 + c(0, cumsum(tlen[-dplyr::n()])) + ) %>% + dplyr::select(t, year) + # Join the year mapping back to the data + afd <- afd %>% dplyr::left_join(tlen_mapping, by = "t") + } else { + # No tlen, use simple conversion + afd$year <- ttoyear(afd$t) + } + } + + if(time_filter) { + afd <- subset(afd, year>=yearlim[1] & year<=yearlim[2]) + } + afd <- dplyr::filter(afd, !is.na(value)) + if(compute_aggregates) { + afd <- compute_regional_aggregates(afd, variable) + } + afd <- apply_growth_rate(afd, growth_rate_flag) + afd <- subset(afd, file %in% c(scenarios, paste0(scenarios, "(b1)"), paste0(scenarios, "(b2)"), paste0(scenarios, "(b3)")) | stringr::str_detect(file, "historical")) + unit_conv <- unit_conversion(variable) + if(growth_rate_flag) { + unit_conv$unit <- " % p.a." 
+ unit_conv$convert <- 1 + } + afd$value <- afd$value * unit_conv$convert + list(data=afd, unit_conv=unit_conv, has_time_dim=TRUE) +} else { + # Process non-time-series data (no time dimension) + afd <- dplyr::filter(afd, !is.na(value)) + afd <- subset(afd, file %in% scenarios) + unit_conv <- unit_conversion(variable) + afd$value <- afd$value * unit_conv$convert + list(data=afd, unit_conv=unit_conv, has_time_dim=FALSE) +} +} +create_gdx_plot <- function(afd, variable, unit_conv, regions, yearlim, ylim_zero, region_palette, results_dir, show_historical=TRUE) { +if(nrow(afd)==0) return(NULL) +# Remove any rows with NA or infinite values in year or value +afd <- subset(afd, !is.na(year) & !is.infinite(year) & !is.na(value) & !is.infinite(value)) +if(nrow(afd)==0) return(NULL) +# Filter data by year range BEFORE splitting +afd <- subset(afd, year >= yearlim[1] & year <= yearlim[2]) +if(nrow(afd)==0) return(NULL) +# Order pathdir factor according to results_dir vector +if("pathdir" %in% names(afd) && length(results_dir) > 1) { + pathdir_levels <- basename(results_dir) + afd$pathdir <- factor(afd$pathdir, levels=pathdir_levels) +} +# Separate model and historical data +model_data <- subset(afd, n %in% regions & !stringr::str_detect(file, "historical")) +hist_data <- subset(afd, n %in% regions & stringr::str_detect(file, "historical")) +# Return NULL if no model data +if(nrow(model_data)==0) return(NULL) +# Create plot with nice aesthetics and historical data overlay +if(length(regions)==1 || (length(regions)==1 && regions[1] %in% c("World","EU"))) { +p <- ggplot2::ggplot(model_data, ggplot2::aes(x=year, y=value, color=file)) + +ggplot2::geom_line(linewidth=1.5) + +ggplot2::labs(title=variable, x=NULL, y=unit_conv$unit) + +ggplot2::theme(text=ggplot2::element_text(size=16), + legend.position="bottom", + legend.direction="horizontal", + legend.title=ggplot2::element_blank()) +if(ylim_zero) p <- p + ggplot2::geom_hline(yintercept=0, alpha=0.5) +# Add historical data if 
available and show_historical is TRUE +if(show_historical && nrow(hist_data)>0) { +p <- p + ggplot2::geom_line(data=hist_data, ggplot2::aes(x=year, y=value, color=file), linewidth=1.0) +} +} else { +p <- ggplot2::ggplot(model_data, ggplot2::aes(x=year, y=value, color=n, linetype=file)) + +ggplot2::geom_line(linewidth=1.5) + +ggplot2::labs(title=variable, x=NULL, y=unit_conv$unit) + +ggplot2::scale_colour_manual(values=region_palette) + +ggplot2::theme(text=ggplot2::element_text(size=16), + legend.position="bottom", + legend.direction="horizontal", + legend.title=ggplot2::element_blank()) +if(ylim_zero) p <- p + ggplot2::geom_hline(yintercept=0, alpha=0.5) +# Add historical data if available and show_historical is TRUE +if(show_historical && nrow(hist_data)>0) { +p <- p + ggplot2::geom_line(data=hist_data, ggplot2::aes(x=year, y=value, color=n, group=interaction(n, file)), linewidth=1.0) +} +} +if(length(results_dir)!=1) p <- p + ggplot2::facet_grid(. ~ pathdir) +p +} diff --git a/R/shiny_ui_helpers.R b/R/shiny_ui_helpers.R new file mode 100644 index 0000000..b65afd9 --- /dev/null +++ b/R/shiny_ui_helpers.R @@ -0,0 +1,46 @@ +create_scenario_selector <- function(scenlist) { + selectInput(inputId="scenarios_selected", label="Scenarios:", choices=unname(scenlist), size=length(scenlist), selectize=FALSE, multiple=TRUE, selected=unname(scenlist)) +} +create_variable_selector <- function(list_of_variables, default_var="Q_EMI", use_picker=TRUE) { + if(use_picker) { + pickerInput(inputId="variable_selected", label="Variable:", choices=list_of_variables, selected=default_var, options=list(`live-search`=TRUE)) + } else { + selectInput(inputId="variable_selected", label="Variable:", choices=list_of_variables, size=1, selectize=FALSE, multiple=FALSE, selected=default_var) + } +} +create_region_selector <- function(witch_regions, include_aggregates=c("World", "EU"), default_region="World") { + if(length(include_aggregates)>0) { + regions_for_selector <- 
list(Aggregate=as.list(include_aggregates), `Native regions`=witch_regions) + } else { + regions_for_selector <- c(witch_regions, include_aggregates) + } + selectInput(inputId="regions_selected", label="Regions:", regions_for_selector, size=max(10, length(regions_for_selector)), selectize=FALSE, multiple=TRUE, selected=default_region) +} +get_gdx_variable_list <- function(results_dir, filelist, filter_time_dependent=FALSE) { + list_of_variables <- NULL + for(f in filelist) { + .gdx <- gdx(file.path(results_dir[1], paste0(f, ".gdx"))) + for(item in c("variables", "parameters")) { + info_item <- .gdx[[item]] + info_item <- info_item[info_item$dim<=4,] + info_item <- info_item[sapply(info_item$domnames, function(x) "t" %in% x),] + list_of_variables <- c(list_of_variables, info_item$name) + } + } + list_of_variables <- unique(list_of_variables) + list_of_variables <- c(sort(str_subset(list_of_variables, "^[:upper:]")), sort(str_subset(list_of_variables, "^[:lower:]"))) + if(filter_time_dependent) { + list_of_variables <- str_subset(list_of_variables, pattern="_t$") + } + list_of_variables +} +get_gdx_variable_list_simple <- function(results_dir, filelist) { + list_of_variables <- NULL + for(f in filelist) { + .gdx <- gdx(file.path(results_dir[1], paste0(f, ".gdx"))) + list_of_variables <- c(list_of_variables, all_items(.gdx)$variables) + list_of_variables <- c(list_of_variables, all_items(.gdx)$parameters) + } + list_of_variables <- unique(list_of_variables) + c(sort(str_subset(list_of_variables, "^[:upper:]")), sort(str_subset(list_of_variables, "^[:lower:]"))) +} diff --git a/R/witch_functions.R b/R/witch_functions.R deleted file mode 100644 index 46108ab..0000000 --- a/R/witch_functions.R +++ /dev/null @@ -1,194 +0,0 @@ -## Further Options ## -#restrict_regions <- c("usa") # if exists, only these regions will be loaded everywhere -#Change some regions to nice names in case (regions not specified will use standard witch name) -#nice_region_names <- 
c("ccasia"="Caucasus and Central Asia", "china"="PRC", "india"="India", "indonesia"="Indonesia", "sasia"="South Asia", "seasia"="Southeast Asia") -deploy_online <- FALSE #if not deployed online save graphs -figure_format="png" -historical = TRUE #add historical data where available -ggplot2::theme_set(ggplot2::theme_bw()) #set default theme -show_numbers_2100 = FALSE -legend_position="bottom" # "none", "bottom", or "right" -write_plotdata_csv = F #if true, saves data of plot as csv file -varlist_combine_old_new_j <- c("Q_EN", "K_EN", "I_EN", "Q_IN") #variables for which to combine old and new j technologies -if(!exists("year0")) {year0 = 2005; tstep = 5;} -if(!exists("yearmin")) yearmin = 1980 -if(!exists("yearmax")) yearmax = 2100 -## End of further Options ## - -witch_folder <- normalizePath(witch_folder) -main_folder <- normalizePath(main_folder) - -fullpathdir = file.path(main_folder, subdir) -#Specify directory for graphs and data to be saved: by default: /graphs/ in the folder -graphdir = if(length(fullpathdir)>1){file.path(main_folder, "graphs") }else{file.path(fullpathdir, "graphs")} - -#check if directory valid -if(any(!dir.exists(fullpathdir))){stop("Please check the main directory and sub directory!")} -if(!dir.exists(witch_folder)){stop("Please check your witch directory!")} - -# witchtools -if (!"witchtools" %in% rownames(installed.packages())) { - if (!"remotes" %in% rownames(installed.packages())) - install.packages("remotes", repos = "http://cloud.r-project.org") - remotes::install_github("witch-team/witchtools") - if (!requireNamespace("witchtools")) stop("Package witchtools not found") -} -library(witchtools) - -#Install and load packages -require_package <- function(package){ - if(!is.element(package, .packages(all.available = TRUE))){ - try(install.packages(package, repos="http://cran.rstudio.com"), silent = TRUE) - } - suppressPackageStartupMessages(library(package,character.only=T, quietly = TRUE)) -} - -pkgs <- c('data.table', 'stringr', 
'docopt', 'countrycode', 'ggplot2', - 'ggpubr', 'scales', 'RColorBrewer', - 'dplyr', 'openxlsx', - 'gsubfn', 'tidyr', 'rlang', 'shiny', 'shinyWidgets','bslib', - 'shinythemes', - 'rworldmap', - 'sf', 'rnaturalearth', 'plotly', 'purrr', - #'reldist', - 'tidytidbits', - 'forcats', 'arrow', 'memoise') -res <- lapply(pkgs, require_package) -require_gdxtools() -library(dplyr, warn.conflicts = FALSE) -# Suppress summarise info -options(dplyr.summarise.inform = FALSE) - -#load basic functions -source('R/auxiliary_functions.R') -source('R/witch_load_and_plot.R') -source('R/add_historical_values.R') -source('R/get_iiasadb.R') -source('R/get_witch.R') - - -#from here only if GDX files are loaded -if(!exists("iamc_filename") & !exists("iamc_databasename")){ -filelist <- gsub(".gdx","",list.files(path=fullpathdir[1], full.names = FALSE, pattern="*.gdx", recursive = FALSE)) -if(!exists("restrict_files")) restrict_files <- "results_" -if(restrict_files[1]!=""){ - for(i in 1:length(restrict_files)){ - .filelist_res = filelist[apply(outer(filelist, restrict_files[i], str_detect), 1, all)] - if(i==1) .filelist_res_all <- .filelist_res else .filelist_res_all <- c(.filelist_res_all, .filelist_res) - } - filelist <- unique(.filelist_res_all) -} -if(exists("exclude_files")) if(exclude_files[1]!="") filelist = filelist[!str_detect(filelist, paste(exclude_files, collapse = '|'))] -if(length(filelist)==0){stop("No GDX files found.")} -if(exists("scenlist")){ - #check if missing scenarios in scenlist - if(length(names(scenlist[!(names(scenlist) %in% filelist)]))>0){print("Missing Scenarios:"); print(cat(names(scenlist[!(names(scenlist) %in% filelist)]), sep=", "))} - filelist <- intersect(names(scenlist), filelist) - scenlist <- scenlist[filelist] - } -if(!exists("removepattern")) removepattern <- "results_" -if(!exists("scenlist")){scenlist <- gsub(paste(removepattern, collapse="|"), "", filelist); names(scenlist) <- filelist} -#print("GDX Files:") -#print(filelist) 
-#print(paste("Scenarios used:", length(scenlist))) -print(data.frame(scenlist=scenlist)) - -#file to separate check -if(exists("file_separate")) file_group_columns <- c("file", unname(file_separate[3:length(file_separate)])) else file_group_columns <- "file" - -#check if multiple time steps -if(length(unique(suppressWarnings(batch_extract("tlen", files = file.path(fullpathdir,paste0(filelist,".gdx"))))$tlen$value)) > 1) flexible_timestep <- T else flexible_timestep <- F #allow for flexible time step - -#in case some runs are stochastic, set flag and provide mapping -tset <- get_witch("t") -if("t" %in% names(tset)){ - if(any(str_detect((tset %>% select(t) %>% unique())$t, "_"))){ - stochastic_files <- tset %>% filter(str_detect(t, "_")) %>% mutate(numeric_t = as.numeric(sub(".*_(\\d+)$", "\\1", t))) %>% group_by(file) %>% summarise(num_branches = max(numeric_t, na.rm = TRUE)) - }else{stochastic_files <- NULL} -}else{stochastic_files <- NULL} - - -#get variable description of all variables from the 1st file -mygdx <- gdx(file.path(fullpathdir[1],paste0(filelist[1],".gdx"))) -all_var_descriptions <- rbind(data.frame(name=mygdx$variables$name, description=mygdx$variables$text), data.frame(name=mygdx$parameters$name, description=mygdx$parameters$text)) - -#Palettes for WITCH regions and regional aggregation -if(!exists("reg_id")){ -conf <- get_witch("conf") -if(!(exists("conf"))) stop("No conf set found. 
Please specify region_i = x manually!") -if(length(unique(subset(conf, V1=="regions")$V2))>1) print("Be careful: not all results files were run with the same regional aggregation!") -reg_id <- subset(conf, file==scenlist[1] & pathdir==basename(fullpathdir[1]) & V1=="regions")$V2 -} -n <- suppressWarnings(batch_extract("n", files = file.path(fullpathdir,paste0(filelist,".gdx")))) -if(is.null(n$n)) {witch_regions <- "World"} else witch_regions <- unique(n$n$V1) - -if(exists("nice_region_names")) witch_regions <- dplyr::recode(witch_regions, !!!nice_region_names) -display_regions <- witch_regions - -if(!dir.exists(file.path(witch_folder, paste0("data_", reg_id)))) print("No data_* directory for historical data found.") - -region_palette_specific <- setNames(rainbow(length(witch_regions)), witch_regions) #just in case have a fall back colour -region_palette_witch <- c(usa="darkblue",Usa="darkblue",oldeuro="blue", neweuro="cornflowerblue",kosau="darkgreen",Kosau="darkgreen",cajaz="chartreuse4",Cajaz="chartreuse4",te="gold2",Te="gold2",mena="darkgoldenrod4",Mena="darkgoldenrod4",ssa="goldenrod",Ssa="goldenrod",sasia="darkorange2","South Asia"="darkorange2",china="deeppink3",PRC="deeppink3",easia="orangered",ESEAP="orangered",laca="#fbb714",Laca="#fbb714",india="#fbf003",India="#fbf003",europe="blue",Europe="blue",indonesia="lightsalmon3",Indonesia="lightsalmon3",Rest_of_World="grey48",chinaw="darkorange",chinac="darkorange2",chinae="darkorange4",italy="green",mexico="slateblue2",brazil="tomato4",canada="blueviolet",jpnkor="darkseagreen",oceania="forestgreen",southafrica="indianred3",seasia="orangered",World="black", "Global Pool"="black") -#add ed57 region colors for RICE50+ -region_palette_ed57 <- c("arg" = "#000000","aus" = "#48d1cc","aut" = "#ae8000","bel" = "#800000","bgr" = "#003366","blt" = "#bf4040","bra" = "#ffd633","can" = "#6600cc","chl" = "#ffece6","chn" = "#ff531a","cor" = "#adebad","cro" = "#808080","dnk" = "#ff9933","egy" = "#0044cc","esp" = "#ffd6cc","fin" 
= "#00cccc","fra" = "#cc0000","gbr" = "#ffffdd","golf57" = "#33d6ff","grc" = "#00ffcc","hun" = "#9999ff","idn" = "#996633","irl" = "#ff4dff","ita" = "#ffff00","jpn" = "#006600","meme"= "#b32d00","mex" = "#ccff33","mys" = "#145252","nde" = "#00d900","nld" = "#c309bd","noan"= "#ffff99","noap"= "#ecf2f9","nor" = "#ff3399","oeu" = "#ffb3ff","osea"= "#008fb3","pol" = "#d6f5d6","prt" = "#003300","rcam"= "#4d1919","rcz" = "#00ffff","rfa" = "#deb887","ris" = "#000080","rjan57" = "#bf00ff","rom" = "#ff00ff","rsaf"= "#ff8000","rsam"= "#0000ff","rsas"= "#ccd6dd","rsl" = "#00ff00","rus" = "#66757f","slo" = "#ff3091","sui" = "#61a62f","swe" = "#cb1942","tha" = "#efff14","tur" = "#4b0082","ukr" = "#c198ff","usa" = "#ffcc00","vnm" = "#3377ff","zaf" = "#b3ccff") -#Add witch34 region colors -region_palette_witch34 <- c("bnl" = "#800000","northeu" = "#bf4040","balkan" = "#808080","easteu" = "#9999ff", "che"="#61a62f", "deu" = "#deb887", "rou" = "#ff00ff", "cze" = "#00ffff", "japan"="green", korea="red") -region_palette <- replace(region_palette_specific, names(region_palette_witch), region_palette_witch) -region_palette <- replace(region_palette, names(region_palette_ed57), region_palette_ed57) -region_palette <- replace(region_palette, names(region_palette_witch34), region_palette_witch34) -#now keep only palette for regions actually used -region_palette <- region_palette[witch_regions] -if(exists("restrict_regions")) region_palette <- region_palette[restrict_regions] - -print(paste(length(scenlist), "Scenarios and", length(witch_regions), "regions considered.")) - -witch_name_short <- function(witch_name){ - witch_name <- gsub("indonesia", "IDN", witch_name) - witch_name_shortened <- substr(toupper(witch_name), 1, 3) - witch_name_shortened <- gsub("MEN", "MEA", witch_name_shortened) - witch_name_shortened <- gsub("SOU", "ZAF", witch_name_shortened) - witch_name_shortened <- gsub("CHI", "CHN", witch_name_shortened) - witch_name_shortened <- gsub("TE", "TEC", witch_name_shortened) - 
return(witch_name_shortened) -} -region_palette_specific_short <- region_palette; names(region_palette_specific_short) <- witch_name_short(names(region_palette_specific)) - -witch_region_names <-"n,longname -canada,Canada -jpnkor,Japan-Korea -oceania,Oceania -indonesia,Indonesia -southafrica,South Africa -brazil,Brazil -mexico,Mexico -china,China -india,India -te,Transition Economies -ssa,Sub-Saharan Africa -laca,Latin America-Caribbean -sasia,South Asia -seasia,South East Asia -mena,Middle East-North Africa -europe,Europe -usa,United States of America -easia,East Asia -kosau,South-Korea and Australia -cajaz,Canada Japan New-Zealand, -neweuro,Eastern Europe -oldeuro,Western Europe" -witch_region_names <- read.table(textConnection(witch_region_names), sep=",", head=T, dec=".") -region_palette_longnames <- region_palette -names(region_palette_longnames) <- dplyr::recode(names(region_palette), !!!setNames(paste0(as.character(witch_region_names$longname), " (",as.character(witch_region_names$n),")"), as.character(witch_region_names$n))) - -#load specialized functions -source('R/map_functions.R') -source('R/export_variables.R') -source('R/diagnostics.R') -source('R/impact_plots.R') -source('R/energy_plots.R') -source('R/emission_plots.R') -source('R/climate_plots.R') -source('R/policy_cost.R') -source('R/inequality_plots.R') -source('R/RICE50x_plots.R') -} \ No newline at end of file diff --git a/R/witch_load_and_plot.R b/R/witch_load_and_plot.R index 42938e6..10d38bb 100644 --- a/R/witch_load_and_plot.R +++ b/R/witch_load_and_plot.R @@ -12,11 +12,11 @@ plot_witch <- function(data, varname="value", regions="World", scenplot=scenlist p <- ggplot() + xlab("") +ylab(ylab) if(ylim0) p <- p + ylim(0, NA) if(regions[1]=="World" | length(regions)==1){ - p <- p + geom_line(data = data %>% filter(file!="historical"), aes(ttoyear(t),plot_value,colour=file), stat="identity", linewidth=line_size) + geom_line(data = data %>% filter(file=="historical"), 
aes(ttoyear(t),plot_value,colour=file), stat="identity", size=0.5) + p <- p + geom_line(data = data %>% filter(file!="historical"), aes(ttoyear(t),plot_value,colour=file), stat="identity", linewidth=line_size) + geom_line(data = data %>% filter(file=="historical"), aes(ttoyear(t),plot_value,colour=file), stat="identity", linewidth=0.5) }else{ - p <- p + geom_line(data = data %>% filter(file!="historical"), aes(ttoyear(t),plot_value,colour=n, linetype=file), stat="identity", linewidth=line_size) + scale_colour_manual(values = region_palette) + geom_line(data = data %>% filter(file=="historical"), aes(ttoyear(t),plot_value,colour=n, linetype=file), stat="identity", size=0.5) + p <- p + geom_line(data = data %>% filter(file!="historical"), aes(ttoyear(t),plot_value,colour=n, linetype=file), stat="identity", linewidth=line_size) + scale_colour_manual(values = region_palette) + geom_line(data = data %>% filter(file=="historical"), aes(ttoyear(t),plot_value,colour=n, linetype=file), stat="identity", linewidth=0.5) } - if(length(fullpathdir)!=1){p <- p + facet_grid(pathdir ~ .)} + if(length(results_dir)!=1){p <- p + facet_grid(pathdir ~ .)} return(p) } @@ -30,7 +30,7 @@ get_plot_witch <- function(variable_name, additional_set="na", additional_set_id #some default values, maybe not even needed to customize #removepattern="results_" #DEBUG: - #variable_name="Q_OUT"; additional_set="f"; additional_set_id="oil"; convert=1; unit=""; aggregation="regional"; cumulative=FALSE; plot=TRUE; bar=""; bar_x="time"; bar_y="value"; bar_setvalues=""; bar_colors=""; regions=witch_regions; scenplot=scenlist; variable_field="l"; current_pathdir = fullpathdir[1]; file <- filelist[1]; + #variable_name="Q_OUT"; additional_set="f"; additional_set_id="oil"; convert=1; unit=""; aggregation="regional"; cumulative=FALSE; plot=TRUE; bar=""; bar_x="time"; bar_y="value"; bar_setvalues=""; bar_colors=""; regions=witch_regions; scenplot=scenlist; variable_field="l"; current_pathdir = results_dir[1]; file 
<- filelist[1]; line_size = 1.5; show_legend_title = F if(additional_set_id=="all"){plot=FALSE} @@ -64,7 +64,7 @@ get_plot_witch <- function(variable_name, additional_set="na", additional_set_id } if (additional_set != "na" & additional_set_id == "sum") #sum over the set if "sum" is choosen { - if(length(fullpathdir)>=1){allfilesdata <- aggregate(value~n+t+file+pathdir, data=allfilesdata, sum)} + if(length(results_dir)>=1){allfilesdata <- aggregate(value~n+t+file+pathdir, data=allfilesdata, sum)} else{allfilesdata <- aggregate(value~n+t+file, data=allfilesdata, sum, na.rm=TRUE)} allfilesdata <- as.data.frame(allfilesdata) allfilesdata <- as.data.table(allfilesdata) @@ -83,16 +83,16 @@ get_plot_witch <- function(variable_name, additional_set="na", additional_set_id if (aggregation == "global_sum") { allfilesdata$n <- NULL - if(length(fullpathdir)>=1){allfilesdata <- aggregate(value~t+file+pathdir, data=allfilesdata, sum)} + if(length(results_dir)>=1){allfilesdata <- aggregate(value~t+file+pathdir, data=allfilesdata, sum)} else{allfilesdata <- aggregate(value~t+file, data=allfilesdata, sum)} #print(str(allfilesdata)); assign("test",allfilesdata,envir = .GlobalEnv) allfilesdata <- as.data.table(allfilesdata) #try for RCP: - p <- ggplot(data=subset(allfilesdata),aes(ttoyear(t),value, colour=get(line_colour), linetype=get(line_type))) + geom_line(stat="identity", size=line_size) + xlab("year") +ylab(unit_conversion$unit) + p <- ggplot(data=subset(allfilesdata),aes(ttoyear(t),value, colour=get(line_colour), linetype=get(line_type))) + geom_line(stat="identity", linewidth=line_size) + xlab("year") +ylab(unit_conversion$unit) if(show_legend_title){p <- p + labs(linetype=line_type, colour=line_colour)}else{p <- p + theme(legend.title=element_blank())} if(show_numbers_2100){p <- p + geom_text(data=subset(allfilesdata, t==20), aes(x=2100, y=value, label=round(value, 2)))} - if(length(fullpathdir)!=1){p <- p + facet_grid(pathdir ~ .)} - if(length(fullpathdir)==1){p <- p + 
guides(linetype="none")} + if(length(results_dir)!=1){p <- p + facet_grid(pathdir ~ .)} + if(length(results_dir)==1){p <- p + guides(linetype="none")} if(plot){saveplot(variable_name)} } if (aggregation == "global_mean") @@ -101,14 +101,14 @@ get_plot_witch <- function(variable_name, additional_set="na", additional_set_id allfilesdata <- allfilesdata %>% group_by_at(c("pathdir", file_group_columns, "t")) %>% summarize(value=mean(value), .groups = "drop") p <- ggplot(data=subset(allfilesdata),aes(ttoyear(t),value, colour=get(line_colour), linetype=get(line_type))) + geom_line(stat="identity", linewidth=line_size) + xlab("year") +ylab(unit_conversion$unit) + labs(linetype=line_type, colour=line_colour) if(show_numbers_2100){p <- p + geom_text(data=subset(allfilesdata, t==20), aes(x=2100, y=value, label=round(value, 2)))} - if(length(fullpathdir)!=1){p <- p + facet_grid(pathdir ~ .)} - if(length(fullpathdir)==1){p <- p + guides(linetype="none")} + if(length(results_dir)!=1){p <- p + facet_grid(pathdir ~ .)} + if(length(results_dir)==1){p <- p + guides(linetype="none")} if(plot){saveplot(variable_name)} } if (aggregation == "regional") { p <- ggplot(subset(allfilesdata, n %in% regions),aes(ttoyear(t),value,colour=n, linetype=file)) + geom_line(stat="identity", linewidth=line_size) + xlab("year") +ylab(unit_conversion$unit) + scale_colour_manual(values = region_palette) - if(length(fullpathdir)!=1){p <- p + facet_grid(pathdir ~ .)} + if(length(results_dir)!=1){p <- p + facet_grid(pathdir ~ .)} if(plot){saveplot(variable_name)} } if (aggregation == "all") @@ -122,11 +122,11 @@ get_plot_witch <- function(variable_name, additional_set="na", additional_set_id if(bar=="set"){ if(bar_setvalues[1] != ""){allfilesdata <- subset(allfilesdata, get(additional_set) %in% bar_setvalues)} if(additional_set!="na"){allfilesdata[[additional_set]] <- as.factor(allfilesdata[[additional_set]])} - if(length(fullpathdir)>=1){allfilesdata <- allfilesdata[, lapply(.SD, sum), by=c("t", "file", 
additional_set, "pathdir")]}else{allfilesdata <- allfilesdata[, lapply(.SD, sum), by=c("t", "file", additional_set)]} + if(length(results_dir)>=1){allfilesdata <- allfilesdata[, lapply(.SD, sum), by=c("t", "file", additional_set, "pathdir")]}else{allfilesdata <- allfilesdata[, lapply(.SD, sum), by=c("t", "file", additional_set)]} allfilesdata$n <- NULL if(additional_set!="na"){allfilesdata[[additional_set]] <- as.factor(allfilesdata[[additional_set]])} if(bar_setvalues[1] != ""){allfilesdata[[additional_set]] <- reorder.factor(allfilesdata[[additional_set]], new.order=bar_setvalues)} #to keep order from setlist in function call - if(bar_y=="share"){if(length(fullpathdir)!=1){allfilesdata <- allfilesdata %>% group_by_at(c("t", file_group_columns, "pathdir")) %>% mutate(value=value/(sum(value))*100)}else{allfilesdata <- allfilesdata %>% group_by_at(c("t", "file")) %>% mutate(value=value/(sum(value))*100)}} + if(bar_y=="share"){if(length(results_dir)!=1){allfilesdata <- allfilesdata %>% group_by_at(c("t", file_group_columns, "pathdir")) %>% mutate(value=value/(sum(value))*100)}else{allfilesdata <- allfilesdata %>% group_by_at(c("t", "file")) %>% mutate(value=value/(sum(value))*100)}} if(str_detect(bar_x, "time")){ if(!is.na(destring(bar_x))){allfilesdata <- subset(allfilesdata, t==yeartot(destring(bar_x)))} p <- ggplot(data=subset(allfilesdata),aes(ttoyear(t),value, fill=get(additional_set))) + geom_bar(stat="identity") + xlab("year") + facet_grid( ~ file) + guides(fill=guide_legend(title=NULL)) @@ -138,7 +138,7 @@ get_plot_witch <- function(variable_name, additional_set="na", additional_set_id } if(bar=="region"){ allfilesdata[["n"]] <- reorder.factor(allfilesdata[["n"]], new.order=regions) #to keep order from setlist in function call - if(bar_y=="share"){if(length(fullpathdir)!=1){allfilesdata %>% group_by_at(c("t", file_group_columns, "pathdir")) %>% mutate(value=value/(sum(value))*100)}else{allfilesdata <- allfilesdata %>% group_by_at(c("t", "file")) %>% 
mutate(value=value/(sum(value))*100)}} + if(bar_y=="share"){if(length(results_dir)!=1){allfilesdata %>% group_by_at(c("t", file_group_columns, "pathdir")) %>% mutate(value=value/(sum(value))*100)}else{allfilesdata <- allfilesdata %>% group_by_at(c("t", "file")) %>% mutate(value=value/(sum(value))*100)}} if(str_detect(bar_x, "time")){ if(!is.na(destring(bar_x))){allfilesdata <- subset(allfilesdata, t==yeartot(destring(bar_x)))} p <- ggplot(data=subset(allfilesdata),aes(ttoyear(t),value, fill=n)) + geom_bar(stat="identity") + xlab("year") + facet_grid( ~ file) + guides(fill=guide_legend(title=NULL)) + scale_fill_manual(values=region_palette) @@ -146,7 +146,7 @@ get_plot_witch <- function(variable_name, additional_set="na", additional_set_id p <- ggplot(data=subset(allfilesdata, t==yeartot(destring(bar_x))),aes(file,value, fill=n)) + geom_bar(stat="identity") + xlab("scenario") + guides(fill=guide_legend(title=NULL)) + scale_fill_manual(values=region_palette) } } - if(length(fullpathdir)!=1){p <- p + facet_grid(pathdir ~ file)} + if(length(results_dir)!=1){p <- p + facet_grid(pathdir ~ file)} if(plot){saveplot(variable_name)} } #save the variable under the WITCH name in the global environment @@ -167,7 +167,7 @@ getvar_witchhist <- function(varname, unit_conversion=1, hist_varname=varname, a for(s in 1:length(additional_sets)) tempvar[[names(additional_sets[s])]] <- additional_sets[s] } tempvar$value <- tempvar$value * unit_conversion; - tempvar <- add_historical_values(tempvar, varname = hist_varname, check_calibration = T) + tempvar <- add_historical_values(tempvar, varname = hist_varname, verbose = TRUE, iiasadb = TRUE) tempvar <- tempvar %>% filter(n %in% n_model) print(ggplot(tempvar) + geom_line(aes(ttoyear(t), value, color=file, linetype=n)) + xlab("") + ylab(ylab)) assign(varname, tempvar, envir = .GlobalEnv) diff --git a/R/witchplot.R b/R/witchplot.R new file mode 100644 index 0000000..4b8fe50 --- /dev/null +++ b/R/witchplot.R @@ -0,0 +1,584 @@ +#' @keywords 
internal +.onLoad <- function(libname, pkgname) { +ggplot2::theme_set(ggplot2::theme_bw()) +options( +results_dir="./", +restrict_files="results_", +exclude_files="", +removepattern="", +year0=2005, +tstep=5, +yearmin=1980, +yearmax=2100, +reg_id=NULL, +deploy_online=FALSE, +figure_format="png", +add_historical=TRUE, +write_plotdata_csv=FALSE +) + +# Try to automatically initialize GDX library +tryCatch({ + if(requireNamespace("gdxtools", quietly = TRUE)) { + # Silently try to initialize GDX - don't show messages during package load + suppressMessages(gdxtools::igdx()) + } +}, error = function(e) { + # Silently fail - GDX will be initialized later if needed + # User will get helpful error message from setup_gdx() if they try to use it +}) +} + +#' Clean up global environment from previous witchplot sessions +#' @keywords internal +.cleanup_witchplot_globals <- function() { + # List of all global variables created by witchplot + witchplot_globals <- c( + "results_dir", "restrict_files", "exclude_files", "removepattern", + "deploy_online", "figure_format", "add_historical", "write_plotdata_csv", + "reg_id", "year0", "tstep", "yearmin", "yearmax", + "filelist", "scenlist", "file_group_columns", + "witch_regions", "display_regions", "region_palette", + "region_palette_specific_short", "region_palette_longnames", + "stochastic_files", "all_var_descriptions", + "graphdir", "map_var_hist", "iamc_filename", "iamc_databasename", + "iiasadb_snapshot", "iiasadb_historical", + "varlist_combine_old_new_j", "file_separate", "nice_region_names", "restrict_regions" + ) + + # Remove all witchplot globals that exist + for(var in witchplot_globals) { + if(exists(var, envir = .GlobalEnv)) { + rm(list = var, envir = .GlobalEnv) + } + } + + invisible(NULL) +} + +#' Launch WITCH Model Interactive Visualization +#' +#' Loads WITCH model GDX result files and launches an interactive Shiny application +#' for scenario comparison and visualization. 
+#' +#' @param results_dir Path(s) to results directory containing GDX files. Can be a vector for multiple directories (default: "./") +#' @param restrict_files Pattern to filter GDX files (default: "results_") +#' @param exclude_files Pattern to exclude GDX files (default: "") +#' @param removepattern Pattern to remove from scenario names (default: "results_") +#' @param add_historical Logical, add historical data where available (default: TRUE) +#' @param deploy_online Logical, whether to deploy online (default: FALSE) +#' @param figure_format Output format for figures: "png", "pdf", "svg" (default: "png") +#' @param write_plotdata_csv Logical, save plot data as CSV (default: FALSE) +#' @param launch Logical, launch Shiny app immediately (default: TRUE). Set FALSE to load data only. +#' @param ... Additional options passed to session configuration. Useful options include: +#' \itemize{ +#' \item \code{file_separate}: Vector to split scenario names into multiple columns. Format: c("type", "separator", "col1", "col2", ...). +#' Type can be "separate" (split all), "first" (first element), or "last" (last element). +#' Example: \code{file_separate = c("separate", "_", "model", "scenario", "carbon_price")} splits "SSP2_1p5C_high" into three columns. +#' \item \code{nice_region_names}: Named vector to rename regions for display. Example: \code{c("usa_te" = "USA", "eur" = "Europe")} +#' \item \code{restrict_regions}: Character vector of regions to display (filters out others) +#' } +#' +#' @return Invisibly returns NULL. Launches Shiny application if launch=TRUE. 
+#' +#' @examples +#' \dontrun{ +#' # Basic usage with defaults +#' run_witch() +#' +#' # Disable historical data +#' run_witch(add_historical = FALSE) +#' +#' # Specify custom paths +#' run_witch(results_dir = "results") +#' +#' # Compare multiple result directories +#' run_witch(results_dir = c("results_bau", "results_policy")) +#' +#' # Load data without launching UI (for scripting) +#' run_witch(launch = FALSE) +#' +#' # Split scenario names into separate columns +#' # If files are named like "SSP2_BAU_low", "SSP2_1p5C_high", etc. +#' run_witch(file_separate = c("separate", "_", "SSP", "policy", "sensitivity")) +#' +#' # Extract only last part of scenario name +#' run_witch(file_separate = c("last", "_", "sensitivity")) +#' +#' # Rename regions and restrict display +#' run_witch( +#' nice_region_names = c("usa_te" = "USA", "eur" = "Europe"), +#' restrict_regions = c("USA", "Europe", "China") +#' ) +#' } +#' +#' @export +run_witch <- function(results_dir="./", restrict_files="results_", exclude_files="", removepattern="results_", + add_historical=TRUE, deploy_online=FALSE, figure_format="png", write_plotdata_csv=FALSE, + launch=TRUE, ...) { +# Clean up any global variables from previous sessions +.cleanup_witchplot_globals() +if(!is.vector(results_dir)) results_dir <- c(results_dir) +# Normalize results_dir to avoid double slashes in file paths +results_dir <- normalizePath(results_dir, winslash="/", mustWork=FALSE) +# Set all options +opts <- list(results_dir=results_dir, restrict_files=restrict_files, exclude_files=exclude_files, removepattern=removepattern, deploy_online=deploy_online, figure_format=figure_format, add_historical=add_historical, write_plotdata_csv=write_plotdata_csv, ...)
+options(opts) +assign("results_dir", results_dir, envir=.GlobalEnv) +assign("restrict_files", restrict_files, envir=.GlobalEnv) +assign("exclude_files", exclude_files, envir=.GlobalEnv) +assign("removepattern", removepattern, envir=.GlobalEnv) +assign("deploy_online", deploy_online, envir=.GlobalEnv) +assign("figure_format", figure_format, envir=.GlobalEnv) +assign("add_historical", add_historical, envir=.GlobalEnv) +assign("write_plotdata_csv", write_plotdata_csv, envir=.GlobalEnv) +# Clear memoise cache for get_witch when add_historical changes +if(exists("get_witch")) { + memoise::forget(get_witch) +} +.initialize_witchplot_session() +if(launch) shiny::runApp(appDir=system.file("gdxcompaR", "witch", package="witchplot")) +} + +#' Launch RICE50+ Model Interactive Visualization +#' +#' Loads RICE50+ model GDX result files and launches an interactive Shiny application +#' for scenario comparison and visualization with regional disaggregation. +#' +#' @param results_dir Path(s) to results directory containing GDX files (default: "./") +#' @param reg_id Regional aggregation ID, e.g., "ed58" for 58 regions (default: "ed58") +#' @param year0 Base year for the model (default: 2015) +#' @param tstep Time step in years (default: 5) +#' @param restrict_files Pattern to filter GDX files (default: "results_") +#' @param exclude_files Pattern to exclude GDX files (default: "") +#' @param removepattern Pattern to remove from scenario names (default: "results_") +#' @param add_historical Logical, add historical data where available (default: TRUE) +#' @param deploy_online Logical, whether to deploy online (default: FALSE) +#' @param figure_format Output format for figures: "png", "pdf", "svg" (default: "png") +#' @param write_plotdata_csv Logical, save plot data as CSV (default: FALSE) +#' @param launch Logical, launch Shiny app immediately (default: TRUE) +#' @param ... Additional options passed to session configuration.
Useful options include: +#' \itemize{ +#' \item \code{file_separate}: Vector to split scenario names into multiple columns. Format: c("type", "separator", "col1", "col2", ...). +#' Type can be "separate" (split all), "first" (first element), or "last" (last element). +#' Example: \code{file_separate = c("separate", "_", "model", "scenario", "carbon_price")} splits "SSP2_1p5C_high" into three columns. +#' \item \code{nice_region_names}: Named vector to rename regions for display. Example: \code{c("usa_te" = "USA", "eur" = "Europe")} +#' \item \code{restrict_regions}: Character vector of regions to display (filters out others) +#' } +#' +#' @return Invisibly returns NULL. Launches Shiny application if launch=TRUE. +#' +#' @examples +#' \dontrun{ +#' # Basic usage +#' run_rice() +#' +#' # Disable historical data +#' run_rice(add_historical = FALSE) +#' +#' # Custom regional aggregation +#' run_rice(reg_id = "ed57", year0 = 2020, tstep = 10) +#' +#' # Specify custom paths +#' run_rice(results_dir = "results") +#' } +#' +#' @export +run_rice <- function(results_dir="./", reg_id="ed58", year0=2015, tstep=5, restrict_files="results_", exclude_files="", removepattern="results_", + add_historical=TRUE, deploy_online=FALSE, figure_format="png", write_plotdata_csv=FALSE, + launch=TRUE, ...) { +# Clean up any global variables from previous sessions +.cleanup_witchplot_globals() +if(!is.vector(results_dir)) results_dir <- c(results_dir) +# Normalize results_dir to avoid double slashes in file paths +results_dir <- normalizePath(results_dir, winslash="/", mustWork=FALSE) +# Set all options +opts <- list(results_dir=results_dir, reg_id=reg_id, year0=year0, tstep=tstep, restrict_files=restrict_files, exclude_files=exclude_files, removepattern=removepattern, deploy_online=deploy_online, figure_format=figure_format, add_historical=add_historical, write_plotdata_csv=write_plotdata_csv, ...) 
+options(opts) +assign("results_dir", results_dir, envir=.GlobalEnv) +assign("reg_id", reg_id, envir=.GlobalEnv) +assign("year0", year0, envir=.GlobalEnv) +assign("tstep", tstep, envir=.GlobalEnv) +assign("restrict_files", restrict_files, envir=.GlobalEnv) +assign("exclude_files", exclude_files, envir=.GlobalEnv) +assign("removepattern", removepattern, envir=.GlobalEnv) +assign("deploy_online", deploy_online, envir=.GlobalEnv) +assign("figure_format", figure_format, envir=.GlobalEnv) +assign("add_historical", add_historical, envir=.GlobalEnv) +assign("write_plotdata_csv", write_plotdata_csv, envir=.GlobalEnv) +# Clear memoise cache for get_witch when add_historical changes +if(exists("get_witch")) { + memoise::forget(get_witch) +} +# Load map_var_hist from CSV file +map_var_hist_file <- system.file("config", "map_var_hist_rice.csv", package="witchplot") +if(file.exists(map_var_hist_file)) { + map_var_hist <- data.table::fread(map_var_hist_file) + map_var_hist <- map_var_hist %>% dplyr::rowwise() %>% dplyr::mutate(conv=eval(parse(text=conv))) %>% data.table::as.data.table() +} else { + warning("map_var_hist_rice.csv not found, historical data mapping disabled") + map_var_hist <- data.table::data.table() +} +assign("map_var_hist", map_var_hist, envir=.GlobalEnv) +.initialize_witchplot_session() +if(launch) shiny::runApp(appDir=system.file("gdxcompaR", "rice", package="witchplot")) +} + +#' Launch FIDELIO Model Interactive Visualization +#' +#' Loads FIDELIO model GDX result files and launches an interactive Shiny application +#' for analyzing economic impacts and input-output model results. 
+#' +#' @param results_dir Path(s) to results directory containing GDX files (default: "./") +#' @param restrict_files Pattern to filter GDX files (default: "results_") +#' @param exclude_files Pattern to exclude GDX files (default: "") +#' @param removepattern Pattern to remove from scenario names (default: "results_") +#' @param deploy_online Logical, whether to deploy online (default: FALSE) +#' @param figure_format Output format for figures (default: "png") +#' @param add_historical Logical, add historical data where available (default: TRUE) +#' @param write_plotdata_csv Logical, save plot data as CSV (default: FALSE) +#' @param launch Logical, launch Shiny app immediately (default: TRUE) +#' @param ... Additional options passed to session configuration +#' +#' @return Invisibly returns NULL. Launches Shiny application if launch=TRUE. +#' +#' @examples +#' \dontrun{ +#' run_fidelio() +#' run_fidelio(results_dir = "results") +#' } +#' +#' @export +run_fidelio <- function(results_dir="./", restrict_files="results_", exclude_files="", removepattern="results_", + add_historical=TRUE, deploy_online=FALSE, figure_format="png", write_plotdata_csv=FALSE, + launch=TRUE, ...) { +# Clean up any global variables from previous sessions +.cleanup_witchplot_globals() +if(!is.vector(results_dir)) results_dir <- c(results_dir) +# Normalize results_dir to avoid double slashes in file paths +results_dir <- normalizePath(results_dir, winslash="/", mustWork=FALSE) +# Set all options +opts <- list(results_dir=results_dir, restrict_files=restrict_files, exclude_files=exclude_files, removepattern=removepattern, deploy_online=deploy_online, figure_format=figure_format, add_historical=add_historical, write_plotdata_csv=write_plotdata_csv, ...)
+options(opts) +assign("results_dir", results_dir, envir=.GlobalEnv) +assign("restrict_files", restrict_files, envir=.GlobalEnv) +assign("exclude_files", exclude_files, envir=.GlobalEnv) +assign("removepattern", removepattern, envir=.GlobalEnv) +assign("deploy_online", deploy_online, envir=.GlobalEnv) +assign("figure_format", figure_format, envir=.GlobalEnv) +assign("add_historical", add_historical, envir=.GlobalEnv) +assign("write_plotdata_csv", write_plotdata_csv, envir=.GlobalEnv) +# Clear memoise cache for get_witch when add_historical changes +if(exists("get_witch")) { + memoise::forget(get_witch) +} +.initialize_witchplot_session() +if(launch) shiny::runApp(appDir=system.file("gdxcompaR", "fidelio", package="witchplot")) +} + +#' Launch IIASA Database Comparison Viewer +#' +#' Loads IAM scenario data in IAMC format (CSV/XLSX files or IIASA database connection) +#' and launches an interactive Shiny application for comparing scenarios. +#' +#' By default (iamc_filename=NULL), automatically discovers and loads all CSV and XLSX files +#' in the results_dir. Files are combined into a single dataset for comparison. +#' Supports multiple directories - pass as a vector to load and compare across directories. +#' +#' @param results_dir Path(s) to director(ies) containing IAMC format files. Can be a vector for multiple directories (default: "./") +#' @param reg_id Regional aggregation(s) to display, e.g., c("witch20", "global") (default: c("r5")) +#' @param iamc_filename Specific IAMC file to load (CSV, XLSX, or CSV.ZIP). If NULL, loads all CSV/XLSX files in results_dir (default: NULL) +#' @param iamc_databasename Name of IIASA database to connect to (e.g., "ENGAGE"). Alternative to iamc_filename (default: NULL) +#' @param restrict_files Pattern to restrict which files are loaded (default: "" = load all). Only files matching this pattern are included. +#' @param exclude_files Pattern to exclude files from loading (default: "" = exclude none).
Files matching this pattern are skipped. +#' @param year0 Base year for the model (default: 2005) +#' @param tstep Time step in years (default: 5) +#' @param deploy_online Logical, whether to deploy online (default: FALSE) +#' @param figure_format Output format for figures (default: "png") +#' @param add_historical Logical, add historical data where available (default: TRUE) +#' @param write_plotdata_csv Logical, save plot data as CSV (default: FALSE) +#' @param map_var_hist Data frame mapping IAMC variables to historical data sources. If NULL, uses default mapping. +#' @param launch Logical, launch Shiny app immediately (default: TRUE) +#' @param ... Additional options passed to session configuration +#' +#' @return Invisibly returns NULL. Launches Shiny application if launch=TRUE. +#' +#' @examples +#' \dontrun{ +#' # Auto-load all CSV/XLSX files in current directory +#' run_iiasadb() +#' +#' # Load specific file +#' run_iiasadb(iamc_filename = "scenarios.csv") +#' +#' # Load from custom directory +#' run_iiasadb(results_dir = "EIEE-MIP") +#' +#' # Compare across multiple directories +#' run_iiasadb(results_dir = c("results_v1", "results_v2")) +#' +#' # Connect to IIASA database +#' run_iiasadb(iamc_databasename = "ENGAGE") +#' +#' # Load only files matching a pattern +#' run_iiasadb(restrict_files = "SPARCCLE") +#' +#' # Exclude files matching a pattern +#' run_iiasadb(exclude_files = "template") +#' } +#' +#' @export +run_iiasadb <- function(results_dir="./", reg_id=c("r5"), iamc_filename=NULL, iamc_databasename=NULL, + restrict_files="", exclude_files="", + add_historical=TRUE, deploy_online=FALSE, figure_format="png", write_plotdata_csv=FALSE, + launch=TRUE, ...) 
{ +# Clean up any global variables from previous sessions +.cleanup_witchplot_globals() +if(!is.vector(results_dir)) results_dir <- c(results_dir) +# Normalize results_dir to avoid double slashes in file paths +results_dir <- normalizePath(results_dir, winslash="/", mustWork=FALSE) +# Set all options +opts <- list(results_dir=results_dir, reg_id=reg_id, restrict_files=restrict_files, exclude_files=exclude_files, deploy_online=deploy_online, figure_format=figure_format, add_historical=add_historical, write_plotdata_csv=write_plotdata_csv, ...) +options(opts) +assign("restrict_files", restrict_files, envir=.GlobalEnv) +assign("exclude_files", exclude_files, envir=.GlobalEnv) +assign("results_dir", results_dir, envir=.GlobalEnv) +assign("reg_id", reg_id, envir=.GlobalEnv) +assign("deploy_online", deploy_online, envir=.GlobalEnv) +assign("figure_format", figure_format, envir=.GlobalEnv) +assign("add_historical", add_historical, envir=.GlobalEnv) +assign("write_plotdata_csv", write_plotdata_csv, envir=.GlobalEnv) +# Clear memoise cache for get_witch when add_historical changes +if(exists("get_witch")) { + memoise::forget(get_witch) +} +if(!is.null(iamc_filename)) assign("iamc_filename", iamc_filename, envir=.GlobalEnv) +if(!is.null(iamc_databasename)) assign("iamc_databasename", iamc_databasename, envir=.GlobalEnv) +# Load map_var_hist from CSV file +map_var_hist_file <- system.file("config", "map_var_hist_iiasadb.csv", package="witchplot") +if(file.exists(map_var_hist_file)) { + map_var_hist <- data.table::fread(map_var_hist_file) + map_var_hist <- map_var_hist %>% dplyr::rowwise() %>% dplyr::mutate(conv=eval(parse(text=conv))) %>% data.table::as.data.table() +} else { + warning("map_var_hist_iiasadb.csv not found, historical data mapping disabled") + map_var_hist <- data.table::data.table() +} +assign("map_var_hist", map_var_hist, envir=.GlobalEnv) +# IIASADB doesn't use GDX files, so don't initialize GDX session +if(!is.null(iamc_databasename)) { +# Try to find 
snapshot in results_dir first, then fall back to package +snapshot_file <- NULL +if(exists("results_dir") && length(results_dir) > 0) { + results_snapshot <- file.path(results_dir[1], "iiasadb_snapshot.Rdata") + if(file.exists(results_snapshot)) { + snapshot_file <- results_snapshot + } +} +# Fall back to package location if not found in results_dir +if(is.null(snapshot_file)) { + pkg_snapshot <- system.file("gdxcompaR", "iiasadb", "iiasadb_snapshot.Rdata", package="witchplot") + if(file.exists(pkg_snapshot)) { + snapshot_file <- pkg_snapshot + } +} + +load_from_db <- TRUE +snapshot_loaded_from_file <- FALSE +if(!is.null(snapshot_file)) { +input <- menu(c("Yes", "No"), title="There is a snapshot available. Do you want to load it?") +if(input==1) { + load(snapshot_file, envir=.GlobalEnv) + message("Loaded snapshot from: ", snapshot_file) + load_from_db <- FALSE + snapshot_loaded_from_file <- TRUE +} +} +if(load_from_db) { + message("Fetching data from IIASA database: ", iamc_databasename) + iiasadb_snapshot <- download_iiasadb(database=iamc_databasename, varlist="*", region="World", modlist="*", scenlist="*", add_metadata=FALSE) + names(iiasadb_snapshot) <- toupper(names(iiasadb_snapshot)) + iiasadb_snapshot <- iiasadb_snapshot %>% dplyr::select(MODEL, SCENARIO, REGION, VARIABLE, UNIT, YEAR, VALUE) %>% dplyr::rename(value=VALUE) %>% dplyr::filter(!is.na(value)) + assign("iiasadb_snapshot", iiasadb_snapshot, envir=.GlobalEnv) +} +} else { +# Load files from all directories +file_list <- list() +total_files <- 0 + +for(results_path in results_dir) { + pathdir_label <- basename(results_path) + message("\nLoading from directory: ", results_path) + + # If iamc_filename is NULL, automatically find and combine all CSV and XLSX files + if(is.null(iamc_filename)) { + csv_files <- list.files(results_path, pattern="\\.csv$", full.names=FALSE, ignore.case=TRUE) + csv_files <- csv_files[!stringr::str_detect(csv_files, "\\.zip$")] # Exclude .csv.zip files + xlsx_files <- 
list.files(results_path, pattern="\\.xlsx$", full.names=FALSE, ignore.case=TRUE) + csvzip_files <- list.files(results_path, pattern="\\.csv\\.zip$", full.names=FALSE, ignore.case=TRUE) + all_files <- c(csv_files, xlsx_files, csvzip_files) + + # Exclude Excel/Office lock files (e.g. ~$filename.xlsx) + all_files <- all_files[!stringr::str_detect(all_files, "^~\\$")] + + # Apply restrict_files filter (keep only files matching pattern) + if(restrict_files != "") { + all_files <- all_files[stringr::str_detect(all_files, restrict_files)] + } + # Apply exclude_files filter (remove files matching pattern) + if(exclude_files != "") { + all_files <- all_files[!stringr::str_detect(all_files, exclude_files)] + } + + if(length(all_files)==0) { + warning("No CSV or XLSX files found in: ", results_path) + next + } + + message("Found ", length(all_files), " file(s): ", paste(all_files, collapse=", ")) + + # Load and combine all files from this directory + for(fname in all_files) { + message(" Loading: ", fname) + + # Try to load file with error handling + file_data <- tryCatch({ + if(stringr::str_detect(fname, "\\.xlsx$")) { + openxlsx::read.xlsx(file.path(results_path, fname), sheet=1) + } else if(stringr::str_detect(fname, "\\.csv\\.zip$")) { + data.table::fread(cmd=paste0('unzip -cq "', file.path(results_path, fname), '" ', gsub(".zip", "", basename(fname))), header=TRUE, quote="\"", sep=",", check.names=FALSE, fill=TRUE) + } else { + data.table::fread(file.path(results_path, fname), header=TRUE, quote="\"", sep=",", check.names=FALSE, fill=TRUE) + } + }, error = function(e) { + warning(" Failed to load ", fname, ": ", e$message, ". 
Skipping this file.") + return(NULL) + }) + + # Skip if file failed to load + if(is.null(file_data)) next + + # Check if file has required IAMC columns + names(file_data) <- toupper(names(file_data)) + required_cols <- c("MODEL", "SCENARIO", "REGION", "VARIABLE", "UNIT") + if(!all(required_cols %in% names(file_data))) { + warning(" ", fname, " does not appear to be IAMC format (missing required columns). Skipping.") + next + } + + # Add pathdir column if multiple directories + if(length(results_dir) > 1) { + file_data$PATHDIR <- pathdir_label + } + + # Use unique key for file_list to avoid overwrites across directories + file_key <- paste0(pathdir_label, "___", fname) + file_list[[file_key]] <- file_data + total_files <- total_files + 1 + } + } else { + # Load specific file if iamc_filename is provided + if(!file.exists(file.path(results_path, iamc_filename))) { + warning("File not found: ", file.path(results_path, iamc_filename)) + next + } + + message(" Loading: ", iamc_filename) + if(stringr::str_detect(iamc_filename, "\\.xlsx$")) { + file_data <- openxlsx::read.xlsx(file.path(results_path, iamc_filename), sheet=1) + names(file_data) <- toupper(names(file_data)) + } else if(stringr::str_detect(iamc_filename, "\\.csv\\.zip$")) { + file_data <- data.table::fread(cmd=paste0('unzip -cq "', file.path(results_path, iamc_filename), '" ', gsub(".zip", "", basename(iamc_filename))), header=TRUE, quote="\"", sep=",", check.names=FALSE) + names(file_data) <- toupper(names(file_data)) + } else if(stringr::str_detect(iamc_filename, "\\.csv$") && !stringr::str_detect(iamc_filename, "\\.csv\\.zip$")) { + file_data <- data.table::fread(file.path(results_path, iamc_filename), header=TRUE, quote="\"", sep=",", check.names=FALSE) + names(file_data) <- toupper(names(file_data)) + } + + # Add pathdir column if multiple directories + if(length(results_dir) > 1) { + file_data$PATHDIR <- pathdir_label + } + + file_key <- paste0(pathdir_label, "___", iamc_filename) + 
file_list[[file_key]] <- file_data + total_files <- total_files + 1 + } +} + +if(length(file_list)==0) stop("No IAMC files found in any of the specified directories") + +iiasadb_snapshot <- data.table::rbindlist(file_list, fill=TRUE) +message("\nCombined ", total_files, " file(s) from ", length(results_dir), " director(ies) with ", nrow(iiasadb_snapshot), " total rows") + +# Convert year columns to numeric and pivot longer +iiasadb_snapshot <- iiasadb_snapshot %>% dplyr::mutate(dplyr::across(matches("^\\d{4}$"), ~suppressWarnings(as.numeric(.x)))) + +# Determine which columns to keep (not year columns) +if(length(results_dir) > 1) { + non_year_cols <- c("MODEL", "SCENARIO", "REGION", "VARIABLE", "UNIT", "PATHDIR") +} else { + non_year_cols <- c("MODEL", "SCENARIO", "REGION", "VARIABLE", "UNIT") +} + +iiasadb_snapshot <- iiasadb_snapshot %>% + tidyr::pivot_longer(cols=-dplyr::all_of(non_year_cols), names_to="YEAR") %>% + dplyr::mutate(YEAR=as.integer(YEAR)) %>% + as.data.frame() + +assign("iiasadb_snapshot", iiasadb_snapshot, envir=.GlobalEnv) +} +iiasadb_snapshot <- iiasadb_snapshot %>% dplyr::mutate(REGION=toupper(REGION)) +if(!exists("iiasadb_snapshot")) stop("Please check you specified a correct iiasadb file or connection.") + +# Also assign to iiasadb_data for use with get_iiasadb() function +assign("iiasadb_data", iiasadb_snapshot, envir=.GlobalEnv) + +# Pre-load historical data if add_historical is enabled +if(add_historical) { + iiasadb_with_historical <- list() + for(varname in map_var_hist$varname_model) { + if(nrow(iiasadb_snapshot %>% dplyr::filter(VARIABLE==varname))>0) { + iiasadb_with_historical[[varname]] <- add_historical_values(iiasadb_snapshot %>% dplyr::filter(VARIABLE==varname), varname=varname, iiasadb=TRUE, verbose=FALSE) + } + } + iiasadb_historical <- data.table::rbindlist(iiasadb_with_historical) %>% dplyr::filter(stringr::str_detect(SCENARIO, "historical")) %>% as.data.frame() +} else { + # Create empty historical data frame + 
iiasadb_historical <- data.frame() +} + +assign("iiasadb_snapshot", iiasadb_snapshot, envir=.GlobalEnv) +assign("iiasadb_historical", iiasadb_historical, envir=.GlobalEnv) + +# Save the snapshot only if we fetched new data (not if we loaded from existing snapshot) +# For iamc_databasename: only save if we downloaded from DB +# For iamc_filename/files: always save since we loaded from files +should_save <- (!exists("snapshot_loaded_from_file") || !snapshot_loaded_from_file) + +if(should_save) { + save_path <- NULL + if(exists("results_dir") && length(results_dir) > 0) { + # Save to results_dir + save_path <- file.path(results_dir[1], "iiasadb_snapshot.Rdata") + save(iiasadb_snapshot, iiasadb_historical, file=save_path) + message("Saved snapshot to: ", save_path) + } else { + # Fall back to package location if results_dir doesn't exist + pkg_save_path <- system.file("gdxcompaR", "iiasadb", "iiasadb_snapshot.Rdata", package="witchplot") + if(pkg_save_path != "" && dir.exists(dirname(pkg_save_path))) { + save_path <- pkg_save_path + save(iiasadb_snapshot, iiasadb_historical, file=save_path) + message("Saved snapshot to: ", save_path) + } else { + # Try inst/ directory if package location doesn't work + inst_path <- file.path("inst", "gdxcompaR", "iiasadb", "iiasadb_snapshot.Rdata") + if(dir.exists(dirname(inst_path))) { + save_path <- inst_path + save(iiasadb_snapshot, iiasadb_historical, file=save_path) + message("Saved snapshot to: ", save_path) + } + } + } +} +if(launch) shiny::runApp(appDir=system.file("gdxcompaR", "iiasadb", package="witchplot")) +} diff --git a/README.md b/README.md index 94a9969..ee3c9bb 100644 --- a/README.md +++ b/README.md @@ -1,42 +1,325 @@ -witch-plot - A library of (static and dynamic) plotting tools for the WITCH model and DICE/RICE model series. +# witchplot -Started in 2014 by Johannes Emmerling to have quick and reproducible sets of graphs for diagnostics, inspection, and publication. 
+Interactive Visualization Toolkit for GAMS IAM Model Results (WITCH, RICE, DICE, FIDELIO, ...) with the aim to make model results easily accessible, able to validate, and comparable with an open source tool, see, e.g., https://doi.org/10.12688/openreseurope.18824.2. -## Requirements + -1) **Installation:** Install R, Rtools, RStudio, GAMS, Github Desktop, (and optionally VSCode as advanced editor) +## Installation -* R from https://cran.r-project.org/bin/windows/base/ -* Rtools from https://cran.r-project.org/bin/windows/Rtools/ -* RStudio from https://rstudio.com/products/rstudio/download/#download -* GAMS from https://www.gams.com/download/ (Run the installer in advanced mode and mark the check-box `Add GAMS directory to PATH environment variable`). -* GitHub Desktop from https://desktop.github.com/ and log-in with your personal GitHub account. -* VisualStudio Code from https://code.visualstudio.com/ (optional) + -## Installation and running the program +Install directly from GitHub using devtools: -Get the source code either cloning it in Github desktop (preferred), download it from https://github.com/witch-team/witch-plot, or using git at the command line. + + +```r + +# Install devtools if not already installed + +if (!require("devtools")) install.packages("devtools") + + + +# Install witchplot from GitHub + +devtools::install_github("witch-team/witchplot") + + + +# Load the package + +library(witchplot) + +``` + + + +Alternatively, install from a local source: + + + +```r + +install.packages("path/to/witchplot", repos=NULL, type="source") + +library(witchplot) + +``` + + + +## Usage + + By default, running run_{modelname} loads all the files and functions, and launches a shiny app to analyze and visualize the results. By using the argument launch=FALSE only data is loaded and you can develop your own code using the existing functions (see below). 
+ +### WITCH Model + +```r + +library(witchplot) + +run_witch() # Uses defaults: results_dir="./" + + + +# Or specify custom paths: + +run_witch( +results_dir=c("results", "results_v2") +) -To run the program, open the folder "witch-plot" in Rstudio as a project or execute on the command line -```Shell -Rscript plotgdx_[witch|rice].R ``` -The script will automatically search all results_*.gdx files in the specified folder in the second line of the script and include all in the analysis and plots. + + +### RICE50+ Model + +```r + +run_rice() # Uses defaults in current folder + + + +# Or specify custom paths: + +run_rice( + +results_dir="results" +) + +``` + + + +### FIDELIO Model + +```r + +run_fidelio() # Uses defaults + +``` + + + +### IIASADB (for IAMC format model results data) + +```r + +# Reads all CSV and XLSX files from results directory (default behavior) + +run_iiasadb() # Automatically finds and combines all .csv, .xlsx, .csv.zip files + + + +# Or load a specific file + +run_iiasadb(iamc_filename="data.csv") + + +# Or connect to IIASA database directly + +run_iiasadb(iamc_databasename="IIASA-database-name") + +``` + + + +## Options (with defaults) + + + +```r + +options( + +deploy_online=FALSE, # Save graphs if not deployed online + +figure_format="png", # Output format + +add_historical=TRUE, # Add historical data + +yearmin=1980, # Minimum year for plots + +yearmax=2100, # Maximum year for plots + +write_plotdata_csv=FALSE # Save plot data as CSV + +) + +``` + + + +## Parameters + + + +- **results_dir**: Path(s) to results directories (default: "./", can be vector for multiple dirs) + +- **restrict_files**: File pattern filter (default: "results_") + +- **exclude_files**: Files to exclude + +- **launch**: Whether to launch Shiny app (default: TRUE). 
Set to FALSE to load data without launching UI + +- **All options**: Can be passed as function parameters + + + +## Available Functions + + + +### Main Application Launchers + +- **run_witch()** - Launch WITCH model interactive visualization app + +- **run_rice()** - Launch RICE50+ model interactive visualization app + +- **run_fidelio()** - Launch FIDELIO model interactive visualization app + +- **run_iiasadb()** - Launch IIASA database comparison app + + + +### Data Loading & Processing + +- **get_witch()** - Load WITCH model variables from GDX files + +- **get_iiasadb()** - Retrieve data from IIASA scenario databases + +- **add_historical_values()** - Add historical data to model projections + +- **write_witch_data_csv()** - Export WITCH variables to CSV + + + +### Plotting Functions + + + +#### Energy Plots + +- **Primary_Energy_Mix()** - Primary energy mix over time (area/bar plots) + +- **Electricity_Mix()** - Electricity generation mix visualization + +- **Energy_Trade()** - Fuel trade flows between regions + +- **Investment_Plot()** - Energy investment trajectories + +- **Power_capacity()** - Power generation capacity evolution + + + +#### Emission Plots + +- **Plot_Global_Emissions()** - Global emission trajectories + +- **Global_Emissions_Stacked()** - Stacked emissions with carbon budget + +- **Intensity_Plot()** - Emission intensity metrics + +- **Sectoral_Emissions()** - Emissions by sector + +- **Mitigation_Sources()** - Sources of emission reductions + +- **Mitigation_Decomposition()** - Decomposition of mitigation effort + + + +#### Climate Plots + +- **climate_plot()** - Temperature and climate indicators + +- **gridded_temp_map()** - Gridded temperature change maps + + + +#### Policy & Cost Plots + +- **Policy_Cost()** - Climate policy costs (GDP loss, consumption) + +- **Policy_Cost_Decomposition()** - Decompose policy costs by component + +- **Carbon_Price()** - Carbon price trajectories + +- **Social_Cost_of_Carbon()** - Social cost of carbon 
by region + +- **SCC_plot()** - Social cost of carbon visualization + + + +#### Inequality Plots + +- **plot_inequality()** - Income/consumption inequality (Lorenz, Gini, quantiles) + +- **plot_winners_losers_time()** - Winners and losers from policy over time + +- **compute_global_inequality()** - Global inequality metrics + + + +#### Maps + +- **witchmap()** - Regional maps of model variables + +- **countrymap()** - Country-level map visualization + +- **map_simple()** - Simple regional mapping + +- **plot_map_region_definition()** - Show regional aggregation definitions + + + +#### RICE50+ Specific + +- **plot_macc_fit()** - Marginal abatement cost curve fitting + + + +### Utility Functions + +- **plot_witch()** - Generic plotting function for WITCH data + +- **get_plot_witch()** - Load and plot WITCH variables + +- **create_witch_plot_online()** - Generate online plot collections + +- **diagnostics_plots()** - Model diagnostics and calibration checks + + + +### Helper Functions + +- **ttoyear()** - Convert time index to year + +- **yeartot()** - Convert year to time index + +- **saveplot()** - Save plots with consistent formatting + +- **add_change_from_reference()** - Calculate change from reference scenario + +- **make_global_tr()** - Aggregate to global totals + +- **make_cumulative()** - Calculate cumulative values over time + +- **unit_conversion()** - Convert units with metadata -## Main Functions +- **default_meta_param()** - Get default regional aggregation metadata -### get_witch("varname") + -Loads the variable "varname" from all results GDX files and stores them in a data.frame names "varname". +## Authors -### gdxcompaR -ShinyApp based dynamic comparison tool for multiple results files. -Simply run 'runApp(appDir = "gdxcompaR/witch")' or runApp(appDir = "gdxcompaR/rice") to launch the interactive App after sourcing the main file 'plotgdx_witch.R' or 'plotgdx_rice.R'. 
+ +Copyright (c) 2025 Johannes Emmerling and WITCH Team + -## Further Information +## License -For further information please contact johannes.emmerling@eiee.org + +Apache License \ No newline at end of file diff --git a/data-raw/README.md b/data-raw/README.md new file mode 100644 index 0000000..7ab2471 --- /dev/null +++ b/data-raw/README.md @@ -0,0 +1,103 @@ +# Data-Raw Directory + +This directory contains source data files and scripts for generating package data. + +## Organization + +``` +data-raw/ +├── data_historical_values.gdx # Source file (NOT in git, NOT released) +├── generate_historical_data.R # Script to process source file +└── README.md # This file +``` + +## Source File: `data_historical_values.gdx` + +**Location**: `data-raw/data_historical_values.gdx` + +**Status**: +- ❌ Not tracked in git (see `.gitignore`) +- ❌ Not released with package +- ✅ Contains full historical data (all regions, all variables) + +**Purpose**: Master source file with complete historical data that gets processed into region-specific files. + +## Processing Script: `generate_historical_data.R` + +**Purpose**: Converts the source GDX file into region-specific files for package distribution. + +**What it does**: +1. Reads `data-raw/data_historical_values.gdx` +2. Converts to multiple region mappings (witch17, witch20, ed58, etc.) +3. Saves processed files to `data/` directory: + - `data/data_historical_values_witch17.gdx` + - `data/data_historical_values_witch20.gdx` + - `data/data_historical_values_ed58.gdx` + - etc. +4. 
Extracts set dependencies and saves to: + - `data/historical_data_set_dependencies.rds` + - (a named R list with one entry per parameter) + +**Usage**: +```r +# From package root directory: +source("data-raw/generate_historical_data.R") +``` + +## Generated Files in `data/` Directory + +These files ARE released with the package: + +- `data_historical_values_{reg_id}.gdx` - Region-specific historical data +- `historical_data_set_dependencies.rds` - Set dependencies used by package functions +- (a single RDS file replaces the former CSV detail/summary pair) + +## Workflow + +### Initial Setup +1. Place source file: `data-raw/data_historical_values.gdx` +2. Run: `source("data-raw/generate_historical_data.R")` +3. Commit generated files in `data/` to git +4. DO NOT commit source file (automatically ignored) + +### Updating Historical Data +1. Update source file: `data-raw/data_historical_values.gdx` +2. Run: `source("data-raw/generate_historical_data.R")` +3. Review changes in `data/` directory +4. Commit updated files in `data/` + +### Package Build +The `data/` directory files are automatically included in the package build. +The `data-raw/` source file is excluded. + +## Why This Organization? 
+ +**Separation of concerns**: +- `data-raw/` = Private source data (large, complete, not released) +- `data/` = Processed data (optimized, regional, released with package) + +**Benefits**: +- Smaller package size (only necessary regions) +- User privacy (full data not distributed) +- Reproducibility (script documents processing) +- Flexibility (easy to add new regions) + +## Git Tracking + +``` +data-raw/ +├── data_historical_values.gdx # ❌ NOT in git (.gitignore) +├── generate_historical_data.R # ✅ IN git +└── README.md # ✅ IN git + +data/ +├── data_historical_values_*.gdx # ✅ IN git (processed files) +└── historical_data_set_dependencies*.csv # ✅ IN git +``` + +## Notes + +- The source file must be manually provided/updated +- Region mappings processed are defined in `generate_historical_data.R` +- Set dependencies are automatically extracted during processing +- Processed files are ready for package distribution diff --git a/data-raw/generate_historical_data.R b/data-raw/generate_historical_data.R new file mode 100644 index 0000000..c52e503 --- /dev/null +++ b/data-raw/generate_historical_data.R @@ -0,0 +1,255 @@ +# Generate historical data files for each region mapping +# This script creates data_historical_values_{reg_id}.gdx files +# for each region mapping available in witchtools + +library(witchtools) +library(gdxtools) +library(data.table) + +# Get region and time mappings from witchtools +region_mappings <- witchtools::region_mappings +time_mappings <- witchtools::time_mappings + +# Only process a subset of region mappings +# These are the most commonly used mappings for WITCH models and IIASA databases +mappings_to_process <- c( + # WITCH standard mappings + "witch17", + # RICE mappings + "maxiso3", "ed58", + # IIASA/generic mappings + "global", "r5" +) + +cat("Will process", length(mappings_to_process), "region mappings:\n") +cat(paste(mappings_to_process, collapse = ", "), "\n\n") + +# Determine the correct path to the source GDX file +if 
(file.exists("data-raw/data_historical_values.gdx")) { + source_gdx <- normalizePath("data-raw/data_historical_values.gdx", winslash = "/") + data_dir <- "data" +} else { + stop("Source GDX file not found.\n", + "Please place the source file at: data-raw/data_historical_values.gdx\n", + "This file should contain the full (unreleased) historical data.") +} + +cat("Using source file:", source_gdx, "\n") +cat("Output directory:", data_dir, "\n\n") + +# Create data directory if it doesn't exist +if (!dir.exists(data_dir)) { + dir.create(data_dir, recursive = TRUE) +} + +# Load the source GDX file once +cat("Loading source GDX file...\n") +source_gdx_obj <- gdx(source_gdx) + +# Manual conversion function +convert_historical_data <- function(source_gdx_obj, reg_id, region_mapping) { + cat(" Converting to", reg_id, "regions...\n") + + converted_params <- list() + + # Process each parameter + for (param_name in source_gdx_obj$parameters$name) { + param_data <- gdxtools::extract(source_gdx_obj, param_name) + + # Identify the region column (iso3 or any region mapping name) + col_names <- names(param_data)[names(param_data) != "value"] + region_col <- NULL + + if ("iso3" %in% col_names) { + region_col <- "iso3" + } else { + # Check if any column matches a region mapping name + for (col in col_names) { + if (col %in% names(region_mappings)) { + region_col <- col + break + } + } + } + + if (!is.null(region_col)) { + # This parameter has a region dimension - aggregate it + param_data <- as.data.table(param_data) + + # Uppercase the region column to match mapping (iso3 codes are uppercase) + if (region_col == "iso3") { + param_data[[region_col]] <- toupper(param_data[[region_col]]) + } + + # Get the appropriate mapping + if (region_col == "iso3") { + # Map from iso3 to target region + mapping <- region_mapping + setnames(mapping, c("target_region", "source_region")) + param_data <- merge(param_data, mapping, + by.x = region_col, by.y = "source_region", + all.x = TRUE, 
allow.cartesian = TRUE) + + # Remove rows where mapping failed + param_data <- param_data[!is.na(target_region)] + + # Remove the original region column + param_data[[region_col]] <- NULL + + # Aggregate by target region and other dimensions + group_cols <- c("target_region", setdiff(col_names, region_col)) + param_data <- param_data[, .(value = sum(value, na.rm = TRUE)), by = group_cols] + + # Rename target_region to 'n' and preserve original column order + # Original columns had region_col at a specific position, we need to put 'n' there + setnames(param_data, "target_region", "n") + + # Reorder columns to match original order (with 'n' replacing region_col position) + # Original order was: col_names (which includes region_col) + "value" + # New order should be: col_names with region_col replaced by 'n' + "value" + original_order <- col_names + original_order[original_order == region_col] <- "n" + param_data <- param_data[, c(original_order, "value"), with = FALSE] + } else { + # Already in a region mapping format, just rename to 'n' + # Preserve column order + old_names <- names(param_data) + setnames(param_data, region_col, "n") + # Ensure value is last + new_order <- setdiff(names(param_data), "value") + param_data <- param_data[, c(new_order, "value"), with = FALSE] + } + } else { + # No region dimension, keep as-is (e.g., global parameters) + param_data <- as.data.table(param_data) + } + + # Handle V1 column that might actually be year + if ("V1" %in% names(param_data) && !("year" %in% names(param_data))) { + # Check if V1 contains year-like values (4-digit numbers >= 1900) + v1_vals <- param_data[[1]] # V1 is first column + v1_numeric <- suppressWarnings(as.numeric(as.character(v1_vals))) + if (!all(is.na(v1_numeric)) && all(v1_numeric[!is.na(v1_numeric)] >= 1900 & v1_numeric[!is.na(v1_numeric)] <= 2200)) { + # V1 contains years, rename it + setnames(param_data, "V1", "year") + } + } + + # Ensure year column is numeric if it exists + if ("year" %in% 
names(param_data)) { + param_data[, year := as.numeric(as.character(year))] + } + + # Store the converted parameter + converted_params[[param_name]] <- param_data + } + + return(converted_params) +} + +# Loop through selected region mappings and create converted files +cat("\nGenerating historical data files for selected region mappings...\n\n") + +for (.reg_id in mappings_to_process) { + # Check if this mapping exists in witchtools + if (!(.reg_id %in% names(region_mappings))) { + warning("Region mapping '", .reg_id, "' not found in witchtools. Skipping.") + next + } + + cat("Processing region mapping:", .reg_id, "\n") + + tryCatch({ + # Get the region mapping + region_mapping <- as.data.table(region_mappings[[.reg_id]]) + + # The mapping has two columns: target (reg_id name) and source (iso3) + # Rename columns for clarity + col1 <- names(region_mapping)[1] # target region (e.g., "witch17", "global") + col2 <- names(region_mapping)[2] # source region (usually "iso3") + + # Convert historical data + converted_params <- convert_historical_data(source_gdx_obj, .reg_id, region_mapping) + + # Write to GDX file + target_file <- file.path(data_dir, paste0("data_historical_values_", .reg_id, ".gdx")) + cat(" Writing GDX file...\n") + gdxtools::write.gdx(target_file, params = converted_params) + + cat(" Created:", target_file, "\n") + + }, error = function(e) { + warning("Failed to process region mapping '", .reg_id, "': ", e$message) + }) +} + +cat("\nHistorical data generation complete!\n") +cat("Generated files:\n") +print(list.files(data_dir, pattern = "^data_historical_values_.*\\.gdx$")) + +# Extract set dependencies from the ORIGINAL source GDX file +cat("\nExtracting set dependencies from original historical data file...\n") + +# Read the original source file to get true set dependencies +cat("Reading:", source_gdx, "\n") + +# Get all region mapping names to replace with 'n' +region_set_names <- names(region_mappings) + +# Create a named list where each element is 
a parameter +# and contains a vector of set names with regions replaced by 'n' +hist_set_deps <- list() + +# Get all parameters from the GDX file +params <- source_gdx_obj$parameters + +if (nrow(params) > 0) { + for (i in 1:nrow(params)) { + param_name <- params$name[i] + param_dim <- params$dim[i] + + if (param_dim > 0) { + # Get the actual data to see column names + param_data <- tryCatch({ + gdxtools::extract(source_gdx_obj, param_name) + }, error = function(e) { + NULL + }) + + if (!is.null(param_data) && ncol(param_data) > 1) { + # Get column names (excluding 'value') + col_names <- names(param_data) + col_names <- col_names[col_names != "value"] + + # Replace column names to match WITCH conventions: + # - Any region mapping name or 'iso3' becomes 'n' + # - Keep everything else as is (including 'year') + col_names_standardized <- sapply(col_names, function(name) { + if (name == "iso3" || name %in% region_set_names) { + return("n") + } else { + return(name) + } + }, USE.NAMES = FALSE) + + # Store as a vector + hist_set_deps[[param_name]] <- col_names_standardized + + cat(" ", param_name, ": ", paste(col_names_standardized, collapse = ", "), "\n", sep = "") + } + } + } +} + +# Save as RDS file (preserves list structure) +if (length(hist_set_deps) > 0) { + rds_file <- file.path(data_dir, "historical_data_set_dependencies.rds") + saveRDS(hist_set_deps, rds_file) + cat("\nSet dependencies saved to:", rds_file, "\n") + cat("Total parameters:", length(hist_set_deps), "\n") + + cat("\nExample set dependencies:\n") + print(head(hist_set_deps, 10)) +} else { + warning("No set dependencies extracted!") +} diff --git a/data/data_historical_values_ed58.gdx b/data/data_historical_values_ed58.gdx new file mode 100644 index 0000000..9a72261 Binary files /dev/null and b/data/data_historical_values_ed58.gdx differ diff --git a/data/data_historical_values_global.gdx b/data/data_historical_values_global.gdx new file mode 100644 index 0000000..38a089c Binary files /dev/null and 
b/data/data_historical_values_global.gdx differ diff --git a/data/data_historical_values_r5.gdx b/data/data_historical_values_r5.gdx new file mode 100644 index 0000000..5cf1e03 Binary files /dev/null and b/data/data_historical_values_r5.gdx differ diff --git a/data/data_historical_values_witch17.gdx b/data/data_historical_values_witch17.gdx new file mode 100644 index 0000000..d61b937 Binary files /dev/null and b/data/data_historical_values_witch17.gdx differ diff --git a/data/historical_data_set_dependencies.rds b/data/historical_data_set_dependencies.rds new file mode 100644 index 0000000..c130e39 Binary files /dev/null and b/data/historical_data_set_dependencies.rds differ diff --git a/examples_witch.R b/examples_witch.R deleted file mode 100644 index ce75040..0000000 --- a/examples_witch.R +++ /dev/null @@ -1,113 +0,0 @@ - - -MCOST_INV <- get_witch("MCOST_INV", check_calibration = T) -ggplot(MCOST_INV %>% filter(jreal %in% c("elpv", "elcsp", "elwindon", "elwindoff") & t<=10 & n=="usa")) + geom_line(aes(ttoyear(t), value*1e3, color=file)) +facet_grid(pathdir ~ jreal, ncol=1) + xlab("") + ylab("Capital cost [$/kW]") - - - -utility_cebge_global <- utility_cebge_global %>% mutate(growth_rate=as.numeric(gsub("R",3.4,str_sub(file,-1,-1))), cb=str_sub(file,-6,-4)) -ggplot(utility_cebge_global) + geom_point(aes(growth_rate, value, color=cb)) + geom_line(aes(growth_rate, value, color=cb)) + scale_x_continuous(limits = c(0,10), breaks=seq(0,10,1)) + ylab("Welfare (global CEBGE)") -saveplot("CEBGE") - -#implicit carbon price in cooperative -#1e3/c2co2 -# get_witch("eqq_emi_co2ffi_c_world", field = "m") -# get_witch("eqq_y_c_world", field = "m") -# setnames(eqq_y_c_world, "value", "marg_cons") -# eqq_emi_co2ffi_c_world$marg_cons <- eqq_y_c_world$marg_cons -# eqq_emi_co2ffi_c_world$co2price <- -1e3/(44/12)*eqq_emi_co2ffi_c_world$value/eqq_emi_co2ffi_c_world$marg_cons -# ggplot(eqq_emi_co2ffi_c_world %>% filter(ttoyear(t)<=2100)) + geom_line(aes(ttoyear(t), co2price, 
color=file)) -#Carbon budget -Q_EMI <- get_witch("Q_EMI") -Q_EMI <- Q_EMI %>% mutate(growth_rate=as.numeric(gsub("R",3.4,str_sub(file,-1,-1))), cb=str_sub(file,-6,-4)) -ggplot(Q_EMI %>% filter(e=="co2" & t>=4 & t<=20) %>% group_by(file) %>% summarize(cb=sum(value)*5*44/12)) + geom_bar(aes(file, cb, fill=file), stat = "identity") + ylab("Carbon budget 2018-2100") -# compare all gases -ggplot(Q_EMI %>% filter(e %in% c("co2", "co2ffi", "co2lu", "ch4", "n2o") & t>=4 & t<=20) %>% group_by(cb, growth_rate, e) %>% summarize(cbact=sum(value)*5*44/12)) + geom_bar(aes(growth_rate, cbact, fill=cb), stat = "identity") + ylab("Carbon budget 2018-2102") + facet_grid(e ~ cb) + theme(legend.position = "none") -saveplot("Compare all gases") -# 2100: -ggplot(Q_EMI %>% filter(e=="co2" & t>=4 & ttoyear(t)<=2100) %>% group_by(file,cb,growth_rate) %>% summarize(cbactual=sum(value)*5*44/12)) + geom_bar(aes(growth_rate, cbactual, fill=cb), stat = "identity") + ylab("Carbon budget 2018-2003, all CO2") + facet_grid(. ~ cb) + theme(legend.position = "none") -saveplot("Actual Carbon Budget") -#CO2 over time -ggplot(Q_EMI %>% filter(e=="co2" & ttoyear(t)>=2000) %>% group_by(t, cb, growth_rate) %>% summarize(co2emi=sum(value)*44/12)) + geom_line(aes(ttoyear(t), co2emi, color=as.factor(growth_rate)), stat = "identity") + ylab("CO2 Emissions") + facet_grid(cb ~ .) 
+ theme(legend.position = "bottom") + xlab("") -saveplot("Emission Profile") -#Carbon Price -carbonprice <- get_witch("carbonprice") -carbonprice <- carbonprice %>% mutate(growth_rate=as.numeric(gsub("R",3.4,str_sub(file,-1,-1))), cb=str_sub(file,-6,-4)) -ggplot(carbonprice %>% group_by(t, cb,growth_rate, file) %>% filter(ttoyear(t)<=2100) %>% summarize(ctax=mean(value)*1e3/(44/12))) + geom_line(aes(ttoyear(t), ctax, color=cb), stat = "identity") + ylab("Carbon price [$/tCO2eq]") + facet_grid(cb ~ growth_rate) -saveplot("Carbon price") - - - -netzeroyear <- Q_EMI %>% filter(e=="co2" & ttoyear(t)>=2015 & ttoyear(t) <=2100) %>% mutate(year=ttoyear(t)) %>% select(-t) %>% group_by(year, cb, growth_rate) %>% summarize(co2emi=sum(value)*44/12) %>% ungroup() %>% group_by(cb, growth_rate) %>% summarize(cbactual=sum(co2emi*5), net0year=min(2100,min(year[co2emi<0.1]))) -ggplot(netzeroyear) + geom_point(aes(growth_rate, net0year, color=cb)) + geom_line(aes(growth_rate, net0year, color=cb)) + xlab("") + scale_x_continuous(limits = c(0,10), breaks=seq(0,10,1)) -saveplot("Net zero year") - -netzeroyear <- merge(netzeroyear, utility_cebge_global %>% select(-file,-pathdir,-t), by = c("cb", "growth_rate"), all=T) -ggplot(netzeroyear) + geom_point(aes(net0year, value, color=cb)) + geom_line(aes(net0year, value, color=cb)) + ylab("Welfare (global CEBGE)") -saveplot("Net0year welfare") -ggplot(netzeroyear) + geom_line(aes(growth_rate, value*((cbactual-390)/as.numeric(cb))^(-2.2), color=cb)) + scale_x_continuous(limits = c(0,10), breaks=seq(0,10,1)) + ylab("Welfare (global CEBGE)") -saveplot("welfare correction") - - - - - - -diagnostics_plots() #Basic diagnostic plots - -#Main part, get data plots etc. 
-Plot_Global_Emissions(bauscen = "bau") -get_plot_witch("carbonprice", "Carbon Price", "na", "na", aggregation = "global_mean") -get_plot_witch("Q", "GDP", "iq", "y", aggregation = "global_sum") -get_plot_witch("Q", "GDP", "iq", "y", aggregation = "regional") -get_plot_witch("Q_EMI", "CO2_Emissions", "e", "co2", aggregation = "global_sum", cumulative = T) -get_plot_witch("TEMP", "Temperature", "m", "atm", aggregation = "global_mean") -get_plot_witch("Q_EMI", "CCS_Emissions", "e", "ccs", aggregation = "global_sum") -get_plot_witch("Q_EMI", "CCS_Emissions_Stored", "e", "ccs", aggregation = "global_sum", cumulative = T) -get_plot_witch("tpes", "tpes", "na", "na", aggregation = "global_sum") -get_plot_witch("Q_OUT", "Oil_Extraction", "f", "oil", aggregation = "regional") - - - - -#Special Plots: -#Special focus regions to report for -regions_focus <- c("World") - -Energy_Trade(fuelplot = "oil") -Primary_Energy_Mix(PES_y = "value", regions = regions_focus, years = seq(1990, 2100, 5), plot_type = "area") -Electricity_Mix(Electricity_y = "value", regions = regions_focus, years = seq(1990, 2100, 5), plot_type = "area") - -Intensity_Plot(years = c(2050,2100), regions="World", year0=2010, scenplot = scenlist) -Global_Emissions_Stacked(scenario = scenlist[1]) - -#Mitigation_Decomposition(regions=regions_focus, scenario_stringency_order = c("DIAG-Base", "DIAG-C30-gr5"), scen_short=c("Base", "C30-gr5"), plotname="Mitigation Decomposition") -Investment_Plot(regions=regions_focus) -Sectoral_Emissions(regions=regions_focus) -Policy_Cost(discount_rate=5, regions=regions_focus, bauscen = "bau", show_numbers=TRUE, tmax=10) - -#Impacts and SCC -SCC_plot(regions = "World") -#Climate plot -climate_plot() - -#Impact Map -t_map = 20; bau_scen = scenlist[1] -Q <- get_witch("Q") -impact_map_data <- Q %>% filter(iq=="y" & t==t_map) %>% group_by(n, pathdir) %>% mutate(value = -((value/sum(value[file==bau_scen]))-1)*100) %>% filter(is.finite(value)) -witchmap(impact_map_data, 
file_report=scenlist[2], t_report=t_map, mapcolor="Reds", map_name="Impact Map", map_legend = "GDP loss [%]") - -#Export multiple variables as time series panel dataset "witch_dataset_long.csv" -#write_witch_data_csv(c("l", "ykali"), years = seq(1960, 2100, 20)) - - - - - - - - - - - - diff --git a/gdxcompaR/fidelio/server.R b/gdxcompaR/fidelio/server.R deleted file mode 100644 index 5eb746e..0000000 --- a/gdxcompaR/fidelio/server.R +++ /dev/null @@ -1,574 +0,0 @@ -# Define server -shinyServer(function(input, output, session) { - # some global flags - verbose <- FALSE - save_plot <- FALSE - growth_rate <- FALSE - - # get list of variables and parameters in all files - list_of_variables <- NULL - for (f in filelist) { - .gdx <- gdx(paste(file.path(fullpathdir[1], f), ".gdx", sep = "")) - list_of_variables <- c(list_of_variables, all_items(.gdx)$variables) - list_of_variables <- c(list_of_variables, all_items(.gdx)$parameters) # also all parameters - } - list_of_variables <- unique(list_of_variables) - list_of_variables <- c(sort(str_subset(list_of_variables, "^[:upper:]")), sort(str_subset(list_of_variables, "^[:lower:]"))) - list_of_variables <- str_subset(list_of_variables, pattern = "_t$") #FIDELIO keep only time dependent variables - - # Scenario selector - output$select_scenarios <- renderUI({ - selectInput("scenarios_selected", "Select scenarios", unname(scenlist), size = length(scenlist), selectize = F, multiple = T, selected = unname(scenlist)) - }) - - # Variable selector - output$select_variable <- renderUI({ - selectInput("variable_selected", "Select variable", list_of_variables, size = 1, selectize = F, multiple = F, selected = list_of_variables[1]) - }) - variable_selected_reactive <- reactive({ - input$variable_selected - }) - - # Reactively update variable selector - variable_input <- reactive({ - return(input$variable_selected) - }) - - # Display selected variable and set - output$varname <- renderText({ - paste("Variable:", 
variable_selected_reactive(), " Element:", paste(input$additional_set_id_selected, collapse = ",")) - }) - - # Display selected variable and set - output$varname2 <- renderText({ - paste("Variable:", variable_selected_reactive(), " Element:", paste(input$additional_set_id_selected, collapse = ",")) - }) - - # REGION selector - output$select_regions <- renderUI({ - regions_for_selector <- c(witch_regions, "World") - selectInput("regions_selected", "Select regions", regions_for_selector, size = min(17, length(regions_for_selector)), selectize = F, multiple = T, selected = witch_regions) - }) - - observeEvent(input$button_saveplotdata, { - variable <- input$variable_selected - print("Current plot saved in subdirectory 'graphs'") - saveplot(variable, width = 14, height = 7) - }) - - # Additional selector for specific Panels - - - - # MAIN CODE FOR PLOT GENERATION - output$gdxcompaRplot <- renderPlot({ - assign("historical", input$add_historical, envir = .GlobalEnv) - ylim_zero <- input$ylim_zero - field_show <- input$field - growth_rate <- input$growth_rate - # plotly_dynamic <- input$plotly_dynamic - variable <- input$variable_selected - if (is.null(variable)) variable <- list_of_variables[1] - # get data - afd <- get_witch(variable, check_calibration = TRUE, field = field_show) - if (verbose) print(str_glue("Variable {variable} loaded.")) - # get the name of the additional set - #get the name of the additional set - additional_sets <- setdiff(colnames(afd), c(file_group_columns, "pathdir", "t", "n", "value")) - #extract additional set elements - if(length(additional_sets)==0){additional_set_id="na"; set_elements = "na"; additional_set_id2="na"; set_elements2 = "na"} - else if(length(additional_sets)==1) - { - additional_set_id <- additional_sets[1] - set_elements <- unique(tolower(as.data.frame(afd)[, match(additional_set_id, colnames(afd))])) - set_elements <- sort(set_elements) - additional_set_id2 <- "na" - set_elements2 <- "na" - } - else 
if(length(additional_sets)==2) - { - additional_set_id <- additional_sets[1] - set_elements <- unique(tolower(as.data.frame(afd)[, match(additional_set_id, colnames(afd))])) - set_elements <- sort(set_elements) - additional_set_id2 <- additional_sets[2] - set_elements2 <- unique(tolower(as.data.frame(afd)[, match(additional_set_id2, colnames(afd))])) - set_elements2 <- sort(set_elements2) - } - - #Selector for additional set - output$choose_additional_set <- renderUI({ - variable <- variable_input() - if (is.null(variable)) { - variable <- list_of_variables[1] - } - sel <- input$additional_set_id_selected - if (is.null(sel)) { - if ("co2_ffi" %in% set_elements) { - sel <- "co2_ffi" - } else { - sel <- set_elements[1] - } - } - size_elements <- min(length(set_elements), 5) - selectInput(inputId = "additional_set_id_selected", - label = "Indices 1:", - choices = set_elements, - size = size_elements, - selectize = FALSE, - multiple = TRUE, - selected = sel) - }) - #Selector for additional set #2 - output$choose_additional_set2 <- renderUI({ - variable <- variable_input() - if(is.null(variable)) variable <- list_of_variables[1] - sel2 <- input$additional_set_id_selected2 - size_elements2 <- min(length(set_elements2), 5) - selectInput(inputId = "additional_set_id_selected2", - label = "Indices 2:", - choices = set_elements2, - size = size_elements2, - selectize = FALSE, - multiple = TRUE, - selected = sel2) - }) - # get input from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - regions <- input$regions_selected - scenarios <- input$scenarios_selected - - # in case they have not yet been set, set to default values - if (is.null(regions)) regions <- display_regions - if (is.null(additional_set_selected)) additional_set_selected <- set_elements[1] - if ((additional_set_id != "na" & additional_set_selected[1] == "na") | !(additional_set_selected[1] %in% set_elements)) additional_set_selected <- set_elements[1] - - # 
SUBSET data and PLOT - # choose additional selected element - if (additional_set_id != "na") { - afd[[additional_set_id]] <- tolower(afd[[additional_set_id]]) # to fix erroneous gams cases (y and Y etc.) - afd <- subset(afd, get(additional_set_id) %in% additional_set_selected) - afd[[additional_set_id]] <- NULL # remove additional set column - # afd$t <- as.character(afd$t) - if (length(additional_set_selected) > 1) { - afd <- afd %>% - group_by_at(setdiff(names(afd), "value")) %>% - summarize(value = sum(value), .groups = "drop") - } - } - - # time frame - afd <- subset(afd, ttoyear(t) >= yearlim[1] & ttoyear(t) <= yearlim[2]) - # clean data - afd <- afd %>% filter(!is.na(value)) - - # Computation of World/global sum/average - # now based on meta param to guess max, mean, sum - if (nrow(afd) > 0) { - afd_global <- afd - afd_global$n <- NULL - if (variable %in% default_meta_param()$parameter) { - if (default_meta_param()[parameter == variable & type == "nagg"]$value == "sum") { - afd_global <- afd_global %>% - group_by_at(setdiff(names(afd_global), "value")) %>% - summarize(value = sum(value), .groups = "drop") - } else if (default_meta_param()[parameter == variable & type == "nagg"]$value == "mean") { - afd_global <- afd_global %>% - group_by_at(setdiff(names(afd_global), "value")) %>% - summarize(value = mean(value), .groups = "drop") - } - } else { - afd_global <- afd_global %>% - group_by_at(setdiff(names(afd_global), "value")) %>% - summarize(value = sum(value), .groups = "drop") - } - afd_global <- afd_global %>% - mutate(n = "World") %>% - as.data.frame() - afd <- rbind(afd, afd_global[, names(afd)]) - } - - # in case growth rates - if (growth_rate) { - afd <- afd %>% - group_by_at(setdiff(names(afd), c("t", "value"))) %>% - arrange(t) %>% - mutate(year = ttoyear(t), growthrate = ((value / lag(value))^(1 / (year - lag(year))) - 1) * 100) %>% - select(-year, -value) %>% - dplyr::rename(value = growthrate) %>% - mutate(value = ifelse(is.na(value), 0, value)) 
%>% - ungroup() - } - - #all upper case except for world - afd$n <- ifelse(afd$n == "World", "World", toupper(afd$n)) - - - # scenarios, potentially add stochastic scenarios to show - afd <- subset(afd, file %in% c(scenarios, paste0(scenarios, "(b1)"), paste0(scenarios, "(b2)"), paste0(scenarios, "(b3)")) | str_detect(file, "historical") | str_detect(file, "valid")) - - # Unit conversion - unit_conv <- unit_conversion(variable) - if (growth_rate) unit_conv$unit <- " % p.a." - unit_conv$convert <- 1 - afd$value <- afd$value * unit_conv$convert - afd$year <- ttoyear(afd$t) - - if (regions[1] == "World" | length(regions) == 1) { # if only World is displayed or only one region, show files with colors - p <- ggplot(subset(afd, n %in% regions & (!str_detect(file, "historical") & !str_detect(file, "valid"))), aes(ttoyear(t), value, colour = file)) + - geom_line(stat = "identity", linewidth = 1.5) + - xlab("year") + - ylab(unit_conv$unit) + - xlim(yearlim[1], yearlim[2]) - if (ylim_zero) p <- p + ylim(0, NA) - p <- p + geom_line(data = subset(afd, n %in% regions & str_detect(file, "historical")), aes(year, value, colour = file), stat = "identity", linewidth = 1.0, linetype = "solid") - # legends: - p <- p + theme(text = element_text(size = 16), legend.position = "bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title = element_blank()) + guides(color = guide_legend(title = NULL)) - } else { - p <- ggplot(subset(afd, n %in% regions & (!str_detect(file, "historical") & !str_detect(file, "valid"))), aes(ttoyear(t), value, colour = n, linetype = file)) + - geom_line(stat = "identity", linewidth = 1.5) + - xlab("year") + - ylab(unit_conv$unit) + - scale_colour_manual(values = region_palette) + - xlim(yearlim[1], yearlim[2]) - p <- p + geom_line(data = subset(afd, n %in% regions & str_detect(file, "historical")), aes(year, value, colour = n, group = interaction(n, file)), linetype = "solid", stat = "identity", 
linewidth = 1.0) - # legends: - p <- p + theme(text = element_text(size = 16), legend.position = "bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title = element_blank()) + guides(color = guide_legend(title = NULL, nrow = 2), linetype = guide_legend(title = NULL)) - } - if (length(fullpathdir) != 1) { - p <- p + facet_grid(. ~ pathdir) - } - if(nrow(afd)>0) print(p + labs(title = variable)) - }) - - - - # RICE50x Stacked area plot - output$gdxcompaRstackedplot <- renderPlot({ - assign("historical", input$add_historical, envir = .GlobalEnv) - ylim_zero <- input$ylim_zero - variable <- input$variable_selected - if (is.null(variable)) variable <- list_of_variables[1] - # get data - afd <- get_witch(variable, check_calibration = TRUE) - if (verbose) print(str_glue("Variable {variable} loaded.")) - # get the name of the additional set - additional_sets <- setdiff(colnames(afd), c(file_group_columns, "pathdir", "t", "n", "value")) - # extract additional set elements - if (length(additional_sets) == 0) { - additional_set_id <- "na" - set_elements <- "na" - additional_set_id2 <- "na" - set_elements2 <- "na" - } else if (length(additional_sets) == 1) { - additional_set_id <- additional_sets[1] - set_elements <- unique(tolower(as.data.frame(afd)[, match(additional_set_id, colnames(afd))])) - } else if (length(additional_sets) == 2) { - additional_set_id <- additional_sets[1] - set_elements <- unique(tolower(as.data.frame(afd)[, match(additional_set_id, colnames(afd))])) - } - - # Selector for additional set - output$choose_additional_set <- renderUI({ - variable <- variable_selected_reactive() - if (is.null(variable)) variable <- list_of_variables[1] - sel <- input$additional_set_id_selected - size_elements <- min(length(set_elements), 5) - selectInput("additional_set_id_selected", "Additional set element", set_elements, size = size_elements, selectize = F, multiple = T, selected = sel) - }) - - # get input 
from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - regions <- input$regions_selected - scenarios <- input$scenarios_selected - - # in case they have not yet been set, set to default values - if (is.null(regions)) regions <- display_regions - if (is.null(additional_set_selected)) additional_set_selected <- set_elements[1] - if ((additional_set_id != "na" & additional_set_selected[1] == "na") | !(additional_set_selected[1] %in% set_elements)) additional_set_selected <- set_elements[1] - - # SUBSET data and PLOT - # choose additional selected element - if (additional_set_id != "na") { - afd[[additional_set_id]] <- tolower(afd[[additional_set_id]]) # to fix erroneous gams cases (y and Y etc.) - afd <- subset(afd, get(additional_set_id) %in% additional_set_selected) - afd[[additional_set_id]] <- NULL # remove additional set column - if (length(additional_set_selected) > 1) { - afd <- afd %>% - group_by_at(setdiff(names(afd), "value")) %>% - summarize(value = sum(value), .groups = "drop") - } - } - - # time frame - afd <- subset(afd, ttoyear(t) >= yearlim[1] & ttoyear(t) <= yearlim[2]) - # clean data - afd <- afd %>% filter(!is.na(value)) - - # scenarios, potentially add stochastic scenarios to show - afd <- subset(afd, file %in% c(scenarios, paste0(scenarios, "(b1)"), paste0(scenarios, "(b2)"), paste0(scenarios, "(b3)")) | str_detect(file, "historical") | str_detect(file, "valid")) - - # remove duplicate historical/validation data - # afd <- subset(afd, file %in% c(scenarios, str_subset(unique(afd$file), "valid")[1], str_subset(unique(afd$file), "historical")[1])) - # for figure on files add historical for evrey scenario - afd_hist <- subset(afd, file %in% c(str_subset(unique(afd$file), "historical")[1])) - afd <- subset(afd, file %in% c(scenarios)) - for (scen in scenarios) - { - afd_hist$file <- scen - if (scen == scenarios[1]) { - afd_hist_temp <- afd_hist - } else { - afd_hist_temp <- rbind(afd_hist_temp, 
afd_hist) - } - } - afd <- rbind(afd, afd_hist) - - # Unit conversion - unit_conv <- unit_conversion(variable) - afd$value <- afd$value * unit_conv$convert - afd$year <- ttoyear(afd$t) - - p_stacked <- ggplot(subset(afd, n %in% regions & (!str_detect(file, "historical") & !str_detect(file, "valid"))), aes(ttoyear(t), value, fill = n)) + - geom_area(stat = "identity", size = 1.5) + - xlab("year") + - ylab(unit_conv$unit) + - scale_fill_manual(values = region_palette) + - xlim(yearlim[1], yearlim[2]) - # p_stacked <- p_stacked + geom_area(data=subset(afd, n %in% regions & str_detect(file, "historical")),aes(year, value, fill=n), linetype = "solid", stat="identity", size=1.0) - # p_stacked <- p_stacked + geom_area(data=subset(afd, n %in% regions & str_detect(file, "valid")),aes(year, value, fill=n), size=4.0) - # legends: - p_stacked <- p_stacked + theme(text = element_text(size = 16), legend.position = "bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title = element_blank()) + guides(fill = guide_legend(title = NULL, nrow = 2)) - if (!is.null(scenarios)) p_stacked <- p_stacked + facet_wrap(. 
~ file) - print(p_stacked + labs(title = variable)) - }) - - - - - - - - # MAIN CODE FOR PLOTLY GENERATION (copied from standard ggplot) - output$gdxompaRplotly <- renderPlotly({ - assign("historical", input$add_historical, envir = .GlobalEnv) - ylim_zero <- input$ylim_zero - growth_rate <- input$growth_rate - field_show <- input$field - plotly_dynamic <- input$plotly_dynamic - variable <- input$variable_selected - if(is.null(variable)) variable <- list_of_variables[1] - #get data - afd <- get_witch(variable, check_calibration=TRUE, field = field_show) - if(verbose) print(str_glue("Variable {variable} loaded.")) - #get the name of the additional set - additional_sets <- setdiff(colnames(afd), c(file_group_columns, "pathdir", "t", "n", "value")) - #extract additional set elements - if(length(additional_sets)==0){additional_set_id="na"; set_elements = "na"; additional_set_id2="na"; set_elements2 = "na"} - else if(length(additional_sets)==1) - { - additional_set_id <- additional_sets[1] - set_elements <- unique(tolower(as.data.frame(afd)[, match(additional_set_id, colnames(afd))])) - additional_set_id2="na"; set_elements2 = "na" - } - else if(length(additional_sets)==2) - { - additional_set_id <- additional_sets[1] - set_elements <- unique(tolower(as.data.frame(afd)[, match(additional_set_id, colnames(afd))])) - additional_set_id2 <- additional_sets[2] - set_elements2 <- unique(tolower(as.data.frame(afd)[, match(additional_set_id2, colnames(afd))])) - } - - #Selector for additional set - output$choose_additional_set <- renderUI({ - variable <- variable_input() - if(is.null(variable)) variable <- list_of_variables[1] - sel <- input$additional_set_id_selected - size_elements <- min(length(set_elements), 5) - selectInput("additional_set_id_selected", "Additional set element", set_elements, size=size_elements, selectize = F, multiple = T, selected = sel) - }) - #Selector for additional set #2 - output$choose_additional_set2 <- renderUI({ - variable <- variable_input() - 
if(is.null(variable)) variable <- list_of_variables[1] - sel2 <- input$additional_set_id_selected2 - size_elements2 <- min(length(set_elements2), 5) - selectInput("additional_set_id_selected2", "Additional set element 2", set_elements2, size=size_elements2, selectize = F, multiple = T, selected = sel2) - }) - - #get input from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - additional_set_selected2 <- input$additional_set_id_selected2 - regions <- input$regions_selected - scenarios <- input$scenarios_selected - - #in case they have not yet been set, set to default values - if(is.null(regions)) regions <- display_regions - if(is.null(additional_set_selected)) additional_set_selected <- set_elements[1] - if((additional_set_id!="na" & additional_set_selected[1]=="na") | !(additional_set_selected[1] %in% set_elements)) additional_set_selected <- set_elements[1] - if(is.null(additional_set_selected2)) additional_set_selected2 <- set_elements2[1] - if((additional_set_id2!="na" & additional_set_selected2[1]=="na") | !(additional_set_selected2[1] %in% set_elements2)) additional_set_selected2 <- set_elements2[1] - - # SUBSET data and PLOT - #choose additional selected element - if(additional_set_id!="na"){ - afd[[additional_set_id]] <- tolower(afd[[additional_set_id]]) # to fix erroneous gams cases (y and Y etc.) - afd <- subset(afd, get(additional_set_id) %in% additional_set_selected) - afd[[additional_set_id]] <- NULL #remove additional set column - #afd$t <- as.character(afd$t) - if(length(additional_set_selected) >1) afd <- afd %>% group_by_at(setdiff(names(afd), "value")) %>% summarize(value=sum(value), .groups = 'drop') - } - if(additional_set_id2!="na"){ - afd[[additional_set_id2]] <- tolower(afd[[additional_set_id2]]) # to fix erroneous gams cases (y and Y etc.) 
- afd <- subset(afd, get(additional_set_id2) %in% additional_set_selected2) - afd[[additional_set_id2]] <- NULL #remove additional set column - if(length(additional_set_selected2) >1) afd <- afd %>% group_by_at(setdiff(names(afd), "value")) %>% summarize(value=sum(value), .groups = 'drop') - } - - #time frame - afd <- subset(afd, ttoyear(t)>= yearlim[1] & ttoyear(t) <= yearlim[2]) - #clean data - afd <- afd %>% filter(!is.na(value)) - - #Computation of World/glboal sum/average - #now based on meta param to guess max, mean, sum - if(nrow(afd)>0){ - afd_global <- afd - afd_global$n <- NULL - if(variable %in% default_meta_param()$parameter){ - if(default_meta_param()[parameter==variable & type=="nagg"]$value=="sum"){ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) %>% summarize(value=sum(value), .groups = 'drop') - }else if(default_meta_param()[parameter==variable & type=="nagg"]$value=="mean"){ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) %>% summarize(value=mean(value), .groups = 'drop') - }else{ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) %>% summarize(value=sum(value), .groups = 'drop') - } - } - afd_global <- afd_global %>% mutate(n = "World") %>% as.data.frame() - afd <- rbind(afd, afd_global[, names(afd)]) - } - #same for EU - if(nrow(afd)>0){ - eu <- get_witch("eu"); if(!exists("eu")) eu_regions <- c("europe") else eu_regions <- unique(eu$n) - afd_global <- afd %>% filter(n %in% eu_regions) - afd_global$n <- NULL - if(variable %in% default_meta_param()$parameter){ - if(default_meta_param()[parameter==variable & type=="nagg"]$value=="sum"){ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) %>% summarize(value=sum(value), .groups = 'drop') - }else if(default_meta_param()[parameter==variable & type=="nagg"]$value=="mean"){ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) %>% 
summarize(value=mean(value), .groups = 'drop') - } - }else{ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) %>% summarize(value=sum(value), .groups = 'drop') - } - afd_global <- afd_global %>% mutate(n = "EU") %>% as.data.frame() - afd <- rbind(afd, afd_global[, names(afd)]) - } - - # in case growth rates - if (growth_rate) { - afd <- afd %>% - group_by_at(setdiff(names(afd), c("t", "value"))) %>% - arrange(t) %>% - mutate(year = ttoyear(t), growthrate = ((value / lag(value))^(1 / (year - lag(year))) - 1) * 100) %>% - select(-year, -value) %>% - dplyr::rename(value = growthrate) %>% - ungroup() - } - - #scenarios, potentially add stochastic scenarios to show - afd <- subset(afd, file %in% c(scenarios, paste0(scenarios, "(b1)"),paste0(scenarios, "(b2)"), paste0(scenarios, "(b3)")) | str_detect(file, "historical") | str_detect(file, "valid")) - - #Unit conversion - unit_conv <- unit_conversion(variable) - afd$value <- afd$value * unit_conv$convert - afd$year <- ttoyear(afd$t) - - if(regions[1]=="World" | regions[1]=="EU" | length(regions)==1){#if only World is displayed or only one region, show files with colors - p_dyn <- ggplot(subset(afd, n %in% regions & (!str_detect(file, "historical") & !str_detect(file, "valid"))),aes(year,value,colour=file)) + geom_line(stat="identity", linewidth=1.5) + xlab(NULL) + ylab(unit_conv$unit) + xlim(yearlim[1],yearlim[2]) - - if(nrow(afd %>% filter(n %in% regions & str_detect(file, "historical"))) > 0 ) p_dyn <- p_dyn + geom_line(data=subset(afd, n %in% regions & str_detect(file, "historical")),aes(year,value,colour=file), stat="identity", linewidth=1.0, linetype="solid") - if(nrow(afd %>% filter(n %in% regions & str_detect(file, "valid"))) > 0 ) p_dyn <- p_dyn + geom_point(data=subset(afd, n %in% regions & str_detect(file, "valid")),aes(year,value,colour=file), size=4.0, shape=18) - - # Add a horizontal line at y=0 - if(ylim_zero) { - p <- p + geom_hline(yintercept = 0, alpha = 0.5) - } - - 
if("valid" %in% unique(afd %>% filter(n %in% regions))$file) p_dyn <- p_dyn + geom_point(data=subset(afd, n %in% regions & str_detect(file, "valid")),aes(year,value,colour=file), size=4.0, shape=18) - - #legends: - p_dyn <- p_dyn + theme(text = element_text(size=16), legend.position="bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL)) - }else{ - p_dyn <- ggplot(subset(afd, n %in% regions & (!str_detect(file, "historical") & !str_detect(file, "valid"))),aes(year,value,colour=n, linetype=file)) + geom_line(stat="identity", linewidth=1.5) + xlab(NULL) + ylab(unit_conv$unit) + scale_colour_manual(values = region_palette) + xlim(yearlim[1],yearlim[2]) - if("historical" %in% unique(afd %>% filter(n %in% regions))$file) p_dyn <- p_dyn + geom_line(data=subset(afd, n %in% regions & str_detect(file, "historical")),aes(ttoyear(t),value,colour=n), linetype = "solid", stat="identity", size=1.0) - if("valid" %in% unique(afd %>% filter(n %in% regions))$file) p_dyn <- p_dyn + geom_point(data=subset(afd, n %in% regions & str_detect(file, "valid")),aes(year,value, shape=file), size=4.0) - #legends: - p_dyn <- p_dyn + theme(text = element_text(size=16), legend.position="bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL, nrow = 2), linetype=guide_legend(title=NULL)) - } - if(length(fullpathdir)!=1){p_dyn <- p_dyn + facet_grid(. 
~ pathdir)} - p_dyn <- p_dyn + theme(legend.position = "none") - print(p_dyn) - if(length(ggplot_build(p_dyn)$data[[1]]) > 0) ggplotly() - }) - - - - output$diagnostics <- renderPlot({ - # get input from sliders/buttons - variable <- input$variable_selected - yearlim <- input$yearlim - scenarios <- input$scenarios_selected - - get_witch("elapsed") - if (!exists("elapsed")) elapsed <- data.frame(file = scenlist, value = 0) - get_witch("Y") - get_witch("TATM") - get_witch("MIU") - get_witch("l") - # get_witch("DAMFRAC") - # compute Gini index - gini <- Y %>% - left_join(l %>% rename(pop = value), by = c("t", "n", "file", "pathdir")) %>% - group_by(t, file, pathdir) %>% - summarize(value = reldist::gini(value / pop, weights = pop)) - # style - diagplot <- ggarrange( - ggplot(elapsed %>% filter(file %in% scenarios)) + - geom_bar(aes(file, value, fill = file), stat = "identity") + - ylab("Run time (minutes)") + - theme(axis.text.x = element_text(angle = 90, hjust = 1)) + - theme(axis.title.x = element_blank(), axis.text.x = element_blank(), axis.ticks.x = element_blank()) + - scale_y_time(labels = function(l) strftime(l, "%M:%S")), - ggarrange( - ggplot(MIU %>% group_by(t, file, pathdir) %>% summarise(value = mean(value)) %>% filter(file %in% scenarios)) + - geom_line(aes(ttoyear(t), value, color = file), size = 1) + - ylab("MIU") + - xlab(""), - ggplot(Y %>% filter(file %in% scenarios) %>% group_by(t, file, pathdir) %>% summarise(value = sum(value))) + - geom_line(aes(ttoyear(t), value, color = file), size = 1) + - ylab("GDP [T$]") + - xlab(""), - ncol = 2, common.legend = T, legend = "none" - ), - ggarrange( - ggplot(TATM %>% filter(file %in% scenarios & !is.na(value))) + - geom_line(aes(ttoyear(t), value, color = file), size = 1) + - ylab("TATM") + - xlab(""), - ggplot(gini %>% filter(file %in% scenarios)) + - geom_line(aes(ttoyear(t), value, color = file), size = 1) + - ylab("Gini index") + - xlab("") + - ylim(0, 1), - ncol = 2, common.legend = T, legend = "none" - 
), - nrow = 3, common.legend = T, legend = "bottom" - ) - print(diagplot) - }) - - - }) diff --git a/gdxcompaR/iiasadb/server.R b/gdxcompaR/iiasadb/server.R deleted file mode 100644 index 4677b8e..0000000 --- a/gdxcompaR/iiasadb/server.R +++ /dev/null @@ -1,205 +0,0 @@ -#Create gdxcompaR based on iiasa form csv or xlsx files or direct database connection - -#require packages if online deployed -if(deploy_online){ - suppressPackageStartupMessages(require(tidyverse)) - require(plotly) - require(shinyWidgets) - add_historical_values <- function(x, varname, check_calibration, iiasadb, verbose){ - x <- rbind(x, iiasadb_historical %>% filter(VARIABLE==varname)) - return(x) - } -} - - -# Define server -shinyServer(function(input, output, session) { - #some global flags - verbose = FALSE - - #get list of variables - regions <- unique(iiasadb_snapshot$REGION) - models <- unique(iiasadb_snapshot$MODEL) - variables <- unique(iiasadb_snapshot$VARIABLE) - variables <- sort(variables) - variable_atstart <- ifelse("Population" %in% variables, "Population", variables[1]) - scenarios <- unique(iiasadb_snapshot$SCENARIO) - - #Scenario selector - output$select_scenarios <- renderUI({ - selectInput("scenarios_selected", "Select scenarios", scenarios, size=length(scenarios), selectize = F, multiple = T, selected = scenarios) - }) - - #Variable selector - output$select_variable <- renderUI({ - pickerInput( - inputId = "variable_selected", - label = "Variable:", - choices = variables, - selected = variable_atstart, - options = list( - `live-search` = TRUE) - ) - }) - - # Reactively update variable selector - variable_input <- reactive({ - return(input$variable_selected) - }) - - #MODEL selector - output$select_models <- renderUI({ - selectInput("models_selected", "Select models", models, size=length(models), selectize = F, multiple = T, selected = models) - }) - - #REGION selector - output$select_regions <- renderUI({ - regions_for_selector <- regions - selectInput("regions_selected", 
"Select regions", regions_for_selector, size=1, selectize = F, multiple = F, selected = "World") - }) - - #Compare models or scenarios - # output$compare_models_scenarios <- renderUI({ - # compare_models_scenarios_selector <- "Scenarios" - # radioButtons("choice_models_scenarios", "Use color for", c("Scenarios", "Models"),selected = "Scenarios", inline=T) - # }) - - observeEvent(input$button_saveplotdata, { - variable <- input$variable_selected - print("Current plot saved in subdirectory 'graphs'") - saveplot(variable, width = 14, height = 7) - }) - - #Additional selector for specific Panels - - - - # MAIN CODE FOR PLOT GENERATION - output$iiasadb_compaR <- renderPlot({ - ylim_zero <- input$ylim_zero - variable <- input$variable_selected - if(is.null(variable)) variable <- variables[1] - #get data - allfilesdata <- subset(iiasadb_snapshot, VARIABLE==variable) - unitplot <- unique(allfilesdata$UNIT)[1] - #add historical data - allfilesdata <- add_historical_values(allfilesdata, varname = variable, check_calibration = T, iiasadb = T, verbose = F) - - #get input from sliders/buttons - yearlim <- input$yearlim - regions <- input$regions_selected - models_selected <- input$models_selected - #get all possible scenarios - scenarios_selected <- input$scenarios_selected - #select scenarios - allfilesdata <- subset(allfilesdata, SCENARIO %in% c(scenarios_selected, "historical")) - allfilesdata <- subset(allfilesdata, !(MODEL %in% setdiff(models, models_selected))) - - #time frame - allfilesdata <- subset(allfilesdata, YEAR>=yearlim[1] & YEAR<=yearlim[2]) - #clean data - allfilesdata <- subset(allfilesdata, !is.na(value)) - - if(is.null(regions)) regions <- "World" - - if(regions[1]=="World" | length(regions)==1){#if only World is displayed or only one region, show files with colors - if(length(models_selected)==1){ - p <- ggplot(subset(allfilesdata, REGION %in% regions & SCENARIO!="historical"),aes(YEAR,value,colour=SCENARIO)) + geom_line(stat="identity", linewidth=1.5) + 
xlab("") + ylab(unitplot) + xlim(yearlim[1],yearlim[2]) - }else{ - p <- ggplot(subset(allfilesdata, REGION %in% regions & SCENARIO!="historical"),aes(YEAR,value,colour=MODEL, linetype=SCENARIO)) + geom_line(stat="identity", linewidth=1.5) + xlab("") + ylab(unitplot) + xlim(yearlim[1],yearlim[2]) - } - p <- p + geom_line(data=subset(allfilesdata, REGION %in% regions & SCENARIO=="historical"), aes(YEAR,value, linetype=MODEL), stat="identity", linewidth=1.0, colour = "black") - if(ylim_zero) p <- p + ylim(0, NA) - #legends: - p <- p + theme(text = element_text(size=16), legend.position="bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL), linetype=guide_legend(title=NULL)) - }else{ - p <- ggplot(subset(allfilesdata, REGION %in% regions & SCENARIO!="historical"),aes(YEAR,value,colour=interaction(REGION, MODEL), linetype=SCENARIO)) + geom_line(stat="identity", linewidth=1.5) + xlab("year") + ylab(unitplot) + xlim(yearlim[1],yearlim[2]) + facet_grid(. 
~ REGION) - p <- p + geom_line(data=subset(allfilesdata, REGION %in% regions & SCENARIO=="historical"), aes(YEAR,value,colour=REGION, linetype=MODEL), stat="identity", linewidth=1.0) - if(ylim_zero) p <- p + ylim(0, NA) - #legends: - p <- p + theme(text = element_text(size=16), legend.position="bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL, nrow = 2), linetype=guide_legend(title=NULL)) - } - if(nrow(allfilesdata)>0) print(p + labs(title=variable)) - }) - - - - - # MAIN CODE FOR PLOT GENERATION - output$iiasadb_compaRly <- renderPlotly({ - ylim_zero <- input$ylim_zero - variable <- input$variable_selected - if(is.null(variable)) variable <- variables[1] - #get data - allfilesdata <- subset(iiasadb_snapshot, VARIABLE==variable) - unitplot <- unique(allfilesdata$UNIT)[1] - #add historical data - allfilesdata <- add_historical_values(allfilesdata, varname = variable, check_calibration = T, iiasadb = T, verbose = F) - - #get input from sliders/buttons - yearlim <- input$yearlim - regions <- input$regions_selected - models_selected <- input$models_selected - #get all possible scenarios - scenarios_selected <- input$scenarios_selected - #select scenarios - allfilesdata <- subset(allfilesdata, SCENARIO %in% c(scenarios_selected, "historical")) - allfilesdata <- subset(allfilesdata, !(MODEL %in% setdiff(models, models_selected))) - - #time frame - allfilesdata <- subset(allfilesdata, YEAR>=yearlim[1] & YEAR<=yearlim[2]) - #clean data - allfilesdata <- subset(allfilesdata, !is.na(value)) - - if(is.null(regions)) regions <- "World" - - if(regions[1]=="World" | length(regions)==1){#if only World is displayed or only one region, show files with colors - if(length(models_selected)==1){ - p <- ggplot(subset(allfilesdata, REGION %in% regions & SCENARIO!="historical"),aes(YEAR,value,colour=SCENARIO)) + geom_line(stat="identity", linewidth=1.5) + xlab("") 
+ ylab(unitplot) + xlim(yearlim[1],yearlim[2]) - }else{ - p <- ggplot(subset(allfilesdata, REGION %in% regions & SCENARIO!="historical"),aes(YEAR,value,colour=MODEL, linetype=SCENARIO)) + geom_line(stat="identity", linewidth=1.5) + xlab("") + ylab(unitplot) + xlim(yearlim[1],yearlim[2]) - } - if("historical" %in% unique(allfilesdata %>% filter(REGION %in% regions))$SCENARIO) p <- p + geom_line(data=subset(allfilesdata, REGION %in% regions & SCENARIO=="historical"), aes(YEAR,value, linetype=MODEL), stat="identity", linewidth=1.0, colour = "black") - if(ylim_zero) p <- p + ylim(0, NA) - #legends: - p <- p + theme(text = element_text(size=16), legend.position="bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL), linetype=guide_legend(title=NULL)) - }else{ - p <- ggplot(subset(allfilesdata, REGION %in% regions & SCENARIO!="historical"),aes(YEAR,value,colour=interaction(REGION, MODEL), linetype=SCENARIO)) + geom_line(stat="identity", linewidth=1.5) + xlab("year") + ylab(unitplot) + xlim(yearlim[1],yearlim[2]) + facet_grid(. 
~ REGION) - if("historical" %in% unique(allfilesdata %>% filter(REGION %in% regions))$SCENARIO) p <- p + geom_line(data=subset(allfilesdata, REGION %in% regions & SCENARIO=="historical"), aes(YEAR,value,colour=REGION, linetype=MODEL), stat="identity", linewidth=1.0) - if(ylim_zero) p <- p + ylim(0, NA) - #legends: - p <- p + theme(text = element_text(size=16), legend.position="bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL, nrow = 2), linetype=guide_legend(title=NULL)) - } - p_dyn <- p + theme(legend.position = "none") + labs(title=variable) - #print(p_dyn) - if(length(ggplot_build(p_dyn)$data[[1]]) > 0) ggplotly(p_dyn) - }) - - #plotoutput shiny frames for these three plots - output$iiasadb_coverage_scenarios <- renderPlot({ - models_selected <- input$models_selected - scenarios_selected <- input$scenarios_selected - #Scenarios - suppressWarnings(ggplot(iiasadb_snapshot %>% filter(SCENARIO %in% scenarios_selected & MODEL %in% models_selected) %>% group_by(MODEL, SCENARIO) %>% filter(!str_detect(REGION, "\\|")) %>% summarize(REGION=unique(REGION)) %>% ungroup() %>% group_by(MODEL, SCENARIO) %>% summarize(REGIONS=length(REGION)), aes(SCENARIO, MODEL, fill=REGIONS)) + geom_tile() + theme_minimal() + theme(axis.text.x = element_text(angle = 90, vjust = 0.5, hjust=1)) + geom_text(aes(label=REGIONS)) + theme(text = element_text(size=16)) + scale_fill_gradient2(low = "white", mid = "yellow", high = "darkgreen") + scale_x_discrete(labels = function(x) str_wrap(x, width = 50))) - }) - - output$iiasadb_coverage_regions <- renderPlot({ - models_selected <- input$models_selected - scenarios_selected <- input$scenarios_selected - #Regions - suppressWarnings(ggplot(iiasadb_snapshot %>% filter(SCENARIO %in% scenarios_selected & MODEL %in% models_selected) %>% group_by(MODEL, SCENARIO) %>% filter(!str_detect(REGION, "\\|")) %>% 
summarize(REGION=unique(REGION)) %>% ungroup() %>% group_by(MODEL, REGION) %>% summarize(SCENARIOS=length(SCENARIO)), aes(REGION, MODEL, fill=SCENARIOS)) + geom_tile() + theme_minimal() + theme(axis.text.x = element_text(angle = 90, vjust = 0.5, hjust=1)) + geom_text(aes(label=SCENARIOS)) + theme(text = element_text(size=16)) + scale_fill_gradient2(low = "white", mid = "yellow", high = "darkgreen") + scale_x_discrete(labels = function(x) str_wrap(x, width = 50))) - }) - - output$iiasadb_coverage_variables <- renderPlot({ - models_selected <- input$models_selected - scenarios_selected <- input$scenarios_selected - #Variables - #suppressWarnings(ggplot(iiasadb_snapshot %>% group_by(MODEL, SCENARIO) %>% filter(!str_detect(REGION, "\\|")) %>% summarize(VARIABLE=unique(VARIABLE)) %>% ungroup() %>% group_by(MODEL, VARIABLE) %>% summarize(SCENARIOS=length(SCENARIO)), aes(VARIABLE, MODEL, fill=SCENARIOS)) + geom_tile() + theme_minimal() + theme(axis.text.x = element_text(angle = 90, vjust = 0.5, hjust=1)) + geom_text(aes(label=SCENARIOS)) + theme(text = element_text(size=16)) + scale_fill_gradient2(low = "white", mid = "yellow", high = "darkgreen") + scale_x_discrete(labels = function(x) str_wrap(x, width = 50))) - ggplot(iiasadb_snapshot %>% group_by(MODEL, SCENARIO) %>% filter(!str_detect(REGION, "\\|")) %>% reframe(VARIABLE=unique(VARIABLE)) %>% group_by(MODEL, VARIABLE) %>% summarize(SCENARIOS=length(SCENARIO)), aes(VARIABLE, MODEL, fill=SCENARIOS)) + geom_tile() + theme_minimal() + theme(axis.text.x = element_text(angle = 90, vjust = 0.5, hjust=1)) + geom_text(aes(label=SCENARIOS), size=3) + theme(text = element_text(size=10)) + scale_fill_gradient2(low = "white", mid = "yellow", high = "darkgreen") + scale_x_discrete(labels = function(x) str_wrap(x, width = 50)) + coord_flip() - }) - - - - - -}) diff --git a/gdxcompaR/rice/server.R b/gdxcompaR/rice/server.R deleted file mode 100644 index df078c2..0000000 --- a/gdxcompaR/rice/server.R +++ /dev/null @@ -1,578 +0,0 @@ 
-# Define server -shinyServer(function(input, output, session) { - # some global flags - verbose <- FALSE - save_plot <- FALSE - growth_rate <- FALSE - - # get list of variables and parameters in all files - list_of_variables <- NULL - for (f in filelist) { - .gdx <- gdx(paste(file.path(fullpathdir[1], f), ".gdx", sep = "")) - list_of_variables <- c(list_of_variables, all_items(.gdx)$variables) - list_of_variables <- c(list_of_variables, all_items(.gdx)$parameters) # also all parameters - } - list_of_variables <- unique(list_of_variables) - list_of_variables <- c(sort(str_subset(list_of_variables, "^[:upper:]")), sort(str_subset(list_of_variables, "^[:lower:]"))) - - #Scenario selector - output$select_scenarios <- renderUI({ - selectInput(inputId = "scenarios_selected", - label = "Scenarios:", - choices = unname(scenlist), - size = length(scenlist), - selectize = FALSE, - multiple = TRUE, - selected = unname(scenlist)) # Select all scenarios by default - }) - - - #Variable selector - output$select_variable <- renderUI({ - pickerInput( - inputId = "variable_selected", - label = "Variable:", - choices = list_of_variables, - selected = "E", - options = list( - `live-search` = TRUE) - ) - }) - variable_selected_reactive <- reactive({input$variable_selected}) - - # Display selected variable and set - output$varname <- renderText({ - paste("Variable:", variable_selected_reactive(), " Element:", paste(input$additional_set_id_selected, collapse = ",")) - }) - - #REGION selector - output$select_regions <- renderUI({ - regions_for_selector <- list(Aggregate = list("World"), - `Native regions` = witch_regions) - selectInput(inputId = "regions_selected", - label = "Regions:", - regions_for_selector, - size = max(10, length(regions_for_selector)), - selectize = FALSE, - multiple = TRUE, - selected = "World") - }) - - observeEvent(input$button_saveplotdata, { - variable <- input$variable_selected - print("Current plot saved in subdirectory 'graphs'") - saveplot(variable, width = 
14, height = 7) - }) - - # Additional selector for specific Panels - - - - # MAIN CODE FOR PLOT GENERATION - output$gdxcompaRplot <- renderPlot({ - assign("historical", input$add_historical, envir = .GlobalEnv) - ylim_zero <- input$ylim_zero - field_show <- input$field - growth_rate <- input$growth_rate - # plotly_dynamic <- input$plotly_dynamic - variable <- input$variable_selected - if (is.null(variable)) variable <- list_of_variables[1] - # get data - afd <- get_witch(variable, check_calibration = TRUE, field = field_show) - if (verbose) print(str_glue("Variable {variable} loaded.")) - # get the name of the additional set - additional_sets <- setdiff(colnames(afd), c(file_group_columns, "pathdir", "t", "n", "value")) - # extract additional set elements - if (length(additional_sets) == 0) { - additional_set_id <- "na" - set_elements <- "na" - additional_set_id2 <- "na" - set_elements2 <- "na" - } else if (length(additional_sets) == 1) { - additional_set_id <- additional_sets[1] - set_elements <- unique(tolower(as.data.frame(afd)[, match(additional_set_id, colnames(afd))])) - } else if (length(additional_sets) == 2) { - additional_set_id <- additional_sets[1] - set_elements <- unique(tolower(as.data.frame(afd)[, match(additional_set_id, colnames(afd))])) - } - - # Selector for additional set - output$choose_additional_set <- renderUI({ - variable <- variable_selected_reactive() - if (is.null(variable)) variable <- list_of_variables[1] - sel <- input$additional_set_id_selected - size_elements <- min(length(set_elements), 5) - selectInput("additional_set_id_selected", "Additional set element", set_elements, size = size_elements, selectize = F, multiple = T, selected = sel) - }) - - # get input from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - regions <- input$regions_selected - scenarios <- input$scenarios_selected - - # in case they have not yet been set, set to default values - if (is.null(regions)) 
regions <- display_regions - if (is.null(additional_set_selected)) additional_set_selected <- set_elements[1] - if ((additional_set_id != "na" & additional_set_selected[1] == "na") | !(additional_set_selected[1] %in% set_elements)) additional_set_selected <- set_elements[1] - - # SUBSET data and PLOT - # choose additional selected element - if (additional_set_id != "na") { - afd[[additional_set_id]] <- tolower(afd[[additional_set_id]]) # to fix erroneous gams cases (y and Y etc.) - afd <- subset(afd, get(additional_set_id) %in% additional_set_selected) - afd[[additional_set_id]] <- NULL # remove additional set column - # afd$t <- as.character(afd$t) - if (length(additional_set_selected) > 1) { - afd <- afd %>% - group_by_at(setdiff(names(afd), "value")) %>% - summarize(value = sum(value), .groups = "drop") - } - } - - # time frame - afd <- subset(afd, ttoyear(t) >= yearlim[1] & ttoyear(t) <= yearlim[2]) - # clean data - afd <- afd %>% filter(!is.na(value)) - - # Computation of World/global sum/average - # now based on meta param to guess max, mean, sum - if (nrow(afd) > 0) { - afd_global <- afd - afd_global$n <- NULL - if (variable %in% default_meta_param()$parameter) { - if (default_meta_param()[parameter == variable & type == "nagg"]$value == "sum") { - afd_global <- afd_global %>% - group_by_at(setdiff(names(afd_global), "value")) %>% - summarize(value = sum(value), .groups = "drop") - } else if (default_meta_param()[parameter == variable & type == "nagg"]$value == "mean") { - afd_global <- afd_global %>% - group_by_at(setdiff(names(afd_global), "value")) %>% - summarize(value = mean(value), .groups = "drop") - } - } else { - afd_global <- afd_global %>% - group_by_at(setdiff(names(afd_global), "value")) %>% - summarize(value = sum(value), .groups = "drop") - } - afd_global <- afd_global %>% - mutate(n = "World") %>% - as.data.frame() - afd <- rbind(afd, afd_global[, names(afd)]) - } - - # in case growth rates - if (growth_rate) { - afd <- afd %>% - 
group_by_at(setdiff(names(afd), c("t", "value"))) %>% - arrange(t) %>% - mutate(year = ttoyear(t), growthrate = ((value / lag(value))^(1 / (year - lag(year))) - 1) * 100) %>% - select(-year, -value) %>% - dplyr::rename(value = growthrate) %>% - mutate(value = ifelse(is.na(value), 0, value)) %>% - ungroup() - } - - - # scenarios, potentially add stochastic scenarios to show - afd <- subset(afd, file %in% c(scenarios, paste0(scenarios, "(b1)"), paste0(scenarios, "(b2)"), paste0(scenarios, "(b3)")) | str_detect(file, "historical") | str_detect(file, "valid")) - - # Unit conversion - unit_conv <- unit_conversion(variable) - if (growth_rate) unit_conv$unit <- " % p.a." - unit_conv$convert <- 1 - afd$value <- afd$value * unit_conv$convert - afd$year <- ttoyear(afd$t) - - if (regions[1] == "World" | length(regions) == 1) { # if only World is displayed or only one region, show files with colors - p <- ggplot(subset(afd, n %in% regions & (!str_detect(file, "historical") & !str_detect(file, "valid"))), aes(ttoyear(t), value, colour = file)) + - geom_line(stat = "identity", linewidth = 1.5) + - xlab("year") + - ylab(unit_conv$unit) + - xlim(yearlim[1], yearlim[2]) - if (ylim_zero) p <- p + ylim(0, NA) - p <- p + geom_line(data = subset(afd, n %in% regions & str_detect(file, "historical")), aes(year, value, colour = file), stat = "identity", linewidth = 1.0, linetype = "solid") - p <- p + geom_point(data = subset(afd, n %in% regions & str_detect(file, "valid")), aes(year, value, colour = file), size = 4.0, shape = 18) - # legends: - p <- p + theme(text = element_text(size = 16), legend.position = "bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title = element_blank()) + guides(color = guide_legend(title = NULL)) - } else { - p <- ggplot(subset(afd, n %in% regions & (!str_detect(file, "historical") & !str_detect(file, "valid"))), aes(ttoyear(t), value, colour = n, linetype = file)) + - geom_line(stat = 
"identity", linewidth = 1.5) + - xlab("year") + - ylab(unit_conv$unit) + - scale_colour_manual(values = region_palette) + - xlim(yearlim[1], yearlim[2]) - p <- p + geom_line(data = subset(afd, n %in% regions & str_detect(file, "historical")), aes(year, value, colour = n, group = interaction(n, file)), linetype = "solid", stat = "identity", linewidth = 1.0) - p <- p + geom_point(data = subset(afd, n %in% regions & str_detect(file, "valid")), aes(year, value, colour = n, shape = file), size = 4.0) - # legends: - p <- p + theme(text = element_text(size = 16), legend.position = "bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title = element_blank()) + guides(color = guide_legend(title = NULL, nrow = 2), linetype = guide_legend(title = NULL)) - } - if (length(fullpathdir) != 1) { - p <- p + facet_grid(. ~ pathdir) - } - if(nrow(afd)>0) print(p + labs(title = variable)) - }) - - - - # RICE50x Stacked area plot - output$gdxcompaRstackedplot <- renderPlot({ - assign("historical", input$add_historical, envir = .GlobalEnv) - ylim_zero <- input$ylim_zero - field_show <- input$field - variable <- input$variable_selected - if (is.null(variable)) variable <- list_of_variables[1] - # get data - afd <- get_witch(variable, check_calibration = TRUE, field = field_show) - if (verbose) print(str_glue("Variable {variable} loaded.")) - # get the name of the additional set - additional_sets <- setdiff(colnames(afd), c(file_group_columns, "pathdir", "t", "n", "value")) - # extract additional set elements - if (length(additional_sets) == 0) { - additional_set_id <- "na" - set_elements <- "na" - additional_set_id2 <- "na" - set_elements2 <- "na" - } else if (length(additional_sets) == 1) { - additional_set_id <- additional_sets[1] - set_elements <- unique(tolower(as.data.frame(afd)[, match(additional_set_id, colnames(afd))])) - } else if (length(additional_sets) == 2) { - additional_set_id <- additional_sets[1] - 
set_elements <- unique(tolower(as.data.frame(afd)[, match(additional_set_id, colnames(afd))])) - } - - # Selector for additional set - output$choose_additional_set <- renderUI({ - variable <- variable_selected_reactive() - if (is.null(variable)) variable <- list_of_variables[1] - sel <- input$additional_set_id_selected - size_elements <- min(length(set_elements), 5) - selectInput("additional_set_id_selected", "Additional set element", set_elements, size = size_elements, selectize = F, multiple = T, selected = sel) - }) - - # get input from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - regions <- input$regions_selected - scenarios <- input$scenarios_selected - - # in case they have not yet been set, set to default values - if (is.null(regions)) regions <- display_regions - if (is.null(additional_set_selected)) additional_set_selected <- set_elements[1] - if ((additional_set_id != "na" & additional_set_selected[1] == "na") | !(additional_set_selected[1] %in% set_elements)) additional_set_selected <- set_elements[1] - - # SUBSET data and PLOT - # choose additional selected element - if (additional_set_id != "na") { - afd[[additional_set_id]] <- tolower(afd[[additional_set_id]]) # to fix erroneous gams cases (y and Y etc.) 
- afd <- subset(afd, get(additional_set_id) %in% additional_set_selected) - afd[[additional_set_id]] <- NULL # remove additional set column - # afd$t <- as.character(afd$t) - if (length(additional_set_selected) > 1) { - afd <- afd %>% - group_by_at(setdiff(names(afd), "value")) %>% - summarize(value = sum(value), .groups = "drop") - } - } - - # time frame - afd <- subset(afd, ttoyear(t) >= yearlim[1] & ttoyear(t) <= yearlim[2]) - # clean data - afd <- afd %>% filter(!is.na(value)) - - # scenarios, potentially add stochastic scenarios to show - afd <- subset(afd, file %in% c(scenarios, paste0(scenarios, "(b1)"), paste0(scenarios, "(b2)"), paste0(scenarios, "(b3)")) | str_detect(file, "historical") | str_detect(file, "valid")) - - # remove duplicate historical/validation data - # afd <- subset(afd, file %in% c(scenarios, str_subset(unique(afd$file), "valid")[1], str_subset(unique(afd$file), "historical")[1])) - # for figure on files add historical for evrey scenario - afd_hist <- subset(afd, file %in% c(str_subset(unique(afd$file), "historical")[1])) - afd <- subset(afd, file %in% c(scenarios)) - for (scen in scenarios) - { - afd_hist$file <- scen - if (scen == scenarios[1]) { - afd_hist_temp <- afd_hist - } else { - afd_hist_temp <- rbind(afd_hist_temp, afd_hist) - } - } - afd <- rbind(afd, afd_hist) - - # Unit conversion - unit_conv <- unit_conversion(variable) - afd$value <- afd$value * unit_conv$convert - afd$year <- ttoyear(afd$t) - - p_stacked <- ggplot(subset(afd, n %in% regions & (!str_detect(file, "historical") & !str_detect(file, "valid"))), aes(ttoyear(t), value, fill = n)) + - geom_area(stat = "identity", size = 1.5) + - xlab("year") + - ylab(unit_conv$unit) + - scale_fill_manual(values = region_palette) + - xlim(yearlim[1], yearlim[2]) - # p_stacked <- p_stacked + geom_area(data=subset(afd, n %in% regions & str_detect(file, "historical")),aes(year, value, fill=n), linetype = "solid", stat="identity", size=1.0) - # p_stacked <- p_stacked + 
geom_area(data=subset(afd, n %in% regions & str_detect(file, "valid")),aes(year, value, fill=n), size=4.0) - # legends: - p_stacked <- p_stacked + theme(text = element_text(size = 16), legend.position = "bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title = element_blank()) + guides(fill = guide_legend(title = NULL, nrow = 2)) - if (!is.null(scenarios)) p_stacked <- p_stacked + facet_wrap(. ~ file) - if(nrow(afd)>0) print(p_stacked + labs(title = variable)) - }) - - - - - - - - - # MAIN CODE FOR PLOTLY GENERATION (copied from standard ggplot) - output$gdxompaRplotly <- renderPlotly({ - assign("historical", input$add_historical, envir = .GlobalEnv) - ylim_zero <- input$ylim_zero - field_show <- input$field - growth_rate <- input$growth_rate - plotly_dynamic <- input$plotly_dynamic - variable <- input$variable_selected - if (is.null(variable)) variable <- list_of_variables[1] - # get data - afd <- get_witch(variable, check_calibration = TRUE, field = field_show) - if (verbose) print(str_glue("Variable {variable} loaded.")) - # get the name of the additional set - additional_sets <- setdiff(colnames(afd), c(file_group_columns, "pathdir", "t", "n", "value")) - # extract additional set elements - if (length(additional_sets) == 0) { - additional_set_id <- "na" - set_elements <- "na" - additional_set_id2 <- "na" - set_elements2 <- "na" - } else if (length(additional_sets) == 1) { - additional_set_id <- additional_sets[1] - set_elements <- unique(tolower(as.data.frame(afd)[, match(additional_set_id, colnames(afd))])) - additional_set_id2 <- "na" - set_elements2 <- "na" - } - - # Selector for additional set - output$choose_additional_set <- renderUI({ - variable <- variable_selected_reactive() - if (is.null(variable)) variable <- list_of_variables[1] - sel <- input$additional_set_id_selected - size_elements <- min(length(set_elements), 5) - selectInput("additional_set_id_selected", "Additional set element", 
set_elements, size = size_elements, selectize = F, multiple = T, selected = sel) - }) - - # get input from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - regions <- input$regions_selected - scenarios <- input$scenarios_selected - - # in case they have not yet been set, set to default values - if (is.null(regions)) regions <- display_regions - if (is.null(additional_set_selected)) additional_set_selected <- set_elements[1] - if ((additional_set_id != "na" & additional_set_selected[1] == "na") | !(additional_set_selected[1] %in% set_elements)) additional_set_selected <- set_elements[1] - - # SUBSET data and PLOT - # choose additional selected element - if (additional_set_id != "na") { - afd[[additional_set_id]] <- tolower(afd[[additional_set_id]]) # to fix erroneous gams cases (y and Y etc.) - afd <- subset(afd, get(additional_set_id) %in% additional_set_selected) - afd[[additional_set_id]] <- NULL # remove additional set column - # afd$t <- as.character(afd$t) - if (length(additional_set_selected) > 1) { - afd <- afd %>% - group_by_at(setdiff(names(afd), "value")) %>% - summarize(value = sum(value)) - } - } - - # time frame - afd <- subset(afd, ttoyear(t) >= yearlim[1] & ttoyear(t) <= yearlim[2]) - # clean data - afd <- afd %>% filter(!is.na(value)) - - # Computation of World/glboal sum/average - # now based on meta param to guess max, mean, sum - if (nrow(afd) > 0) { - afd_global <- afd - afd_global$n <- NULL - if (variable %in% default_meta_param()$parameter) { - if (default_meta_param()[parameter == variable & type == "nagg"]$value == "sum") { - afd_global <- afd_global %>% - group_by_at(setdiff(names(afd_global), "value")) %>% - summarize(value = sum(value), .groups = "drop") - } else if (default_meta_param()[parameter == variable & type == "nagg"]$value == "mean") { - afd_global <- afd_global %>% - group_by_at(setdiff(names(afd_global), "value")) %>% - summarize(value = mean(value), .groups = "drop") - 
} - } else { - afd_global <- afd_global %>% - group_by_at(setdiff(names(afd_global), "value")) %>% - summarize(value = sum(value), .groups = "drop") - } - afd_global <- afd_global %>% - mutate(n = "World") %>% - as.data.frame() - afd <- rbind(afd, afd_global[, names(afd)]) - } - - # in case growth rates - if (growth_rate) { - afd <- afd %>% - group_by_at(setdiff(names(afd), c("t", "value"))) %>% - arrange(t) %>% - mutate(year = ttoyear(t), growthrate = ((value / lag(value))^(1 / (year - lag(year))) - 1) * 100) %>% - select(-year, -value) %>% - dplyr::rename(value = growthrate) %>% - ungroup() - } - - # scenarios, potentially add stochastic scenarios to show - afd <- subset(afd, file %in% c(scenarios, paste0(scenarios, "(b1)"), paste0(scenarios, "(b2)"), paste0(scenarios, "(b3)")) | str_detect(file, "historical") | str_detect(file, "valid")) - - # Unit conversion - unit_conv <- unit_conversion(variable) - if (growth_rate) unit_conv$unit <- " % p.a." - unit_conv$convert <- 1 - afd$value <- afd$value * unit_conv$convert - afd$year <- ttoyear(afd$t) - - if (regions[1] == "World" | length(regions) == 1) { # if only World is displayed or only one region, show files with colors - p_dyn <- ggplot(subset(afd, n %in% regions & (!str_detect(file, "historical") & !str_detect(file, "valid"))), aes(year, value, colour = file)) + - geom_line(stat = "identity", size = 1.5) + - xlab("year") + - ylab(unit_conv$unit) + - xlim(yearlim[1], yearlim[2]) - if (ylim_zero) p_dyn <- p_dyn + ylim(0, NA) - # p_dyn <- p_dyn + geom_line(data=subset(afd, n %in% regions & str_detect(file, "historical")),aes(year,value,colour=file), stat="identity", size=1.0, linetype="solid") - p_dyn <- p_dyn + geom_point(data = subset(afd, n %in% regions & str_detect(file, "valid")), aes(year, value, colour = file), size = 4.0, shape = 18) - # legends: - p_dyn <- p_dyn + theme(text = element_text(size = 16), legend.position = "bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = 
element_rect(colour = NA), legend.title = element_blank()) + guides(color = guide_legend(title = NULL)) - } else { - p_dyn <- ggplot(subset(afd, n %in% regions & (!str_detect(file, "historical") & !str_detect(file, "valid"))), aes(year, value, colour = n, linetype = file)) + - geom_line(stat = "identity", linewidth = 1.5) + - xlab("year") + - ylab(unit_conv$unit) + - scale_colour_manual(values = region_palette) + - xlim(yearlim[1], yearlim[2]) - # if("historical" %in% unique(allfilesdata %>% filter(n %in% regions))$file) p_dyn <- p_dyn + geom_line(data=subset(afd, n %in% regions & str_detect(file, "historical")),aes(ttoyear(t),value,colour=n), linetype = "solid", stat="identity", size=1.0) - if("valid" %in% unique(allfilesdata %>% filter(n %in% regions))$file) p_dyn <- p_dyn + geom_point(data = subset(afd, n %in% regions & str_detect(file, "valid")), aes(year, value, shape = file), size = 4.0) - # legends: - p_dyn <- p_dyn + theme(text = element_text(size = 16), legend.position = "bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title = element_blank()) + guides(color = guide_legend(title = NULL, nrow = 2), linetype = guide_legend(title = NULL)) - } - if (length(fullpathdir) != 1) { - p_dyn <- p_dyn + facet_grid(. 
~ pathdir) - } - p_dyn <- p_dyn + theme(legend.position = "none") - if(nrow(afd)>0) print(p_dyn) - if(length(ggplot_build(p_dyn)$data[[1]]) > 0) ggplotly() - }) - - - - output$gdxcompaRmap <- renderPlot({ - # get input from sliders/buttons - variable <- input$variable_selected - yearlim <- input$yearlim - scenarios <- input$scenarios_selected - map_new(variable, yearmap = yearlim[2], scenplot = scenarios, title = str_glue("{variable} in {yearlim[2]}")) - }) - - - output$diagnostics <- renderPlot({ - # get input from sliders/buttons - variable <- input$variable_selected - yearlim <- input$yearlim - scenarios <- input$scenarios_selected - - elapsed <- get_witch("elapsed") - if (!exists("elapsed")) elapsed <- data.frame(file = scenlist, value = 0) - Y <- get_witch("Y") - TATM <- get_witch("TATM") - MIU <- get_witch("MIU") - l <- get_witch("l") - # compute Gini index - gini <- Y %>% - left_join(l %>% rename(pop = value), by = c("t", "n", "file", "pathdir")) %>% - group_by(t, file, pathdir) %>% - summarize(value = reldist::gini(value / pop, weights = pop)) - # style - diagplot <- list() - for(p in subdir){ - diagplot[[p]] <- ggarrange( - ggplot(elapsed %>% filter(file %in% scenarios & pathdir==p)) + - geom_bar(aes(file, value, fill = file), stat = "identity") + - ylab("Run time (minutes)") + ylim(0, max(elapsed$value)) + - theme(axis.text.x = element_text(angle = 90, hjust = 1)) + - theme(axis.title.x = element_blank(), axis.text.x = element_blank(), axis.ticks.x = element_blank()) + ggtitle(p) + - scale_y_continuous(limits = c(0, max(elapsed$value)), labels = function(l) strftime(as.POSIXct(l, origin = "1970-01-01"), "%M:%S")) - , - ggarrange( - ggplot(MIU %>% group_by(t, file, pathdir) %>% summarise(value = mean(value)) %>% filter(file %in% scenarios & pathdir==p)) + - geom_line(aes(ttoyear(t), value, color = file), linewidth = 1) + - ylab("MIU") + - xlab(""), - ggplot(Y %>% filter(file %in% scenarios & pathdir==p) %>% group_by(t, file, pathdir) %>% summarise(value = 
sum(value))) + - geom_line(aes(ttoyear(t), value, color = file), linewidth = 1) + - ylab("GDP [T$]") + - xlab(""), - ncol = 2, common.legend = T, legend = "none" - ), - ggarrange( - ggplot(TATM %>% filter(file %in% scenarios & pathdir==p & !is.na(value))) + - geom_line(aes(ttoyear(t), value, color = file), linewidth = 1) + - ylab("TATM") + - xlab(""), - ggplot(gini %>% filter(file %in% scenarios & pathdir==p)) + - geom_line(aes(ttoyear(t), value, color = file), linewidth = 1) + - ylab("Gini index") + - xlab("") + - ylim(0, 1), - ncol = 2, common.legend = T, legend = "none" - ), - nrow = 3, common.legend = T, legend = "bottom" - ) - } - diagplot_all <- ggarrange(plotlist = diagplot, ncol = length(diagplot), common.legend = T) - print(diagplot_all) - }) - - - output$inequalityplot <- renderPlot({ - # get input from sliders/buttons - variable_ineq <- input$variable_selected - yearlim <- input$yearlim - regions <- input$regions_selected - scenarios <- input$scenarios_selected - inequality_plot_type_selected <- input$inequality_plot_type_selected - inequality_value_share <- input$inequality_value_share - plot_inequality(variable = variable_ineq, plot_type = inequality_plot_type_selected, value_share = inequality_value_share, quantile_set = "dist", regions = regions[1], years = seq(yearlim[1], yearlim[2]), years_lorenz = range(yearlim[1], yearlim[2]), scenplot = scenarios) - }) - - output$tatmplot <- renderPlot({ - yearlim <- input$yearlim - scenarios <- input$scenarios_selected - gridded_temp_map(yearplot = yearlim[2], scenplot = scenarios, pathadj = "../../") - }) - - - output$iterationplot <- renderPlot({ - yearlim <- input$yearlim - scenarios <- input$scenarios_selected - regions <- input$regions_selected - viter <- get_witch("viter") - # Assuming viter is your dataframe and 'value' is the column with actual values - # First, group by the variables that define your sequences - viter <- viter %>% group_by(n, file, pathdir, v, iter) %>% arrange(t) %>% 
mutate(seen_nonzero = cumsum(value != 0) > 0) %>% complete(t) %>% mutate(value = ifelse(is.na(value) & !seen_nonzero, 0, value)) %>% select(-seen_nonzero) %>% ungroup() - viter <- viter %>% group_by(n, file, pathdir, v, iter) %>% summarise(value = mean(value[ttoyear(t) >= yearlim[1] & ttoyear(t) <= yearlim[2]])) - viter <- viter %>% filter(file %in% scenarios) - if(regions[1]!="World") viter <- viter %>% filter(n %in% regions) - p_iter <- ggplot(viter) + geom_line(aes(iter, value, color=n, group=n)) + facet_grid(v ~ file, scales = "free_y") + theme(legend.position = "none") - print(p_iter) - #ggplotly() - }) - - - -}) diff --git a/gdxcompaR/witch/server.R b/gdxcompaR/witch/server.R deleted file mode 100644 index 0198711..0000000 --- a/gdxcompaR/witch/server.R +++ /dev/null @@ -1,568 +0,0 @@ -# Define server - -#require packages if online deployed -if(deploy_online){ - suppressPackageStartupMessages(require(tidyverse)) - require(plotly) - require(shinyWidgets) - add_historical_values <- function(x, varname, check_calibration, iiasadb, verbose){return(x)} - get_witch <- function(variable, check_calibration, field){return(allvariables[[variable]])} -} - -shinyServer(function(input, output, session) { - - #some global flags - verbose = FALSE - - # Get list of variables and parameters in all files - list_of_variables <- NULL - for(f in filelist){ - .gdx <- gdx(paste(file.path(fullpathdir[1], f),".gdx",sep="")) - # Select all variables and parameters - # -> with "t" in their domain names - # -> with dimension <= 3 - for (item in c("variables", "parameters")) { - info_item <- .gdx[[item]] - info_item <- info_item[info_item$dim <= 4,] - info_item <- info_item[sapply(info_item$domnames, function(x) "t" %in% x),] - list_of_variables <- c(list_of_variables, info_item$name) - } - } - list_of_variables <- unique(list_of_variables) - list_of_variables <- c(sort(str_subset(list_of_variables, "^[:upper:]")), - sort(str_subset(list_of_variables, "^[:lower:]"))) - #Scenario selector 
- output$select_scenarios <- renderUI({ - selectInput(inputId = "scenarios_selected", - label = "Scenarios:", - choices = unname(scenlist), - size = length(scenlist), - selectize = FALSE, - multiple = TRUE, - selected = unname(scenlist)) # Select all scenarios by default - }) - - - #Variable selector - output$select_variable <- renderUI({ - pickerInput( - inputId = "variable_selected", - label = "Variable:", - choices = list_of_variables, - selected = "Q_EMI", - options = list( - `live-search` = TRUE) - ) - }) - - # Reactively update variable selector - variable_input <- reactive({ - return(input$variable_selected) - }) - - #Display selected variable and set - output$varname <- renderText({ - paste0(variable_input(), - "|", str_trunc(paste(input$additional_set_id_selected, - collapse=","), 20), - ifelse(is.null(input$additional_set_id_selected2) | - input$additional_set_id_selected2 == "na" , - "", - paste0("|", str_trunc(paste(input$additional_set_id_selected2, - collapse=","), 20))), - "|", str_trunc(paste(input$regions_selected, - collapse=","), 10)) - }) - - #REGION selector - output$select_regions <- renderUI({ - regions_for_selector <- list(Aggregate = list("World", "EU"), - `Native regions` = witch_regions) - selectInput(inputId = "regions_selected", - label = "Regions:", - regions_for_selector, - size = max(10, length(regions_for_selector)), - selectize = FALSE, - multiple = TRUE, - selected = "World") - }) - - observeEvent(input$button_saveplotdata, { - variable <- input$variable_selected - print("Current plot saved in subdirectory 'graphs'") - saveplot(variable, width = 14, height = 7) - }) - - # MAIN CODE FOR PLOT GENERATION - output$gdxompaRplot <- renderPlot({ - assign("historical", input$add_historical, envir = .GlobalEnv) - ylim_zero <- input$ylim_zero - field_show <- input$field - #plotly_dynamic <- input$plotly_dynamic - variable <- input$variable_selected - if(is.null(variable)) variable <- list_of_variables[1] - #get data - afd <- 
get_witch(variable, check_calibration=T, field = field_show) - if(verbose) print(str_glue("Variable {variable} loaded.")) - #get the name of the additional set - additional_sets <- setdiff(colnames(afd), c(file_group_columns, "pathdir", "t", "n", "value")) - #extract additional set elements - if(length(additional_sets)==0){additional_set_id="na"; set_elements = "na"; additional_set_id2="na"; set_elements2 = "na"} - else if(length(additional_sets)==1) - { - additional_set_id <- additional_sets[1] - set_elements <- unique(tolower(as.data.frame(afd)[, match(additional_set_id, colnames(afd))])) - set_elements <- sort(set_elements) - additional_set_id2 <- "na" - set_elements2 <- "na" - } - else if(length(additional_sets)==2) - { - additional_set_id <- additional_sets[1] - set_elements <- unique(tolower(as.data.frame(afd)[, match(additional_set_id, colnames(afd))])) - set_elements <- sort(set_elements) - additional_set_id2 <- additional_sets[2] - set_elements2 <- unique(tolower(as.data.frame(afd)[, match(additional_set_id2, colnames(afd))])) - set_elements2 <- sort(set_elements2) - } - - #Selector for additional set - output$choose_additional_set <- renderUI({ - variable <- variable_input() - if (is.null(variable)) { - variable <- list_of_variables[1] - } - sel <- input$additional_set_id_selected - if (is.null(sel)) { - if ("co2_ffi" %in% set_elements) { - sel <- "co2_ffi" - } else { - sel <- set_elements[1] - } - } - size_elements <- min(length(set_elements), 5) - selectInput(inputId = "additional_set_id_selected", - label = "Indices 1:", - choices = set_elements, - size = size_elements, - selectize = FALSE, - multiple = TRUE, - selected = sel) - }) - #Selector for additional set #2 - output$choose_additional_set2 <- renderUI({ - variable <- variable_input() - if(is.null(variable)) variable <- list_of_variables[1] - sel2 <- input$additional_set_id_selected2 - size_elements2 <- min(length(set_elements2), 5) - selectInput(inputId = "additional_set_id_selected2", - label = 
"Indices 2:", - choices = set_elements2, - size = size_elements2, - selectize = FALSE, - multiple = TRUE, - selected = sel2) - }) - - #get input from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - additional_set_selected2 <- input$additional_set_id_selected2 - regions <- input$regions_selected - scenarios <- input$scenarios_selected - - #in case they have not yet been set, set to default values - if (is.null(regions)) { - regions <- display_regions - } - if (is.null(additional_set_selected)) { - additional_set_selected <- set_elements[1] - } - if((additional_set_id!="na" & additional_set_selected[1]=="na") | !(additional_set_selected[1] %in% set_elements)) additional_set_selected <- set_elements[1] - if(is.null(additional_set_selected2)) additional_set_selected2 <- set_elements2[1] - if((additional_set_id2!="na" & additional_set_selected2[1]=="na") | !(additional_set_selected2[1] %in% set_elements2)) additional_set_selected2 <- set_elements2[1] - - # SUBSET data and PLOT - #choose additional selected element - if(additional_set_id!="na"){ - afd[[additional_set_id]] <- tolower(afd[[additional_set_id]]) # to fix erroneous gams cases (y and Y etc.) - afd <- subset(afd, get(additional_set_id) %in% additional_set_selected) - afd[[additional_set_id]] <- NULL #remove additional set column - #afd$t <- as.character(afd$t) - if(length(additional_set_selected) >1) afd <- afd %>% group_by_at(setdiff(names(afd), "value")) %>% summarize(value=sum(value), .groups = 'drop') - } - if(additional_set_id2!="na"){ - afd[[additional_set_id2]] <- tolower(afd[[additional_set_id2]]) # to fix erroneous gams cases (y and Y etc.) 
- afd <- subset(afd, get(additional_set_id2) %in% additional_set_selected2) - afd[[additional_set_id2]] <- NULL #remove additional set column - if(length(additional_set_selected2) >1) afd <- afd %>% group_by_at(setdiff(names(afd), "value")) %>% summarize(value=sum(value), .groups = 'drop') - } - - #time frame - if (input$time_filter) { - afd <- subset(afd, ttoyear(t) >= yearlim[1] & ttoyear(t) <= yearlim[2]) - } - #clean data - afd <- afd %>% filter(!is.na(value)) - - #Computation of World/global sum/average - #now based on meta param to guess max, mean, sum - if(nrow(afd)>0){ - afd_global <- afd - afd_global$n <- NULL - if(variable %in% default_meta_param()$parameter){ - if(default_meta_param()[parameter==variable & type=="nagg"]$value=="sum"){ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) %>% summarize(value=sum(value), .groups = 'drop') - }else if(default_meta_param()[parameter==variable & type=="nagg"]$value=="mean"){ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) %>% summarize(value=mean(value), .groups = 'drop') - } - }else{ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) %>% summarize(value=sum(value), .groups = 'drop') - } - afd_global <- afd_global %>% mutate(n = "World") %>% as.data.frame() - afd <- rbind(afd, afd_global[, names(afd)]) - } - #same for EU - if(nrow(afd)>0){ - eu <- get_witch("eu"); if(!exists("eu")) eu_regions <- c("europe") else eu_regions <- unique(eu$n) - afd_global <- afd %>% filter(n %in% eu_regions) - afd_global$n <- NULL - if(variable %in% default_meta_param()$parameter){ - if(default_meta_param()[parameter==variable & type=="nagg"]$value=="sum"){ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) %>% summarize(value=sum(value), .groups = 'drop') - }else if(default_meta_param()[parameter==variable & type=="nagg"]$value=="mean"){ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) 
%>% summarize(value=mean(value), .groups = 'drop') - } - }else{ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) %>% summarize(value=sum(value), .groups = 'drop') - } - afd_global <- afd_global %>% mutate(n = "EU") %>% as.data.frame() - afd <- rbind(afd, afd_global[, names(afd)]) - } - - #scenarios, potentially add stochastic scenarios to show - afd <- subset(afd, file %in% c(scenarios, paste0(scenarios, "(b1)"),paste0(scenarios, "(b2)"), paste0(scenarios, "(b3)")) | str_detect(file, "historical") | str_detect(file, "valid")) - - #Unit conversion - unit_conv <- unit_conversion(variable) - afd$value <- afd$value * unit_conv$convert - afd$year <- ttoyear(afd$t) - - # If only World/EU is displayed or only one region, show files with colors - if ( length(regions)==1 | (length(regions) == 1 & regions[1] %in% c("World","EU"))) { - p <- ggplot(subset(afd, n %in% regions & - !str_detect(file, "historical") & - !str_detect(file, "valid")), - aes(ttoyear(t), value, colour=file)) + - geom_line(stat="identity", linewidth=1.5) + - xlab(NULL) + - ylab(unit_conv$unit) + - coord_cartesian(xlim = yearlim) - - # Add a horizontal line at y=0 - if(ylim_zero) { - p <- p + geom_hline(yintercept = 0, alpha = 0.5) - } - - p <- p + geom_line(data=subset(afd, n %in% regions & str_detect(file, "historical")),aes(year,value,colour=file), stat="identity", linewidth=1.0, linetype="solid") - p <- p + geom_point(data=subset(afd, n %in% regions & str_detect(file, "valid")),aes(year,value,colour=file), size=4.0, shape=18) - #legends: - p <- p + theme(text = element_text(size=16), legend.position="bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL)) - }else{ - p <- ggplot(subset(afd, n %in% regions & - !str_detect(file, "historical") & - !str_detect(file, "valid")), - aes(ttoyear(t), value, colour=n, linetype=file)) + - geom_line(stat="identity", 
linewidth=1.5) + - xlab(NULL) + - ylab(unit_conv$unit) + - scale_colour_manual(values = region_palette) + - coord_cartesian(xlim = yearlim) - - # Add a horizontal line at y=0 - if(ylim_zero) { - p <- p + geom_hline(yintercept = 0, alpha = 0.5) - } - - p <- p + geom_line(data=subset(afd, n %in% regions & str_detect(file, "historical")),aes(year, value, colour=n, group=interaction(n, file)), linetype = "solid", stat="identity", linewidth=1.0) - p <- p + geom_point(data=subset(afd, n %in% regions & str_detect(file, "valid")),aes(year, value, colour=n, shape=file), size=4.0) - #legends: - p <- p + theme(text = element_text(size=16), legend.position="bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL, nrow = 2), linetype=guide_legend(title=NULL)) - - } - if(length(fullpathdir)!=1){p <- p + facet_grid(. ~ pathdir)} - if(nrow(afd)>0) print(p + labs(title=variable)) - }) - - ################################################################################################ - - # MAIN CODE FOR PLOTLY GENERATION (copied from standard ggplot) - output$gdxompaRplotly <- renderPlotly({ - assign("historical", input$add_historical, envir = .GlobalEnv) - ylim_zero <- input$ylim_zero - field_show <- input$field - plotly_dynamic <- input$plotly_dynamic - variable <- input$variable_selected - if(is.null(variable)) variable <- list_of_variables[1] - #get data - afd <- get_witch(variable, check_calibration=TRUE, field = field_show) - if(verbose) print(str_glue("Variable {variable} loaded.")) - #get the name of the additional set - additional_sets <- setdiff(colnames(afd), c(file_group_columns, "pathdir", "t", "n", "value")) - #extract additional set elements - if(length(additional_sets)==0){additional_set_id="na"; set_elements = "na"; additional_set_id2="na"; set_elements2 = "na"} - else if(length(additional_sets)==1) - { - additional_set_id <- additional_sets[1] - 
set_elements <- unique(tolower(as.data.frame(afd)[, match(additional_set_id, colnames(afd))])) - additional_set_id2="na"; set_elements2 = "na" - } - else if(length(additional_sets)==2) - { - additional_set_id <- additional_sets[1] - set_elements <- unique(tolower(as.data.frame(afd)[, match(additional_set_id, colnames(afd))])) - additional_set_id2 <- additional_sets[2] - set_elements2 <- unique(tolower(as.data.frame(afd)[, match(additional_set_id2, colnames(afd))])) - } - - #Selector for additional set - output$choose_additional_set <- renderUI({ - variable <- variable_input() - if(is.null(variable)) variable <- list_of_variables[1] - sel <- input$additional_set_id_selected - size_elements <- min(length(set_elements), 5) - selectInput("additional_set_id_selected", "Additional set element", set_elements, size=size_elements, selectize = F, multiple = T, selected = sel) - }) - #Selector for additional set #2 - output$choose_additional_set2 <- renderUI({ - variable <- variable_input() - if(is.null(variable)) variable <- list_of_variables[1] - sel2 <- input$additional_set_id_selected2 - size_elements2 <- min(length(set_elements2), 5) - selectInput("additional_set_id_selected2", "Additional set element 2", set_elements2, size=size_elements2, selectize = F, multiple = T, selected = sel2) - }) - - #get input from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - additional_set_selected2 <- input$additional_set_id_selected2 - regions <- input$regions_selected - scenarios <- input$scenarios_selected - - #in case they have not yet been set, set to default values - if(is.null(regions)) regions <- display_regions - if(is.null(additional_set_selected)) additional_set_selected <- set_elements[1] - if((additional_set_id!="na" & additional_set_selected[1]=="na") | !(additional_set_selected[1] %in% set_elements)) additional_set_selected <- set_elements[1] - if(is.null(additional_set_selected2)) additional_set_selected2 <- 
set_elements2[1] - if((additional_set_id2!="na" & additional_set_selected2[1]=="na") | !(additional_set_selected2[1] %in% set_elements2)) additional_set_selected2 <- set_elements2[1] - - # SUBSET data and PLOT - #choose additional selected element - if(additional_set_id!="na"){ - afd[[additional_set_id]] <- tolower(afd[[additional_set_id]]) # to fix erroneous gams cases (y and Y etc.) - afd <- subset(afd, get(additional_set_id) %in% additional_set_selected) - afd[[additional_set_id]] <- NULL #remove additional set column - #afd$t <- as.character(afd$t) - if(length(additional_set_selected) >1) afd <- afd %>% group_by_at(setdiff(names(afd), "value")) %>% summarize(value=sum(value), .groups = 'drop') - } - if(additional_set_id2!="na"){ - afd[[additional_set_id2]] <- tolower(afd[[additional_set_id2]]) # to fix erroneous gams cases (y and Y etc.) - afd <- subset(afd, get(additional_set_id2) %in% additional_set_selected2) - afd[[additional_set_id2]] <- NULL #remove additional set column - if(length(additional_set_selected2) >1) afd <- afd %>% group_by_at(setdiff(names(afd), "value")) %>% summarize(value=sum(value), .groups = 'drop') - } - - #time frame - afd <- subset(afd, ttoyear(t)>= yearlim[1] & ttoyear(t) <= yearlim[2]) - #clean data - afd <- afd %>% filter(!is.na(value)) - - #Computation of World/glboal sum/average - #now based on meta param to guess max, mean, sum - if(nrow(afd)>0){ - afd_global <- afd - afd_global$n <- NULL - if(variable %in% default_meta_param()$parameter){ - if(default_meta_param()[parameter==variable & type=="nagg"]$value=="sum"){ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) %>% summarize(value=sum(value), .groups = 'drop') - }else if(default_meta_param()[parameter==variable & type=="nagg"]$value=="mean"){ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) %>% summarize(value=mean(value), .groups = 'drop') - }else{ - afd_global <- afd_global %>% 
group_by_at(setdiff(names(afd_global), "value")) %>% summarize(value=sum(value), .groups = 'drop') - } - } - afd_global <- afd_global %>% mutate(n = "World") %>% as.data.frame() - afd <- rbind(afd, afd_global[, names(afd)]) - } - #same for EU - if(nrow(afd)>0){ - eu <- get_witch("eu"); if(!exists("eu")) eu_regions <- c("europe") else eu_regions <- unique(eu$n) - afd_global <- afd %>% filter(n %in% eu_regions) - afd_global$n <- NULL - if(variable %in% default_meta_param()$parameter){ - if(default_meta_param()[parameter==variable & type=="nagg"]$value=="sum"){ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) %>% summarize(value=sum(value), .groups = 'drop') - }else if(default_meta_param()[parameter==variable & type=="nagg"]$value=="mean"){ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) %>% summarize(value=mean(value), .groups = 'drop') - } - }else{ - afd_global <- afd_global %>% group_by_at(setdiff(names(afd_global), "value")) %>% summarize(value=sum(value), .groups = 'drop') - } - afd_global <- afd_global %>% mutate(n = "EU") %>% as.data.frame() - afd <- rbind(afd, afd_global[, names(afd)]) - } - - #scenarios, potentially add stochastic scenarios to show - afd <- subset(afd, file %in% c(scenarios, paste0(scenarios, "(b1)"),paste0(scenarios, "(b2)"), paste0(scenarios, "(b3)")) | str_detect(file, "historical") | str_detect(file, "valid")) - - #Unit conversion - unit_conv <- unit_conversion(variable) - afd$value <- afd$value * unit_conv$convert - afd$year <- ttoyear(afd$t) - - if(regions[1]=="World" | regions[1]=="EU" | length(regions)==1){#if only World is displayed or only one region, show files with colors - p_dyn <- ggplot(subset(afd, n %in% regions & (!str_detect(file, "historical") & !str_detect(file, "valid"))),aes(year,value,colour=file)) + geom_line(stat="identity", linewidth=1.5) + xlab(NULL) + ylab(unit_conv$unit) + xlim(yearlim[1],yearlim[2]) - - if(nrow(afd %>% filter(n %in% regions & 
str_detect(file, "historical"))) > 0 ) p_dyn <- p_dyn + geom_line(data=subset(afd, n %in% regions & str_detect(file, "historical")),aes(year,value,colour=file), stat="identity", linewidth=1.0, linetype="solid") - if(nrow(afd %>% filter(n %in% regions & str_detect(file, "valid"))) > 0 ) p_dyn <- p_dyn + geom_point(data=subset(afd, n %in% regions & str_detect(file, "valid")),aes(year,value,colour=file), size=4.0, shape=18) - - # Add a horizontal line at y=0 - if(ylim_zero) { - p <- p + geom_hline(yintercept = 0, alpha = 0.5) - } - - if("valid" %in% unique(afd %>% filter(n %in% regions))$file) p_dyn <- p_dyn + geom_point(data=subset(afd, n %in% regions & str_detect(file, "valid")),aes(year,value,colour=file), size=4.0, shape=18) - - #legends: - p_dyn <- p_dyn + theme(text = element_text(size=16), legend.position="bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL)) - }else{ - p_dyn <- ggplot(subset(afd, n %in% regions & (!str_detect(file, "historical") & !str_detect(file, "valid"))),aes(year,value,colour=n, linetype=file)) + geom_line(stat="identity", linewidth=1.5) + xlab(NULL) + ylab(unit_conv$unit) + scale_colour_manual(values = region_palette) + xlim(yearlim[1],yearlim[2]) - if("historical" %in% unique(afd %>% filter(n %in% regions))$file) p_dyn <- p_dyn + geom_line(data=subset(afd, n %in% regions & str_detect(file, "historical")),aes(ttoyear(t),value,colour=n), linetype = "solid", stat="identity", size=1.0) - if("valid" %in% unique(afd %>% filter(n %in% regions))$file) p_dyn <- p_dyn + geom_point(data=subset(afd, n %in% regions & str_detect(file, "valid")),aes(year,value, shape=file), size=4.0) - #legends: - p_dyn <- p_dyn + theme(text = element_text(size=16), legend.position="bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title=element_blank()) + 
guides(color=guide_legend(title=NULL, nrow = 2), linetype=guide_legend(title=NULL)) - } - if(length(fullpathdir)!=1){p_dyn <- p_dyn + facet_grid(. ~ pathdir)} - p_dyn <- p_dyn + theme(legend.position = "none") - if(nrow(afd)>0) print(p_dyn) - if(length(ggplot_build(p_dyn)$data[[1]]) > 0) ggplotly() - }) - - output$inequalityplot <- renderPlot({ - #get input from sliders/buttons - variable_ineq <- input$variable_selected - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - additional_set_selected2 <- input$additional_set_id_selected2 - regions <- input$regions_selected - scenarios <- input$scenarios_selected - inequality_plot_type_selected <- input$inequality_plot_type_selected - inequality_value_share <- input$inequality_value_share - plot_inequality(variable = variable_ineq, plot_type = inequality_plot_type_selected, value_share = inequality_value_share, quantile_set = "dist", regions = regions[1], years = seq(yearlim[1], yearlim[2]), years_lorenz = range(yearlim[1], yearlim[2]), scenplot = scenarios) - }) - - - output$Diagnostics <- renderPlot({ - #get input from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - additional_set_selected2 <- input$additional_set_id_selected2 - regions <- input$regions_selected - scenarios <- input$scenarios_selected - diagnostics_plots(scenplot = scenarios) - }) - - - output$energymixplot <- renderPlot({ - #get input from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - additional_set_selected2 <- input$additional_set_id_selected2 - regions <- input$regions_selected - scenarios <- input$scenarios_selected - mix_plot_type_selected <- input$mix_plot_type_selected - mix_y_value_selected <- input$mix_y_value_selected - Primary_Energy_Mix(PES_y = mix_y_value_selected, regions = regions[1], years = seq(yearlim[1], yearlim[2], 1), plot_type = mix_plot_type_selected, scenplot = scenarios) 
- }) - - output$electricitymixplot <- renderPlot({ - #get input from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - additional_set_selected2 <- input$additional_set_id_selected2 - regions <- input$regions_selected - scenarios <- input$scenarios_selected - mix_plot_type_selected <- input$mix_plot_type_selected - mix_y_value_selected <- input$mix_y_value_selected - Electricity_Mix(Electricity_y = mix_y_value_selected, regions = regions[1], years = seq(yearlim[1], yearlim[2], 1), plot_type = mix_plot_type_selected, scenplot = scenarios) - }) - - output$investmentplot <- renderPlot({ - #get input from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - additional_set_selected2 <- input$additional_set_id_selected2 - regions <- input$regions_selected - scenarios <- input$scenarios_selected - Investment_Plot(regions="World", scenplot = scenarios) - }) - - output$policycostplot <- renderPlot({ - #get input from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - additional_set_selected2 <- input$additional_set_id_selected2 - regions <- input$regions_selected - scenarios <- input$scenarios_selected - Policy_Cost(discount_rate=5, regions=regions, bauscen = scenarios[1], show_numbers=TRUE, tmax=yeartot(yearlim[2])) - }) - - output$intensityplot <- renderPlot({ - #get input from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - additional_set_selected2 <- input$additional_set_id_selected2 - regions <- input$regions_selected - scenarios <- input$scenarios_selected - Intensity_Plot(years=c(yearlim[2], yearlim[2]-50), regions = regions, year0=2010, scenplot = scenarios, animate_plot = FALSE) - }) - - output$impactmap <- renderPlot({ - #get input from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - 
additional_set_selected2 <- input$additional_set_id_selected2 - regions <- input$regions_selected - scenarios <- input$scenarios_selected - - t_map = yeartot(yearlim[2]); bau_scen = scenarios[1] - Q <- get_witch("Q") - impact_map_data <- Q %>% filter(iq=="y" & t==t_map) %>% group_by(n, pathdir) %>% mutate(value = -((value/sum(value[file==bau_scen]))-1)*100) %>% filter(is.finite(value)) - scen <- scenarios[2] - witchmap(impact_map_data, file_report=scen, t_report=t_map, mapcolor="Reds", map_name="Impact Map", map_legend = str_glue("GDP loss [%] in {scen}.")) - }) - - output$climate_plot <- renderPlot({ - #get input from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - additional_set_selected2 <- input$additional_set_id_selected2 - regions <- input$regions_selected - scenarios <- input$scenarios_selected - climate_plot(scenplot = scenarios) - }) - - output$SCC_plot <- renderPlot({ - #get input from sliders/buttons - yearlim <- input$yearlim - additional_set_selected <- input$additional_set_id_selected - additional_set_selected2 <- input$additional_set_id_selected2 - regions <- input$regions_selected - scenarios <- input$scenarios_selected - scc_normalization_region <- input$scc_normalization_region - SCC_plot(scenplot = scenarios, regions = regions, normalization_region = scc_normalization_region) - }) - - - -}) \ No newline at end of file diff --git a/inst/config/README.md b/inst/config/README.md new file mode 100644 index 0000000..5e5134b --- /dev/null +++ b/inst/config/README.md @@ -0,0 +1,72 @@ +# Configuration Files + +This directory contains configuration files for mapping model variables to historical data sources. + +## Variable-Historical Data Mapping Files + +### map_var_hist_rice.csv +Maps RICE50+ model variables to historical data sources and unit conversions. + +### map_var_hist_iiasadb.csv +Maps IAMC variables to WITCH variables and historical data sources. 
+ +## File Format + +Each CSV file should have the following columns: + +- `varname_model`: Variable name in the model/IAMC format +- `set_model`: Set name in model (if applicable) +- `element_model`: Element name in model (if applicable) +- `var_witch`: Corresponding WITCH variable name +- `set_witch`: Set name in WITCH (if applicable) +- `element_witch`: Element name in WITCH +- `conv`: Conversion factor (R expression, e.g., "44/12" for C to CO2) + +## Customizing Mappings + +To customize the variable mappings for your model: + +1. **For RICE50+**: Edit `map_var_hist_rice.csv` or provide your own data.frame to `run_rice(map_var_hist=...)` + +2. **For IIASADB**: Edit `map_var_hist_iiasadb.csv` or provide your own to `run_iiasadb(map_var_hist=...)` + +### Example: Adding a new variable mapping + +```csv +varname_model,set_model,element_model,var_witch,set_witch,element_witch,conv +Temperature,,,TATM,,,1 +GDP,,,Q,iq,y,1e3 +``` + +### Example: Using custom mapping in R + +```r +# Create custom mapping +my_mapping <- data.frame( + varname_model = c("GDP", "Population"), + set_model = c("", ""), + element_model = c("", ""), + var_witch = c("Q", "l"), + set_witch = c("iq", ""), + element_witch = c("y", ""), + conv = c("1e3", "1") +) + +# Evaluate conversion expressions +library(dplyr) +my_mapping <- my_mapping %>% + rowwise() %>% + mutate(conv = eval(parse(text=conv))) + +# Use in run function +run_rice(map_var_hist = my_mapping) +``` + +## Notes + +- Empty cells in CSV should be left blank (no quotes or spaces) +- The `conv` column contains R expressions that are evaluated at runtime +- Common conversions: + - Carbon to CO2: `44/12` + - PJ to EJ: `0.001` + - Million to units: `1e6` diff --git a/inst/config/map_var_hist_iiasadb.csv b/inst/config/map_var_hist_iiasadb.csv new file mode 100644 index 0000000..9016152 --- /dev/null +++ b/inst/config/map_var_hist_iiasadb.csv @@ -0,0 +1,8 @@ +varname_model,set_model,element_model,var_witch,set_witch,element_witch,conv +Primary 
Energy,,,TPES,,,0.0036 +Emissions|CO2,,,Q_EMI,e,co2,1e3*(44/12) +Emissions|CO2|Energy,,,Q_EMI,e,co2ffi,1e3*(44/12) +Emissions|CH4,,,Q_EMI,e,ch4,1e3*(44/12)/28 +Population,,,l,,,1 +GDP|MER,,,Q,iq,y,1e3 +E,ghg,co2,Q_EMI,e,co2,44/12 diff --git a/inst/config/map_var_hist_rice.csv b/inst/config/map_var_hist_rice.csv new file mode 100644 index 0000000..b390890 --- /dev/null +++ b/inst/config/map_var_hist_rice.csv @@ -0,0 +1,10 @@ +varname_model,set_model,element_model,var_witch,set_witch,element_witch,conv +Y,,,Q,iq,y,1 +EIND,ghg,co2,Q_EMI,e,co2_ffi,44/12 +ELAND,,,Q_EMI,e,co2lu,44/12 +E,ghg,ch4,Q_EMI,e,ch4,1e3*44/12/25 +E,ghg,n2o,Q_EMI,e,n2o,1e3*44/12/298 +E,ghg,co2,Q_EMI,e,co2,44/12 +pop,,,l,,,1 +K,,,K,g,fg,1 +I,,,I,g,fg,1 diff --git a/inst/gdxcompaR/fidelio/global.R b/inst/gdxcompaR/fidelio/global.R new file mode 100644 index 0000000..62e0cd7 --- /dev/null +++ b/inst/gdxcompaR/fidelio/global.R @@ -0,0 +1,21 @@ +# Global environment setup for FIDELIO Shiny app +# This file loads required packages and makes functions available + +# Suppress package startup messages and warnings about namespace conflicts +suppressPackageStartupMessages({ + library(shiny) + library(ggplot2) + library(plotly) + library(data.table) + library(dplyr) # Load dplyr after data.table to prioritize dplyr functions + library(stringr) + library(shinyWidgets) + library(gdxtools) + library(witchtools) + library(tidyr) + # Optional packages + if(requireNamespace("arrow", quietly=TRUE)) library(arrow) + if(requireNamespace("ggpubr", quietly=TRUE)) library(ggpubr) + if(requireNamespace("rnaturalearth", quietly=TRUE)) library(rnaturalearth) + if(requireNamespace("sf", quietly=TRUE)) library(sf) +}) diff --git a/inst/gdxcompaR/fidelio/server.R b/inst/gdxcompaR/fidelio/server.R new file mode 100644 index 0000000..9c4dc26 --- /dev/null +++ b/inst/gdxcompaR/fidelio/server.R @@ -0,0 +1,270 @@ +shinyServer(function(input, output, session) { +verbose <- FALSE +growth_rate <- FALSE +list_of_variables <- 
get_gdx_variable_list_simple(results_dir, filelist) +list_of_variables <- str_subset(list_of_variables, pattern="_t$") +output$select_scenarios <- renderUI({selectInput("scenarios_selected", "Select scenarios", unname(scenlist), size=length(scenlist), selectize=FALSE, multiple=TRUE, selected=unname(scenlist))}) +output$select_variable <- renderUI({ + default_var <- if("GDPr_t" %in% list_of_variables) "GDPr_t" else list_of_variables[1] + selectInput("variable_selected", "Select variable", list_of_variables, size=1, selectize=FALSE, multiple=FALSE, selected=default_var) +}) +output$select_regions <- renderUI({regions_for_selector <- c(witch_regions, "World"); selectInput("regions_selected", "Select regions", regions_for_selector, size=min(17, length(regions_for_selector)), selectize=FALSE, multiple=TRUE, selected=witch_regions)}) +variable_selected_reactive <- reactive({input$variable_selected}) +variable_input <- reactive({return(input$variable_selected)}) +output$varname <- renderText({ + var_text <- paste0("Variable: ", variable_selected_reactive()) + if(!is.null(input$additional_set_id_selected) && input$additional_set_id_selected[1] != "na") { + var_text <- paste0(var_text, " - Element: ", paste(input$additional_set_id_selected, collapse=",")) + } + if(!is.null(input$regions_selected) && length(input$regions_selected)==1) { + var_text <- paste0(var_text, " - Region: ", input$regions_selected[1]) + } + var_text +}) +output$varname2 <- renderText({ + var_text <- paste0("Variable: ", variable_selected_reactive()) + if(!is.null(input$additional_set_id_selected) && input$additional_set_id_selected[1] != "na") { + var_text <- paste0(var_text, " - Element: ", paste(input$additional_set_id_selected, collapse=",")) + } + if(!is.null(input$regions_selected) && length(input$regions_selected)==1) { + var_text <- paste0(var_text, " - Region: ", input$regions_selected[1]) + } + var_text +}) +observeEvent(input$button_saveplotdata, { +variable <- input$variable_selected 
+print("Current plot saved in subdirectory 'graphs'") +saveplot(variable, width=14, height=7) +}) +output$gdxcompaRplot <- renderUI({ +show_historical <- input$add_historical # Checkbox controls plot visibility +ylim_zero <- input$ylim_zero +field_show <- input$field +growth_rate <- input$growth_rate +variable <- input$variable_selected +if(is.null(variable)) variable <- list_of_variables[1] +afd <- get_witch(variable, , field=field_show) +if(verbose) print(str_glue("Variable {variable} loaded.")) + +# Check if variable has time dimension +has_time <- "t" %in% names(afd) + +if(!has_time) { + # Show dynamic table instead of plot + set_info <- extract_additional_sets(afd, file_group_columns) + output$choose_additional_set <- renderUI({ + variable <- variable_input() + if(is.null(variable)) variable <- list_of_variables[1] + sel <- input$additional_set_id_selected + if(is.null(sel)){ + if("co2_ffi" %in% set_info$set_elements) sel <- "co2_ffi" else sel <- set_info$set_elements[1] + } + size_elements <- min(length(set_info$set_elements), 5) + selectInput(inputId="additional_set_id_selected", label="Index 1:", choices=set_info$set_elements, size=size_elements, selectize=FALSE, multiple=TRUE, selected=sel) + }) + output$choose_additional_set2 <- renderUI({ + variable <- variable_input() + if(is.null(variable)) variable <- list_of_variables[1] + sel2 <- input$additional_set_id_selected2 + size_elements2 <- min(length(set_info$set_elements2), 5) + selectInput(inputId="additional_set_id_selected2", label="Index 2:", choices=set_info$set_elements2, size=size_elements2, selectize=FALSE, multiple=TRUE, selected=sel2) + }) + + filtered_data <- afd + # Order pathdir factor according to results_dir vector + if("pathdir" %in% names(filtered_data) && length(results_dir) > 1) { + pathdir_levels <- basename(results_dir) + filtered_data$pathdir <- factor(filtered_data$pathdir, levels=pathdir_levels) + } + additional_set_selected <- input$additional_set_id_selected + 
additional_set_selected2 <- input$additional_set_id_selected2 + regions <- input$regions_selected + scenarios <- input$scenarios_selected + + if(is.null(additional_set_selected)) additional_set_selected <- set_info$set_elements[1] + if(set_info$additional_set_id != "na") { + filtered_data[[set_info$additional_set_id]] <- tolower(filtered_data[[set_info$additional_set_id]]) + filtered_data <- subset(filtered_data, get(set_info$additional_set_id) %in% additional_set_selected) + } + if(set_info$additional_set_id2 != "na") { + filtered_data[[set_info$additional_set_id2]] <- tolower(filtered_data[[set_info$additional_set_id2]]) + filtered_data <- subset(filtered_data, get(set_info$additional_set_id2) %in% additional_set_selected2) + } + if(!is.null(regions) && "n" %in% names(filtered_data)) { + filtered_data <- subset(filtered_data, n %in% regions) + } + if(!is.null(scenarios)) { + filtered_data <- subset(filtered_data, file %in% scenarios) + } + return(DT::datatable(filtered_data, options = list(pageLength = 25, scrollX = TRUE), rownames = FALSE)) +} else { + # Show plot as usual + set_info <- extract_additional_sets(afd, file_group_columns) + output$choose_additional_set <- renderUI({ + variable <- variable_input() + if(is.null(variable)) variable <- list_of_variables[1] + sel <- input$additional_set_id_selected + if(is.null(sel)){ + if("co2_ffi" %in% set_info$set_elements) sel <- "co2_ffi" else sel <- set_info$set_elements[1] + } + size_elements <- min(length(set_info$set_elements), 5) + selectInput(inputId="additional_set_id_selected", label="Index 1:", choices=set_info$set_elements, size=size_elements, selectize=FALSE, multiple=TRUE, selected=sel) + }) + output$choose_additional_set2 <- renderUI({ + variable <- variable_input() + if(is.null(variable)) variable <- list_of_variables[1] + sel2 <- input$additional_set_id_selected2 + size_elements2 <- min(length(set_info$set_elements2), 5) + selectInput(inputId="additional_set_id_selected2", label="Index 2:", 
choices=set_info$set_elements2, size=size_elements2, selectize=FALSE, multiple=TRUE, selected=sel2) + }) + yearlim <- input$yearlim + additional_set_selected <- input$additional_set_id_selected + regions <- input$regions_selected + scenarios <- input$scenarios_selected + if(is.null(regions)) regions <- display_regions + if(is.null(additional_set_selected)) additional_set_selected <- set_info$set_elements[1] + if((set_info$additional_set_id!="na" & additional_set_selected[1]=="na") | !(additional_set_selected[1] %in% set_info$set_elements)) additional_set_selected <- set_info$set_elements[1] + plot_data <- prepare_plot_data(variable, field_show, yearlim, scenarios, set_info$additional_set_id, additional_set_selected, NULL, NULL, regions, growth_rate, time_filter=TRUE, compute_aggregates=TRUE, verbose=verbose) + afd <- plot_data$data + unit_conv <- plot_data$unit_conv + if(growth_rate){ + unit_conv$unit <- " % p.a." + unit_conv$convert <- 1 + } + afd$n <- ifelse(afd$n=="World", "World", toupper(afd$n)) + if(regions[1]=="World" | length(regions)==1){ + p <- ggplot(subset(afd, n %in% regions & !str_detect(file, "historical") & !str_detect(file, "valid")), aes(year, value, colour=file)) + geom_line(stat="identity", linewidth=1.5) + xlab("year") + ylab(unit_conv$unit) + xlim(yearlim[1], yearlim[2]) + if(ylim_zero) p <- p + ylim(0, NA) + if(show_historical) { + p <- p + geom_line(data=subset(afd, n %in% regions & str_detect(file, "historical")), aes(year, value, colour=file), stat="identity", linewidth=1.0, linetype="solid") + } + p <- p + theme(text=element_text(size=16), legend.position="bottom", legend.direction="horizontal", legend.box="vertical", legend.key=element_rect(colour=NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL)) + }else{ + p <- ggplot(subset(afd, n %in% regions & !str_detect(file, "historical") & !str_detect(file, "valid")), aes(year, value, colour=n, linetype=file)) + geom_line(stat="identity", linewidth=1.5) + xlab("year") + 
ylab(unit_conv$unit) + scale_colour_manual(values=region_palette) + xlim(yearlim[1], yearlim[2]) + if(show_historical) { + p <- p + geom_line(data=subset(afd, n %in% regions & str_detect(file, "historical")), aes(year, value, colour=n, group=interaction(n, file)), linetype="solid", stat="identity", linewidth=1.0) + } + p <- p + theme(text=element_text(size=16), legend.position="bottom", legend.direction="horizontal", legend.box="vertical", legend.key=element_rect(colour=NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL, nrow=2), linetype=guide_legend(title=NULL)) + } + if(length(results_dir)!=1) p <- p + facet_grid(. ~ pathdir) + if(nrow(afd)>0) { + return(renderPlot({print(p + labs(title=variable))}, height = 600)) + } +} +}) +output$gdxcompaRstackedplot <- renderPlot({ +show_historical <- input$add_historical # Checkbox controls plot visibility +ylim_zero <- input$ylim_zero +variable <- input$variable_selected +if(is.null(variable)) variable <- list_of_variables[1] +afd <- get_witch(variable, ) +if(verbose) print(str_glue("Variable {variable} loaded.")) +set_info <- extract_additional_sets(afd, file_group_columns) +output$choose_additional_set <- renderUI({ +variable <- variable_selected_reactive() +if(is.null(variable)) variable <- list_of_variables[1] +sel <- input$additional_set_id_selected +size_elements <- min(length(set_info$set_elements), 5) +selectInput("additional_set_id_selected", "Index 1:", set_info$set_elements, size=size_elements, selectize=FALSE, multiple=TRUE, selected=sel) +}) +yearlim <- input$yearlim +additional_set_selected <- input$additional_set_id_selected +regions <- input$regions_selected +scenarios <- input$scenarios_selected +if(is.null(regions)) regions <- display_regions +if(is.null(additional_set_selected)) additional_set_selected <- set_info$set_elements[1] +if((set_info$additional_set_id!="na" & additional_set_selected[1]=="na") | !(additional_set_selected[1] %in% set_info$set_elements)) 
additional_set_selected <- set_info$set_elements[1] +afd <- subset_by_additional_sets(afd, set_info$additional_set_id, additional_set_selected, NULL, NULL) +# Order pathdir factor according to results_dir vector +if("pathdir" %in% names(afd) && length(results_dir) > 1) { + pathdir_levels <- basename(results_dir) + afd$pathdir <- factor(afd$pathdir, levels=pathdir_levels) +} +# Calculate year if not already present BEFORE filtering +if(!"year" %in% names(afd)) { + if("tlen" %in% names(afd)) { + afd$year <- ttoyear(afd$t, afd$tlen) + } else { + afd$year <- ttoyear(afd$t) + } +} +afd <- subset(afd, year>=yearlim[1] & year<=yearlim[2]) +afd <- afd %>% filter(!is.na(value)) +afd <- subset(afd, file %in% c(scenarios, paste0(scenarios, "(b1)"), paste0(scenarios, "(b2)"), paste0(scenarios, "(b3)")) | str_detect(file, "historical") | str_detect(file, "valid")) +if(show_historical) { + afd_hist <- subset(afd, file %in% c(str_subset(unique(afd$file), "historical")[1])) + afd <- subset(afd, file %in% c(scenarios)) + for(scen in scenarios){ + afd_hist$file <- scen + if(scen==scenarios[1]) afd_hist_temp <- afd_hist else afd_hist_temp <- rbind(afd_hist_temp, afd_hist) + } + afd <- rbind(afd, afd_hist_temp) +} else { + afd <- subset(afd, file %in% c(scenarios)) +} +unit_conv <- unit_conversion(variable) +afd$value <- afd$value * unit_conv$convert +p_stacked <- ggplot(subset(afd, n %in% regions & !str_detect(file, "historical") & !str_detect(file, "valid")), aes(year, value, fill=n)) + geom_area(stat="identity", linewidth=1.5) + xlab("year") + ylab(unit_conv$unit) + scale_fill_manual(values=region_palette) + xlim(yearlim[1], yearlim[2]) +p_stacked <- p_stacked + theme(text=element_text(size=16), legend.position="bottom", legend.direction="horizontal", legend.box="vertical", legend.key=element_rect(colour=NA), legend.title=element_blank()) + guides(fill=guide_legend(title=NULL, nrow=2)) +if(!is.null(scenarios)) p_stacked <- p_stacked + facet_wrap(. 
~ file) +print(p_stacked + labs(title=variable)) +}) +output$gdxompaRplotly <- renderPlotly({ +show_historical <- input$add_historical # Checkbox controls plot visibility +ylim_zero <- input$ylim_zero +growth_rate <- input$growth_rate +field_show <- input$field +plotly_dynamic <- input$plotly_dynamic +variable <- input$variable_selected +if(is.null(variable)) variable <- list_of_variables[1] +plot_data <- prepare_plot_data(variable, field_show, input$yearlim, input$scenarios_selected, "na", "na", NULL, NULL, input$regions_selected, growth_rate, time_filter=TRUE, compute_aggregates=TRUE, verbose=verbose) +afd <- plot_data$data +unit_conv <- plot_data$unit_conv +yearlim <- input$yearlim +regions <- input$regions_selected +if(regions[1]=="World" | regions[1]=="EU" | length(regions)==1){ +p_dyn <- ggplot(subset(afd, n %in% regions & !str_detect(file, "historical") & !str_detect(file, "valid")), aes(year, value, colour=file)) + geom_line(stat="identity", linewidth=1.5) + xlab(NULL) + ylab(unit_conv$unit) + xlim(yearlim[1], yearlim[2]) +if(show_historical && nrow(afd %>% filter(n %in% regions & str_detect(file, "historical")))>0) p_dyn <- p_dyn + geom_line(data=subset(afd, n %in% regions & str_detect(file, "historical")), aes(year, value, colour=file), stat="identity", linewidth=1.0, linetype="solid") +if(nrow(afd %>% filter(n %in% regions & str_detect(file, "valid")))>0) p_dyn <- p_dyn + geom_point(data=subset(afd, n %in% regions & str_detect(file, "valid")), aes(year, value, colour=file), size=4.0, shape=18) +if(ylim_zero) p_dyn <- p_dyn + geom_hline(yintercept=0, alpha=0.5) +if("valid" %in% unique(afd %>% filter(n %in% regions))$file) p_dyn <- p_dyn + geom_point(data=subset(afd, n %in% regions & str_detect(file, "valid")), aes(year, value, colour=file), size=4.0, shape=18) +p_dyn <- p_dyn + theme(text=element_text(size=16), legend.position="bottom", legend.direction="horizontal", legend.box="vertical", legend.key=element_rect(colour=NA), legend.title=element_blank()) + 
guides(color=guide_legend(title=NULL)) +}else{ +p_dyn <- ggplot(subset(afd, n %in% regions & !str_detect(file, "historical") & !str_detect(file, "valid")), aes(year, value, colour=n, linetype=file)) + geom_line(stat="identity", linewidth=1.5) + xlab(NULL) + ylab(unit_conv$unit) + scale_colour_manual(values=region_palette) + xlim(yearlim[1], yearlim[2]) +if(show_historical && "historical" %in% unique(afd %>% filter(n %in% regions))$file) p_dyn <- p_dyn + geom_line(data=subset(afd, n %in% regions & str_detect(file, "historical")), aes(ttoyear(t), value, colour=n), linetype="solid", stat="identity", linewidth=1.0) +if("valid" %in% unique(afd %>% filter(n %in% regions))$file) p_dyn <- p_dyn + geom_point(data=subset(afd, n %in% regions & str_detect(file, "valid")), aes(year, value, shape=file), size=4.0) +p_dyn <- p_dyn + theme(text=element_text(size=16), legend.position="bottom", legend.direction="horizontal", legend.box="vertical", legend.key=element_rect(colour=NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL, nrow=2), linetype=guide_legend(title=NULL)) +} +if(length(results_dir)!=1) p_dyn <- p_dyn + facet_grid(. 
~ pathdir) +p_dyn <- p_dyn + theme(legend.position="none") +print(p_dyn) +if(length(ggplot_build(p_dyn)$data[[1]])>0) ggplotly() +}) +output$diagnostics <- renderPlot({ +variable <- input$variable_selected +yearlim <- input$yearlim +scenarios <- input$scenarios_selected +get_witch("elapsed") +if(!exists("elapsed")) elapsed <- data.frame(file=scenlist, value=0) +get_witch("Y") +get_witch("TATM") +get_witch("MIU") +get_witch("l") +gini <- Y %>% left_join(l %>% rename(pop=value), by=c("t", "n", "file", "pathdir")) %>% group_by(t, file, pathdir) %>% summarize(value=reldist::gini(value/pop, weights=pop)) +diagplot <- ggarrange( +ggplot(elapsed %>% filter(file %in% scenarios)) + geom_bar(aes(file, value, fill=file), stat="identity") + ylab("Run time (minutes)") + theme(axis.text.x=element_text(angle=90, hjust=1)) + theme(axis.title.x=element_blank(), axis.text.x=element_blank(), axis.ticks.x=element_blank()) + scale_y_time(labels=function(l) strftime(l, "%M:%S")), +ggarrange( +ggplot(MIU %>% group_by(t, file, pathdir) %>% summarise(value=mean(value)) %>% filter(file %in% scenarios)) + geom_line(aes(ttoyear(t), value, color=file), size=1) + ylab("MIU") + xlab(""), +ggplot(Y %>% filter(file %in% scenarios) %>% group_by(t, file, pathdir) %>% summarise(value=sum(value))) + geom_line(aes(ttoyear(t), value, color=file), size=1) + ylab("GDP [T$]") + xlab(""), +ncol=2, common.legend=TRUE, legend="none" +), +ggarrange( +ggplot(TATM %>% filter(file %in% scenarios & !is.na(value))) + geom_line(aes(ttoyear(t), value, color=file), size=1) + ylab("TATM") + xlab(""), +ggplot(gini %>% filter(file %in% scenarios)) + geom_line(aes(ttoyear(t), value, color=file), size=1) + ylab("Gini index") + xlab("") + ylim(0, 1), +ncol=2, common.legend=TRUE, legend="none" +), +nrow=3, common.legend=TRUE, legend="bottom" +) +print(diagplot) +}) +}) diff --git a/gdxcompaR/fidelio/ui.R b/inst/gdxcompaR/fidelio/ui.R similarity index 76% rename from gdxcompaR/fidelio/ui.R rename to 
inst/gdxcompaR/fidelio/ui.R index 00486ab..ac10e00 100644 --- a/gdxcompaR/fidelio/ui.R +++ b/inst/gdxcompaR/fidelio/ui.R @@ -21,13 +21,9 @@ shinyUI(fluidPage( value = c(1990,2100), step = 5), div(style="display:inline-block", - checkboxInput("time_filter", - "Time filter", - value = TRUE)), - div(style="display:inline-block", - checkboxInput("add_historical", - "Historical", - value = TRUE)), + checkboxInput("add_historical", + "Show historical", + value = if(exists("add_historical")) add_historical else TRUE)), div(style="display:inline-block", checkboxInput("ylim_zero", "ymin=0", @@ -41,9 +37,8 @@ shinyUI(fluidPage( # Show the plot mainPanel( tabsetPanel(type = "tabs", id = "tabs", - tabPanel("gdxcompaR", id = "gdxcompaR", h2(textOutput("varname")),plotOutput("gdxcompaRplot", width = "100%", height = "80vh")), + tabPanel("gdxcompaR", id = "gdxcompaR", h2(textOutput("varname")),uiOutput("gdxcompaRplot")), #tabPanel("Diagnostics", id = "diagnostics", plotOutput("diagnostics", width = "100%", height = "80vh")), - tabPanel("gdxcompaRly (BETA)", id = "gdxcompaRly", plotlyOutput("gdxompaRplotly", width = "100%", height = "80vh")), tabPanel("gdxcompaR stacked", id = "gdxcompaR_stacked", h2(textOutput("varname2")),plotOutput("gdxcompaRstackedplot", width = "100%", height = "80vh")), ) diff --git a/inst/gdxcompaR/iiasadb/global.R b/inst/gdxcompaR/iiasadb/global.R new file mode 100644 index 0000000..3c92de6 --- /dev/null +++ b/inst/gdxcompaR/iiasadb/global.R @@ -0,0 +1,21 @@ +# Global environment setup for IIASADB Shiny app +# This file loads required packages and makes functions available + +# Suppress package startup messages and warnings about namespace conflicts +suppressPackageStartupMessages({ + library(shiny) + library(ggplot2) + library(plotly) + library(data.table) + library(dplyr) # Load dplyr after data.table to prioritize dplyr functions + library(stringr) + library(shinyWidgets) + library(gdxtools) + library(witchtools) + library(tidyr) + # Optional packages + 
if(requireNamespace("arrow", quietly=TRUE)) library(arrow) + if(requireNamespace("ggpubr", quietly=TRUE)) library(ggpubr) + if(requireNamespace("rnaturalearth", quietly=TRUE)) library(rnaturalearth) + if(requireNamespace("sf", quietly=TRUE)) library(sf) +}) diff --git a/inst/gdxcompaR/iiasadb/server.R b/inst/gdxcompaR/iiasadb/server.R new file mode 100644 index 0000000..daf4267 --- /dev/null +++ b/inst/gdxcompaR/iiasadb/server.R @@ -0,0 +1,331 @@ +#Create gdxcompaR based on iiasa form csv or xlsx files or direct database connection + +#require packages if online deployed +if(deploy_online){ + suppressPackageStartupMessages(require(tidyverse)) + require(plotly) + require(shinyWidgets) + add_historical_values <- function(x, varname, iiasadb, verbose){ + # Always keep historical data sources separate (check_calibration is always TRUE) + x <- rbind(x, iiasadb_historical %>% filter(VARIABLE==varname)) + return(x) + } +} + + +# Define server +shinyServer(function(input, output, session) { + #some global flags + verbose = FALSE + + #get list of variables + regions <- unique(iiasadb_snapshot$REGION) + models <- unique(iiasadb_snapshot$MODEL) + variables <- unique(iiasadb_snapshot$VARIABLE) + variables <- sort(variables) + variable_atstart <- ifelse("Population" %in% variables, "Population", variables[1]) + scenarios <- unique(iiasadb_snapshot$SCENARIO) + + #Scenario selector (max 10 rows with scrollbar) + output$select_scenarios <- renderUI({ + selectInput("scenarios_selected", "Select scenarios", scenarios, size=min(10, length(scenarios)), selectize = F, multiple = T, selected = scenarios) + }) + + #Variable selector + output$select_variable <- renderUI({ + pickerInput( + inputId = "variable_selected", + label = "Variable:", + choices = variables, + selected = variable_atstart, + options = list( + `live-search` = TRUE) + ) + }) + + # Reactively update variable selector + variable_input <- reactive({ + return(input$variable_selected) + }) + + # Variable name display with 
region info + output$varname <- renderText({ + var_text <- paste0("Variable: ", variable_input()) + if(!is.null(input$regions_selected) && length(input$regions_selected)==1) { + var_text <- paste0(var_text, " - Region: ", input$regions_selected[1]) + } + var_text + }) + + #MODEL selector (max 10 rows with scrollbar) + output$select_models <- renderUI({ + selectInput("models_selected", "Select models", models, size=min(10, length(models)), selectize = F, multiple = T, selected = models) + }) + + #REGION selector + output$select_regions <- renderUI({ + regions_for_selector <- regions + selectInput("regions_selected", "Select regions", regions_for_selector, size=1, selectize = F, multiple = F, selected = "World") + }) + + #Compare models or scenarios + # output$compare_models_scenarios <- renderUI({ + # compare_models_scenarios_selector <- "Scenarios" + # radioButtons("choice_models_scenarios", "Use color for", c("Scenarios", "Models"),selected = "Scenarios", inline=T) + # }) + + observeEvent(input$button_saveplotdata, { + variable <- input$variable_selected + print("Current plot saved in subdirectory 'graphs'") + saveplot(variable, width = 14, height = 7) + }) + + #Additional selector for specific Panels + + + + # MAIN CODE FOR PLOT GENERATION + output$iiasadb_compaR <- renderPlot({ + ylim_zero <- input$ylim_zero + show_legend <- input$show_legend + variable <- input$variable_selected + if(is.null(variable)) variable <- variables[1] + #get data using new get_iiasadb() function (similar to get_witch()) + allfilesdata <- get_iiasadb(variable, add_historical = if(exists("add_historical")) add_historical else FALSE) + unitplot <- unique(allfilesdata$UNIT)[1] + + #get input from sliders/buttons + yearlim <- input$yearlim + regions <- input$regions_selected + models_selected <- input$models_selected + #get all possible scenarios + scenarios_selected <- input$scenarios_selected + + #select scenarios - use data.frame indexing to preserve all columns + allfilesdata <- 
allfilesdata[allfilesdata$SCENARIO %in% c(scenarios_selected, "historical"), ] + allfilesdata <- allfilesdata[!(allfilesdata$MODEL %in% setdiff(models, models_selected)), ] + + #time frame + allfilesdata <- allfilesdata[allfilesdata$year>=yearlim[1] & allfilesdata$year<=yearlim[2], ] + #clean data + allfilesdata <- allfilesdata[!is.na(allfilesdata$value), ] + + if(is.null(regions)) regions <- "World" + # Convert regions to lowercase for matching (since n column is lowercase) + regions_lower <- tolower(regions) + + if(regions[1]=="World" | length(regions)==1){#if only World is displayed or only one region, show files with colors + # Filter data first + plot_data <- allfilesdata[allfilesdata$n %in% regions_lower & allfilesdata$SCENARIO!="historical", ] + hist_data <- allfilesdata[allfilesdata$n %in% regions_lower & allfilesdata$SCENARIO=="historical", ] + + if(length(models_selected)==1){ + # Single model: use color for scenarios + p <- ggplot(plot_data, aes(x=year, y=value, colour=SCENARIO)) + geom_line(stat="identity", linewidth=1.5) + xlab("") + ylab(unitplot) + xlim(yearlim[1],yearlim[2]) + # Use a programmatic color palette that can handle many scenarios + n_scenarios <- length(unique(plot_data$SCENARIO)) + if(n_scenarios > 12) { + # Use viridis for many scenarios (colorblind-friendly and handles many values) + p <- p + scale_colour_viridis_d(option = "turbo") + } + # Historical data in different colors (one per data source/MODEL) + if(nrow(hist_data) > 0) { + p <- p + geom_line(data=hist_data, aes(x=year, y=value, colour=MODEL), stat="identity", linewidth=1.0) + } + }else{ + # Multiple models: use color for MODEL, linetype for SCENARIO + p <- ggplot(plot_data, aes(x=year, y=value, colour=MODEL, linetype=SCENARIO)) + geom_line(stat="identity", linewidth=1.5) + xlab("") + ylab(unitplot) + xlim(yearlim[1],yearlim[2]) + # Use a programmatic color palette that can handle many models + n_models <- length(unique(plot_data$MODEL)) + if(n_models > 12) { + p <- p + 
scale_colour_viridis_d(option = "turbo") + } + # Historical data in different colors (one per MODEL) + if(nrow(hist_data) > 0) { + p <- p + geom_line(data=hist_data, aes(x=year, y=value, colour=MODEL), stat="identity", linewidth=1.0) + } + } + if(ylim_zero) p <- p + ylim(0, NA) + # Add faceting by pathdir if multiple directories + if(exists("results_dir") && length(results_dir) > 1 && "PATHDIR" %in% names(allfilesdata)) { + p <- p + facet_grid(. ~ PATHDIR) + } + #legends: + if(show_legend) { + p <- p + theme(text = element_text(size=16), legend.position="bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL), linetype=guide_legend(title=NULL)) + } else { + p <- p + theme(text = element_text(size=16), legend.position="none") + } + }else{ + # Multiple regions: filter data first + plot_data <- allfilesdata[allfilesdata$n %in% regions_lower & allfilesdata$SCENARIO!="historical", ] + hist_data <- allfilesdata[allfilesdata$n %in% regions_lower & allfilesdata$SCENARIO=="historical", ] + + p <- ggplot(plot_data, aes(x=year, y=value, colour=interaction(n, MODEL), linetype=SCENARIO)) + geom_line(stat="identity", linewidth=1.5) + xlab("year") + ylab(unitplot) + xlim(yearlim[1],yearlim[2]) + facet_grid(. 
~ n) + # Use a programmatic color palette for region-model combinations + n_colors <- length(unique(interaction(plot_data$n, plot_data$MODEL))) + if(n_colors > 12) { + p <- p + scale_colour_viridis_d(option = "turbo") + } + if(nrow(hist_data) > 0) { + p <- p + geom_line(data=hist_data, aes(x=year, y=value, colour=n, linetype=MODEL), stat="identity", linewidth=1.0) + } + if(ylim_zero) p <- p + ylim(0, NA) + # Add additional faceting by pathdir if multiple directories + if(exists("results_dir") && length(results_dir) > 1 && "PATHDIR" %in% names(allfilesdata)) { + p <- p + facet_grid(PATHDIR ~ n) + } + #legends: + if(show_legend) { + p <- p + theme(text = element_text(size=16), legend.position="bottom", legend.direction = "horizontal", legend.box = "vertical", legend.key = element_rect(colour = NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL, nrow = 2), linetype=guide_legend(title=NULL)) + } else { + p <- p + theme(text = element_text(size=16), legend.position="none") + } + } + if(nrow(allfilesdata)>0) print(p + labs(title=variable)) + }) + + # Coverage plots + output$iiasadb_coverage_scenarios <- renderPlot({ + models_selected <- input$models_selected + scenarios_selected <- input$scenarios_selected + + coverage_data <- iiasadb_snapshot %>% + filter(SCENARIO %in% scenarios_selected & MODEL %in% models_selected) %>% + group_by(MODEL, SCENARIO) %>% + reframe(REGION=unique(REGION)) %>% + group_by(SCENARIO, MODEL) %>% + summarize(REGIONS=length(REGION), .groups = 'drop') + + ggplot(coverage_data, aes(MODEL, SCENARIO, fill=REGIONS)) + + geom_tile() + + theme_minimal() + + theme( + axis.text.x = element_text(size = 14, angle = 45, hjust = 1, margin = margin(t = 10)), + axis.text.y = element_text(size = 16, hjust = 1, margin = margin(r = 15)), + text = element_text(size = 12), + axis.title = element_text(size = 16, margin = margin(15, 15, 15, 15)), + panel.grid = element_blank(), + axis.ticks = element_blank(), + plot.margin = margin(30, 30, 30, 
30) + ) + + geom_text(aes(label=REGIONS), size=4, color="black") + + scale_fill_gradient2(low = "white", mid = "yellow", high = "darkgreen") + + scale_x_discrete(labels = function(x) str_wrap(x, width = 15)) + + scale_y_discrete(labels = function(x) str_wrap(x, width = 35)) + + labs(x = "Models", y = "Scenarios", fill = "Regions") + }) + + output$iiasadb_coverage_regions <- renderPlot({ + models_selected <- input$models_selected + scenarios_selected <- input$scenarios_selected + + coverage_data <- iiasadb_snapshot %>% + filter(SCENARIO %in% scenarios_selected & MODEL %in% models_selected) %>% + group_by(MODEL, SCENARIO) %>% + reframe(REGION=unique(REGION)) %>% + group_by(REGION, MODEL) %>% + summarize(SCENARIOS=length(SCENARIO), .groups = 'drop') + + ggplot(coverage_data, aes(MODEL, REGION, fill=SCENARIOS)) + + geom_tile() + + theme_minimal() + + theme( + axis.text.x = element_text(size = 14, angle = 45, hjust = 1, margin = margin(t = 10)), + axis.text.y = element_text(size = 16, hjust = 1, margin = margin(r = 15)), + text = element_text(size = 12), + axis.title = element_text(size = 16, margin = margin(15, 15, 15, 15)), + panel.grid = element_blank(), + axis.ticks = element_blank(), + plot.margin = margin(30, 30, 30, 30) + ) + + geom_text(aes(label=SCENARIOS), size=4, color="black") + + scale_fill_gradient2(low = "white", mid = "yellow", high = "darkgreen") + + scale_x_discrete(labels = function(x) str_wrap(x, width = 15)) + + scale_y_discrete(labels = function(x) str_wrap(x, width = 35)) + + labs(x = "Models", y = "Regions", fill = "Scenarios") + }) + + # Variables table with DT + output$variables_table <- DT::renderDataTable({ + models_selected <- input$models_selected + scenarios_selected <- input$scenarios_selected + + coverage_data <- iiasadb_snapshot %>% + filter(SCENARIO %in% scenarios_selected) %>% + filter(MODEL %in% models_selected) %>% + group_by(VARIABLE, MODEL) %>% + summarise(Count = n(), .groups = 'drop') %>% + pivot_wider(names_from = MODEL, 
values_from = Count, values_fill = 0) %>% + mutate(num_models = rowSums(select(., -all_of("VARIABLE")) > 0)) %>% + mutate(single_model = case_when( + num_models == 1 ~ { + model_cols <- select(., -all_of(c("VARIABLE", "num_models"))) + names(model_cols)[max.col(model_cols)] + }, + TRUE ~ "" + )) %>% + arrange(desc(num_models), single_model, VARIABLE) %>% + select(-all_of(c("num_models", "single_model"))) + + DT::datatable( + coverage_data, + options = list( + pageLength = 100, + scrollX = TRUE, + scrollY = "600px", + dom = 'frtip', + processing = TRUE, + deferRender = TRUE, + columnDefs = list( + list(width = '200px', targets = 0), + list(width = '80px', targets = 1:(ncol(coverage_data)-1)), + list(className = 'dt-center', targets = 1:(ncol(coverage_data)-1)) + ) + ), + rownames = FALSE, + class = 'compact stripe' + ) %>% + DT::formatStyle( + columns = names(coverage_data)[-1], + backgroundColor = DT::styleInterval( + cuts = c(0.5, 10, 50), + values = c("white", "#ffffcc", "#a1dab4", "#2c7fb8") + ) + ) + }) + + # Years coverage plot + output$iiasadb_coverage_years <- renderPlot({ + models_selected <- input$models_selected + scenarios_selected <- input$scenarios_selected + + coverage_data <- iiasadb_snapshot %>% + filter(SCENARIO %in% scenarios_selected & MODEL %in% models_selected) %>% + group_by(YEAR, MODEL) %>% + summarize(SCENARIOS_REGIONS = length(unique(paste(SCENARIO, REGION))), .groups = 'drop') + + ggplot(coverage_data, aes(MODEL, YEAR, fill=SCENARIOS_REGIONS)) + + geom_tile() + + theme_minimal() + + theme( + axis.text.x = element_text(size = 14, angle = 45, hjust = 1, margin = margin(t = 10)), + axis.text.y = element_text(size = 16, hjust = 1, margin = margin(r = 15)), + text = element_text(size = 12), + axis.title = element_text(size = 16, margin = margin(15, 15, 15, 15)), + panel.grid = element_blank(), + axis.ticks = element_blank(), + plot.margin = margin(30, 30, 30, 30) + ) + + geom_text(aes(label=SCENARIOS_REGIONS), size=3, color="black") + + 
scale_fill_gradient2(low = "white", mid = "yellow", high = "darkgreen") + + scale_x_discrete(labels = function(x) str_wrap(x, width = 15)) + + scale_y_continuous(breaks = function(x) seq(ceiling(min(x)/5)*5, floor(max(x)/5)*5, by = 5)) + + labs(x = "Models", y = "Years", fill = "Scenarios×Regions") + }) + + + + + +}) diff --git a/gdxcompaR/iiasadb/ui.R b/inst/gdxcompaR/iiasadb/ui.R similarity index 63% rename from gdxcompaR/iiasadb/ui.R rename to inst/gdxcompaR/iiasadb/ui.R index 8149c93..1041e6f 100644 --- a/gdxcompaR/iiasadb/ui.R +++ b/inst/gdxcompaR/iiasadb/ui.R @@ -3,12 +3,24 @@ #load data if not running locally deploy_online <<- F if(!exists("iiasadb_snapshot")){ - load("iiasadb_snapshot.Rdata", envir = .GlobalEnv) + # Try to load snapshot from results_dir first, fall back to package directory + snapshot_loaded <- FALSE + if(exists("results_dir") && length(results_dir) > 0) { + snapshot_path <- file.path(results_dir[1], "iiasadb_snapshot.Rdata") + if(file.exists(snapshot_path)) { + load(snapshot_path, envir = .GlobalEnv) + snapshot_loaded <- TRUE + } + } + # Fall back to package directory if not found in results_dir + if(!snapshot_loaded) { + load("iiasadb_snapshot.Rdata", envir = .GlobalEnv) + } #Install and load packages require_package <- function(package){ suppressPackageStartupMessages(require(package,character.only=T, quietly = TRUE)) } - pkgs <- c('data.table', 'stringr', 'countrycode', 'ggplot2', 'ggpubr', 'scales', 'RColorBrewer', 'dplyr', 'openxlsx', 'gsubfn', 'tidyr', 'rlang', 'shiny', 'shinythemes', 'shinyWidgets', 'plotly', 'purrr', 'reldist', 'tidytidbits', 'forcats', 'arrow') + pkgs <- c('data.table', 'stringr', 'countrycode', 'ggplot2', 'ggpubr', 'scales', 'RColorBrewer', 'dplyr', 'openxlsx', 'gsubfn', 'tidyr', 'rlang', 'shiny', 'shinythemes', 'shinyWidgets', 'plotly', 'purrr', 'reldist', 'tidytidbits', 'forcats', 'arrow', 'DT') res <- lapply(pkgs, require_package) deploy_online <<- T } @@ -37,8 +49,9 @@ shinyUI(fluidPage( 
uiOutput("select_scenarios"), uiOutput("select_models"), uiOutput("select_regions"), - #div(style="display:inline-block",uiOutput("compare_models_scenarios")), + #div(style="display:inline-block",uiOutput("compare_models_scenarios")), div(style="display:inline-block",checkboxInput("ylim_zero", " Set y-axis limit to zero", value = F)), + div(style="display:inline-block",checkboxInput("show_legend", " Show legend", value = F)), if(!deploy_online){div(style="display:inline-block",actionButton("button_saveplotdata", "Save Plot"))} @@ -49,11 +62,11 @@ shinyUI(fluidPage( mainPanel( tabsetPanel(type = "tabs", id = "tabs", tabPanel("iiasadb_compaR", id = "iiasadb_compaR", h2(textOutput("varname")),plotOutput("iiasadb_compaR", width = "100%", height = "80vh")), - tabPanel("iiasadb_compaRly", id = "iiasadb_compaRly", plotlyOutput("iiasadb_compaRly", width = "100%", height = "80vh")), - - tabPanel("Regions", id = "Regions", h2("Regions"),plotOutput("iiasadb_coverage_regions", width = "100%", height = "80vh")), - tabPanel("Scenarios", id = "Scenarios", h2("Scenarios"),plotOutput("iiasadb_coverage_scenarios", width = "100%", height = "80vh")), - tabPanel("Variables", id = "Variables", h2("Variables"),plotOutput("iiasadb_coverage_variables", width = "100%", height = "80vh")), + + tabPanel("Regions", id = "Regions", h2("Regions"), div(style="height:80vh; overflow-y:scroll;", plotOutput("iiasadb_coverage_regions", width = "100%", height = "200vh"))), + tabPanel("Scenarios", id = "Scenarios", h2("Scenarios"), div(style="height:80vh; overflow-y:scroll;", plotOutput("iiasadb_coverage_scenarios", width = "100%", height = "120vh"))), + tabPanel("Variables", id = "Variables", h2("Variables"), DT::dataTableOutput("variables_table")), + tabPanel("Years", id = "Years", h2("Years"), div(style="height:80vh; overflow-y:scroll;", plotOutput("iiasadb_coverage_years", width = "100%", height = "120vh"))) ) ) ))) \ No newline at end of file diff --git a/inst/gdxcompaR/rice/global.R 
b/inst/gdxcompaR/rice/global.R new file mode 100644 index 0000000..f96ceba --- /dev/null +++ b/inst/gdxcompaR/rice/global.R @@ -0,0 +1,21 @@ +# Global environment setup for RICE50+ Shiny app +# This file loads required packages and makes functions available + +# Suppress package startup messages and warnings about namespace conflicts +suppressPackageStartupMessages({ + library(shiny) + library(ggplot2) + library(plotly) + library(data.table) + library(dplyr) # Load dplyr after data.table to prioritize dplyr functions + library(stringr) + library(shinyWidgets) + library(gdxtools) + library(witchtools) + library(tidyr) + # Optional packages + if(requireNamespace("arrow", quietly=TRUE)) library(arrow) + if(requireNamespace("ggpubr", quietly=TRUE)) library(ggpubr) + if(requireNamespace("rnaturalearth", quietly=TRUE)) library(rnaturalearth) + if(requireNamespace("sf", quietly=TRUE)) library(sf) +}) diff --git a/inst/gdxcompaR/rice/server.R b/inst/gdxcompaR/rice/server.R new file mode 100644 index 0000000..f63810f --- /dev/null +++ b/inst/gdxcompaR/rice/server.R @@ -0,0 +1,326 @@ +shinyServer(function(input, output, session) { +verbose <- FALSE +growth_rate <- FALSE +list_of_variables <- get_gdx_variable_list_simple(results_dir, filelist) +output$select_scenarios <- renderUI({create_scenario_selector(scenlist)}) +output$select_variable <- renderUI({create_variable_selector(list_of_variables, default_var="E", use_picker=TRUE)}) +output$select_regions <- renderUI({create_region_selector(witch_regions, include_aggregates=c("World"), default_region="World")}) +variable_selected_reactive <- reactive({input$variable_selected}) +output$varname <- renderText({ + var_text <- paste0("Variable: ", variable_selected_reactive()) + if(!is.null(input$additional_set_id_selected) && input$additional_set_id_selected[1] != "na") { + var_text <- paste0(var_text, " - Element: ", paste(input$additional_set_id_selected, collapse=",")) + } + if(!is.null(input$regions_selected) && 
length(input$regions_selected)==1) { + var_text <- paste0(var_text, " - Region: ", input$regions_selected[1]) + } + var_text +}) +observeEvent(input$button_saveplotdata, { +variable <- input$variable_selected +print("Current plot saved in subdirectory 'graphs'") +saveplot(variable, width=14, height=7) +}) +output$gdxcompaRplot <- renderUI({ +show_historical <- input$add_historical # Checkbox controls plot visibility, not data loading +ylim_zero <- input$ylim_zero +field_show <- input$field +growth_rate <- input$growth_rate +stacked_plot <- input$stacked_plot +variable <- input$variable_selected +if(is.null(variable)) variable <- list_of_variables[1] +afd <- get_witch(variable, , field=field_show) # Always loads with historical if add_historical was TRUE at startup +if(verbose) print(str_glue("Variable {variable} loaded.")) + +# Check if variable has time dimension +has_time <- "t" %in% names(afd) + +if(!has_time) { + # Show dynamic table instead of plot + set_info <- extract_additional_sets(afd, file_group_columns) + output$choose_additional_set <- renderUI({ + variable <- variable_selected_reactive() + if(is.null(variable)) variable <- list_of_variables[1] + sel <- input$additional_set_id_selected + size_elements <- min(length(set_info$set_elements), 5) + selectInput("additional_set_id_selected", "Index 1:", set_info$set_elements, size=size_elements, selectize=FALSE, multiple=TRUE, selected=sel) + }) + + filtered_data <- afd + # Order pathdir factor according to results_dir vector + if("pathdir" %in% names(filtered_data) && length(results_dir) > 1) { + pathdir_levels <- basename(results_dir) + filtered_data$pathdir <- factor(filtered_data$pathdir, levels=pathdir_levels) + } + additional_set_selected <- input$additional_set_id_selected + regions <- input$regions_selected + scenarios <- input$scenarios_selected + + if(is.null(additional_set_selected)) additional_set_selected <- set_info$set_elements[1] + if(set_info$additional_set_id != "na") { + 
filtered_data[[set_info$additional_set_id]] <- tolower(filtered_data[[set_info$additional_set_id]]) + filtered_data <- subset(filtered_data, get(set_info$additional_set_id) %in% additional_set_selected) + } + if(!is.null(regions) && "n" %in% names(filtered_data)) { + filtered_data <- subset(filtered_data, n %in% regions) + } + if(!is.null(scenarios)) { + filtered_data <- subset(filtered_data, file %in% scenarios) + } + return(DT::datatable(filtered_data, options = list(pageLength = 25, scrollX = TRUE), rownames = FALSE)) +} else { + # Show plot as usual + set_info <- extract_additional_sets(afd, file_group_columns) + output$choose_additional_set <- renderUI({ + variable <- variable_selected_reactive() + if(is.null(variable)) variable <- list_of_variables[1] + sel <- input$additional_set_id_selected + size_elements <- min(length(set_info$set_elements), 5) + selectInput("additional_set_id_selected", "Index 1:", set_info$set_elements, size=size_elements, selectize=FALSE, multiple=TRUE, selected=sel) + }) + yearlim <- input$yearlim + additional_set_selected <- input$additional_set_id_selected + regions <- input$regions_selected + scenarios <- input$scenarios_selected + if(is.null(regions)) regions <- display_regions + if(is.null(additional_set_selected)) additional_set_selected <- set_info$set_elements[1] + if((set_info$additional_set_id!="na" & additional_set_selected[1]=="na") | !(additional_set_selected[1] %in% set_info$set_elements)) additional_set_selected <- set_info$set_elements[1] + plot_data <- prepare_plot_data(variable, field_show, yearlim, scenarios, set_info$additional_set_id, additional_set_selected, NULL, NULL, regions, growth_rate, time_filter=TRUE, compute_aggregates=TRUE, verbose=verbose) + afd <- plot_data$data + unit_conv <- plot_data$unit_conv + if(growth_rate){ + unit_conv$unit <- " % p.a." 
+ unit_conv$convert <- 1 + } + # If stacked plot is requested, use stacked area plot + if(stacked_plot && length(regions) > 1){ + p <- ggplot(subset(afd, n %in% regions & !str_detect(file, "historical") & !str_detect(file, "valid")), aes(year, value, fill=n)) + geom_area(stat="identity", linewidth=1.5) + xlab("year") + ylab(unit_conv$unit) + scale_fill_manual(values=region_palette) + xlim(yearlim[1], yearlim[2]) + p <- p + theme(text=element_text(size=16), legend.position="bottom", legend.direction="horizontal", legend.box="vertical", legend.key=element_rect(colour=NA), legend.title=element_blank()) + guides(fill=guide_legend(title=NULL, nrow=2)) + if(!is.null(scenarios) && length(scenarios)>1) p <- p + facet_wrap(. ~ file) + } else if(regions[1]=="World" | length(regions)==1){ + p <- ggplot(subset(afd, n %in% regions & !str_detect(file, "historical") & !str_detect(file, "valid")), aes(year, value, colour=file)) + geom_line(stat="identity", linewidth=1.5) + xlab("year") + ylab(unit_conv$unit) + xlim(yearlim[1], yearlim[2]) + if(ylim_zero) p <- p + ylim(0, NA) + if(show_historical) { + p <- p + geom_line(data=subset(afd, n %in% regions & str_detect(file, "historical")), aes(year, value, colour=file), stat="identity", linewidth=1.0, linetype="solid") + p <- p + geom_point(data=subset(afd, n %in% regions & str_detect(file, "valid")), aes(year, value, colour=file), size=4.0, shape=18) + } + p <- p + theme(text=element_text(size=16), legend.position="bottom", legend.direction="horizontal", legend.box="vertical", legend.key=element_rect(colour=NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL)) + }else{ + p <- ggplot(subset(afd, n %in% regions & !str_detect(file, "historical") & !str_detect(file, "valid")), aes(year, value, colour=n, linetype=file)) + geom_line(stat="identity", linewidth=1.5) + xlab("year") + ylab(unit_conv$unit) + scale_colour_manual(values=region_palette) + xlim(yearlim[1], yearlim[2]) + if(show_historical) { + p <- p + 
geom_line(data=subset(afd, n %in% regions & str_detect(file, "historical")), aes(year, value, colour=n, group=interaction(n, file)), linetype="solid", stat="identity", linewidth=1.0) + p <- p + geom_point(data=subset(afd, n %in% regions & str_detect(file, "valid")), aes(year, value, colour=n, shape=file), size=4.0) + } + p <- p + theme(text=element_text(size=16), legend.position="bottom", legend.direction="horizontal", legend.box="vertical", legend.key=element_rect(colour=NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL, nrow=2), linetype=guide_legend(title=NULL)) + } + if(length(results_dir)!=1 && !stacked_plot) p <- p + facet_grid(. ~ pathdir) + if(nrow(afd)>0) { + return(renderPlot({print(p + labs(title=variable))}, height = 600)) + } +} +}) +output$gdxcompaRstackedplot <- renderPlot({ +show_historical <- input$add_historical # Checkbox controls plot visibility +ylim_zero <- input$ylim_zero +field_show <- input$field +variable <- input$variable_selected +if(is.null(variable)) variable <- list_of_variables[1] +afd <- get_witch(variable, , field=field_show) +if(verbose) print(str_glue("Variable {variable} loaded.")) +set_info <- extract_additional_sets(afd, file_group_columns) +output$choose_additional_set <- renderUI({ +variable <- variable_selected_reactive() +if(is.null(variable)) variable <- list_of_variables[1] +sel <- input$additional_set_id_selected +size_elements <- min(length(set_info$set_elements), 5) +selectInput("additional_set_id_selected", "Index 1:", set_info$set_elements, size=size_elements, selectize=FALSE, multiple=TRUE, selected=sel) +}) +yearlim <- input$yearlim +additional_set_selected <- input$additional_set_id_selected +regions <- input$regions_selected +scenarios <- input$scenarios_selected +if(is.null(regions)) regions <- display_regions +if(is.null(additional_set_selected)) additional_set_selected <- set_info$set_elements[1] +if((set_info$additional_set_id!="na" & additional_set_selected[1]=="na") | 
!(additional_set_selected[1] %in% set_info$set_elements)) additional_set_selected <- set_info$set_elements[1] +afd <- subset_by_additional_sets(afd, set_info$additional_set_id, additional_set_selected, NULL, NULL) +# Order pathdir factor according to results_dir vector +if("pathdir" %in% names(afd) && length(results_dir) > 1) { + pathdir_levels <- basename(results_dir) + afd$pathdir <- factor(afd$pathdir, levels=pathdir_levels) +} +# Calculate year if not already present BEFORE filtering +if(!"year" %in% names(afd)) { + if("tlen" %in% names(afd)) { + afd$year <- ttoyear(afd$t, afd$tlen) + } else { + afd$year <- ttoyear(afd$t) + } +} +afd <- subset(afd, year>=yearlim[1] & year<=yearlim[2]) +afd <- afd %>% filter(!is.na(value)) +afd <- subset(afd, file %in% c(scenarios, paste0(scenarios, "(b1)"), paste0(scenarios, "(b2)"), paste0(scenarios, "(b3)")) | str_detect(file, "historical") | str_detect(file, "valid")) +afd_hist <- subset(afd, file %in% c(str_subset(unique(afd$file), "historical")[1])) +afd <- subset(afd, file %in% c(scenarios)) +for(scen in scenarios){ +afd_hist$file <- scen +if(scen==scenarios[1]) afd_hist_temp <- afd_hist else afd_hist_temp <- rbind(afd_hist_temp, afd_hist) +} +afd <- rbind(afd, afd_hist) +unit_conv <- unit_conversion(variable) +afd$value <- afd$value * unit_conv$convert +p_stacked <- ggplot(subset(afd, n %in% regions & !str_detect(file, "historical") & !str_detect(file, "valid")), aes(year, value, fill=n)) + geom_area(stat="identity", linewidth=1.5) + xlab("year") + ylab(unit_conv$unit) + scale_fill_manual(values=region_palette) + xlim(yearlim[1], yearlim[2]) +p_stacked <- p_stacked + theme(text=element_text(size=16), legend.position="bottom", legend.direction="horizontal", legend.box="vertical", legend.key=element_rect(colour=NA), legend.title=element_blank()) + guides(fill=guide_legend(title=NULL, nrow=2)) +if(!is.null(scenarios)) p_stacked <- p_stacked + facet_wrap(. 
~ file) +if(nrow(afd)>0) print(p_stacked + labs(title=variable)) +}) +output$gdxompaRplotly <- renderPlot({ +show_historical <- input$add_historical # Checkbox controls plot visibility +ylim_zero <- input$ylim_zero +field_show <- input$field +growth_rate <- input$growth_rate +stacked_plot <- input$stacked_plot +variable <- input$variable_selected +if(is.null(variable)) variable <- list_of_variables[1] +afd <- get_witch(variable, , field=field_show) +if(verbose) print(str_glue("Variable {variable} loaded.")) +set_info <- extract_additional_sets(afd, file_group_columns) +yearlim <- input$yearlim +additional_set_selected <- input$additional_set_id_selected +regions <- input$regions_selected +scenarios <- input$scenarios_selected +if(is.null(regions)) regions <- display_regions +if(is.null(additional_set_selected)) additional_set_selected <- set_info$set_elements[1] +if((set_info$additional_set_id!="na" & additional_set_selected[1]=="na") | !(additional_set_selected[1] %in% set_info$set_elements)) additional_set_selected <- set_info$set_elements[1] +plot_data <- prepare_plot_data(variable, field_show, yearlim, scenarios, set_info$additional_set_id, additional_set_selected, NULL, NULL, regions, growth_rate, time_filter=TRUE, compute_aggregates=TRUE, verbose=verbose) +afd <- plot_data$data +unit_conv <- plot_data$unit_conv +if(growth_rate){ +unit_conv$unit <- " % p.a." 
+unit_conv$convert <- 1 +} +# Create plot using same logic as gdxcompaRplot +if(stacked_plot && length(regions) > 1){ +p <- ggplot(subset(afd, n %in% regions & !str_detect(file, "historical") & !str_detect(file, "valid")), aes(ttoyear(t), value, fill=n)) + geom_area(stat="identity", linewidth=1.5) + xlab("year") + ylab(unit_conv$unit) + scale_fill_manual(values=region_palette) + xlim(yearlim[1], yearlim[2]) +p <- p + theme(text=element_text(size=16), legend.position="bottom", legend.direction="horizontal", legend.box="vertical", legend.key=element_rect(colour=NA), legend.title=element_blank()) + guides(fill=guide_legend(title=NULL, nrow=2)) +if(!is.null(scenarios) && length(scenarios)>1) p <- p + facet_wrap(. ~ file) +} else if(regions[1]=="World" | length(regions)==1){ +p <- ggplot(subset(afd, n %in% regions & !str_detect(file, "historical") & !str_detect(file, "valid")), aes(ttoyear(t), value, colour=file)) + geom_line(stat="identity", linewidth=1.5) + xlab("year") + ylab(unit_conv$unit) + xlim(yearlim[1], yearlim[2]) +if(ylim_zero) p <- p + ylim(0, NA) +if(show_historical) { + p <- p + geom_line(data=subset(afd, n %in% regions & str_detect(file, "historical")), aes(year, value, colour=file), stat="identity", linewidth=1.0, linetype="solid") + p <- p + geom_point(data=subset(afd, n %in% regions & str_detect(file, "valid")), aes(year, value, colour=file), size=4.0, shape=18) +} +p <- p + theme(text=element_text(size=16), legend.position="bottom", legend.direction="horizontal", legend.box="vertical", legend.key=element_rect(colour=NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL)) +}else{ +p <- ggplot(subset(afd, n %in% regions & !str_detect(file, "historical") & !str_detect(file, "valid")), aes(ttoyear(t), value, colour=n, linetype=file)) + geom_line(stat="identity", linewidth=1.5) + xlab("year") + ylab(unit_conv$unit) + scale_colour_manual(values=region_palette) + xlim(yearlim[1], yearlim[2]) +if(show_historical) { + p <- p + 
geom_line(data=subset(afd, n %in% regions & str_detect(file, "historical")), aes(year, value, colour=n, group=interaction(n, file)), linetype="solid", stat="identity", linewidth=1.0) + p <- p + geom_point(data=subset(afd, n %in% regions & str_detect(file, "valid")), aes(year, value, colour=n, shape=file), size=4.0) +} +p <- p + theme(text=element_text(size=16), legend.position="bottom", legend.direction="horizontal", legend.box="vertical", legend.key=element_rect(colour=NA), legend.title=element_blank()) + guides(color=guide_legend(title=NULL, nrow=2), linetype=guide_legend(title=NULL)) +} +if(length(results_dir)!=1 && !stacked_plot) p <- p + facet_grid(. ~ pathdir) +if(nrow(afd)>0) print(p + labs(title=variable)) +}) +output$gdxcompaRmap <- renderPlot({ +variable <- input$variable_selected +yearlim <- input$yearlim +scenarios <- input$scenarios_selected +data <- get_witch(variable) +map_simple(data, yearmap=yearlim[2], scenplot=scenarios, title=str_glue("{variable} in {yearlim[2]}")) +}) +output$diagnostics <- renderPlot({ +# Diagnostics should never load historical data +assign("add_historical", FALSE, envir=.GlobalEnv) +variable <- input$variable_selected +yearlim <- input$yearlim +scenarios <- input$scenarios_selected +elapsed <- get_witch("elapsed") +if(!exists("elapsed")) elapsed <- data.frame(file=scenlist, value=0) +Y <- get_witch("Y") +if(!"year" %in% names(Y)) Y <- Y %>% mutate(year=if("tlen" %in% names(Y)) ttoyear(t, tlen) else ttoyear(t)) +TATM <- get_witch("TATM") +if(!"year" %in% names(TATM)) TATM <- TATM %>% mutate(year=if("tlen" %in% names(TATM)) ttoyear(t, tlen) else ttoyear(t)) +MIU <- get_witch("MIU") +if(!"year" %in% names(MIU)) MIU <- MIU %>% mutate(year=if("tlen" %in% names(MIU)) ttoyear(t, tlen) else ttoyear(t)) +l <- get_witch("l") +if(!"year" %in% names(l)) l <- l %>% mutate(year=if("tlen" %in% names(l)) ttoyear(t, tlen) else ttoyear(t)) +# Order pathdir factor according to results_dir vector +if("pathdir" %in% names(elapsed) && 
length(results_dir) > 1) { + pathdir_levels <- basename(results_dir) + elapsed$pathdir <- factor(elapsed$pathdir, levels=pathdir_levels) +} +if("pathdir" %in% names(Y) && length(results_dir) > 1) { + pathdir_levels <- basename(results_dir) + Y$pathdir <- factor(Y$pathdir, levels=pathdir_levels) +} +if("pathdir" %in% names(TATM) && length(results_dir) > 1) { + pathdir_levels <- basename(results_dir) + TATM$pathdir <- factor(TATM$pathdir, levels=pathdir_levels) +} +if("pathdir" %in% names(MIU) && length(results_dir) > 1) { + pathdir_levels <- basename(results_dir) + MIU$pathdir <- factor(MIU$pathdir, levels=pathdir_levels) +} +if("pathdir" %in% names(l) && length(results_dir) > 1) { + pathdir_levels <- basename(results_dir) + l$pathdir <- factor(l$pathdir, levels=pathdir_levels) +} +# Find common last year across all scenarios +last_year_by_scen <- Y %>% filter(file %in% scenarios) %>% group_by(file) %>% summarize(max_year=max(year, na.rm=TRUE)) +common_last_year <- min(last_year_by_scen$max_year, yearlim[2]) +# Filter all data to common year range +Y <- Y %>% filter(year >= yearlim[1] & year <= common_last_year) +TATM <- TATM %>% filter(year >= yearlim[1] & year <= common_last_year) +MIU <- MIU %>% filter(year >= yearlim[1] & year <= common_last_year) +l <- l %>% filter(year >= yearlim[1] & year <= common_last_year) +gini <- Y %>% left_join(l %>% rename(pop=value) %>% select(-year), by=c("t", "n", "file", "pathdir")) %>% group_by(t, year, file, pathdir) %>% summarize(value=reldist::gini(value/pop, weights=pop), .groups="drop") +diagplot <- list() +for(p in basename(results_dir)){ +diagplot[[p]] <- ggarrange( +ggplot(elapsed %>% filter(file %in% scenarios & pathdir==p)) + geom_bar(aes(file, value, fill=file), stat="identity") + ylab("Run time (minutes)") + theme(axis.text.x=element_text(angle=90, hjust=1)) + theme(axis.title.x=element_blank(), axis.text.x=element_blank(), axis.ticks.x=element_blank()) + ggtitle(p) + scale_y_continuous(limits=c(0, 
max(elapsed$value)+1e-10), labels=function(l) strftime(as.POSIXct(l, origin="1970-01-01"), "%M:%S")), +ggarrange( +ggplot(MIU %>% group_by(year, file, pathdir) %>% summarise(value=mean(value), .groups="drop") %>% filter(file %in% scenarios & pathdir==p)) + geom_line(aes(year, value, color=file), linewidth=1) + ylab("MIU") + xlab("") + xlim(yearlim[1], common_last_year), +ggplot(Y %>% filter(file %in% scenarios & pathdir==p) %>% group_by(year, file, pathdir) %>% summarise(value=sum(value), .groups="drop")) + geom_line(aes(year, value, color=file), linewidth=1) + ylab("GDP [T$]") + xlab("") + xlim(yearlim[1], common_last_year), +ncol=2, common.legend=T, legend="none" +), +ggarrange( +ggplot(TATM %>% filter(file %in% scenarios & pathdir==p & !is.na(value))) + geom_line(aes(year, value, color=file), linewidth=1) + ylab("TATM") + xlab("") + xlim(yearlim[1], common_last_year), +ggplot(gini %>% filter(file %in% scenarios & pathdir==p)) + geom_line(aes(year, value, color=file), linewidth=1) + ylab("Gini index of GDPpc") + xlab("") + ylim(0, 1) + xlim(yearlim[1], common_last_year), +ncol=2, common.legend=T, legend="none" +), +nrow=3, common.legend=T, legend="bottom" +) +} +diagplot_all <- ggarrange(plotlist=diagplot, ncol=length(diagplot), common.legend=T) +print(diagplot_all) +}) +output$inequalityplot <- renderPlot({ +variable_ineq <- input$variable_selected +yearlim <- input$yearlim +regions <- input$regions_selected +scenarios <- input$scenarios_selected +inequality_plot_type_selected <- input$inequality_plot_type_selected +inequality_value_share <- input$inequality_value_share +plot_inequality(variable=variable_ineq, plot_type=inequality_plot_type_selected, value_share=inequality_value_share, quantile_set="dist", regions=regions[1], years=seq(yearlim[1], yearlim[2]), years_lorenz=range(yearlim[1], yearlim[2]), scenplot=scenarios) +}) +output$tatmplot <- renderPlot({ +yearlim <- input$yearlim +scenarios <- input$scenarios_selected +gridded_temp_map(yearplot=yearlim[2], 
scenplot=scenarios, pathadj="../../") +}) +output$iterationplot <- renderPlot({ +# Iterations should never load historical data +assign("add_historical", FALSE, envir=.GlobalEnv) +yearlim <- input$yearlim +scenarios <- input$scenarios_selected +regions <- input$regions_selected +viter <- get_witch("viter") +viter <- viter %>% group_by(n, file, pathdir, v, iter) %>% arrange(t) %>% mutate(seen_nonzero=cumsum(value!=0)>0) %>% complete(t) %>% mutate(value=ifelse(is.na(value) & !seen_nonzero, 0, value)) %>% select(-seen_nonzero) %>% ungroup() +viter <- viter %>% group_by(n, file, pathdir, v, iter) %>% summarise(value=mean(value[ttoyear(t)>=yearlim[1] & ttoyear(t)<=yearlim[2]]), .groups="drop") +viter <- viter %>% filter(file %in% scenarios) +if(regions[1]!="World") viter <- viter %>% filter(n %in% regions) +p_iter <- ggplot(viter) + geom_line(aes(iter, value, color=n, group=n)) + facet_grid(v ~ file, scales="free_y") + theme(legend.position="none") +print(p_iter) +}) +}) diff --git a/gdxcompaR/rice/ui.R b/inst/gdxcompaR/rice/ui.R similarity index 53% rename from gdxcompaR/rice/ui.R rename to inst/gdxcompaR/rice/ui.R index b9bb058..37aa273 100644 --- a/gdxcompaR/rice/ui.R +++ b/inst/gdxcompaR/rice/ui.R @@ -12,28 +12,26 @@ sidebar_ui <- sidebarPanel( max = 2300, value = c(1990,2100), step = 5), - div(style="display:inline-block",checkboxInput("add_historical", "Add historical", value = T)), + div(style="display:inline-block",checkboxInput("add_historical", "Show historical", value = if(exists("add_historical")) add_historical else TRUE)), div(style="display:inline-block", checkboxInput("ylim_zero", "ymin=0", value = FALSE)), div(style="display:inline-block",checkboxInput("growth_rate", "Show growth rates", value = F)), - div(style="display:inline-block",radioButtons("field", "", choiceNames = c("l","up","lo"), choiceValues = c("l","up","lo"), inline = TRUE)), + div(style="display:inline-block",checkboxInput("stacked_plot", "Stacked plot", value = F)), + 
tags$div(style="display:inline-block", + tags$label("Show:", style="display:inline-block; margin-right: 5px;"), + div(style="display:inline-block", radioButtons("field", "", choiceNames = c("l","up","lo"), choiceValues = c("l","up","lo"), inline = TRUE)) + ), div(style="display:inline-block",actionButton("button_saveplotdata", "Save Plot")) ) -tabs_ui <- tabsetPanel(type = "tabs", id = "tabs", - tabPanel("gdxcompaR", id = "gdxcompaR", h2(textOutput("varname")),plotOutput("gdxcompaRplot", width = "100%", height = "80vh")), +tabs_ui <- tabsetPanel(type = "tabs", id = "tabs", + tabPanel("gdxcompaR", id = "gdxcompaR", h2(textOutput("varname")),uiOutput("gdxcompaRplot")), tabPanel("Diagnostics", id = "diagnostics", plotOutput("diagnostics", width = "100%", height = "80vh")), tabPanel("Iterations", id = "iterationplot", plotOutput("iterationplot", width = "100%", height = "80vh")), - tabPanel("gdxcompaRly (BETA)", id = "gdxcompaRly", plotlyOutput("gdxompaRplotly", width = "100%", height = "80vh")), - tabPanel("gdxcompaR stacked", id = "gdxcompaR_stacked", h2(textOutput("varname2")),plotOutput("gdxcompaRstackedplot", width = "100%", height = "80vh")), tabPanel("gdxcompaR MAP", id = "gdxcompaR_map", plotOutput("gdxcompaRmap", width = "100%", height = "80vh")), - tabPanel("Inequality", id = "Inequality", - div(style="display:inline-block",selectInput("inequality_plot_type_selected", "Plot Type:", c("quantiles", "gini", "lorenz_curve", "distribution") , size=1, selectize = F, multiple = F, selected = "Quantiles")), - div(style="display:inline-block",selectInput("inequality_value_share", "Plot value or share:", c("value", "share") , size=1, selectize = F, multiple = F, selected = "value")), - h2("Inequality Plots"),plotOutput("inequalityplot", width = "100%", height = "80vh")), - tabPanel("Temperature Map", id = "tatm_plot", plotOutput("tatmplot", width = "100%", height = "80vh")) + tabPanel("Temperature Map", id = "tatm_plot", plotOutput("tatmplot", width = "100%", height = 
"80vh")) ) ui <- fluidPage( diff --git a/inst/gdxcompaR/witch/global.R b/inst/gdxcompaR/witch/global.R new file mode 100644 index 0000000..f47c3bd --- /dev/null +++ b/inst/gdxcompaR/witch/global.R @@ -0,0 +1,21 @@ +# Global environment setup for WITCH Shiny app +# This file loads required packages and makes functions available + +# Suppress package startup messages and warnings about namespace conflicts +suppressPackageStartupMessages({ + library(shiny) + library(ggplot2) + library(plotly) + library(data.table) + library(dplyr) # Load dplyr after data.table to prioritize dplyr functions + library(stringr) + library(shinyWidgets) + library(gdxtools) + library(witchtools) + library(tidyr) + # Optional packages + if(requireNamespace("arrow", quietly=TRUE)) library(arrow) + if(requireNamespace("ggpubr", quietly=TRUE)) library(ggpubr) + if(requireNamespace("rnaturalearth", quietly=TRUE)) library(rnaturalearth) + if(requireNamespace("sf", quietly=TRUE)) library(sf) +}) diff --git a/inst/gdxcompaR/witch/server.R b/inst/gdxcompaR/witch/server.R new file mode 100644 index 0000000..558877b --- /dev/null +++ b/inst/gdxcompaR/witch/server.R @@ -0,0 +1,184 @@ +shinyServer(function(input, output, session) { +verbose <- FALSE +if(deploy_online){ +suppressPackageStartupMessages(require(tidyverse)) +require(plotly) +require(shinyWidgets) +add_historical_values <- function(x, varname, iiasadb, verbose){return(x)} +get_witch <- function(variable, field){return(allvariables[[variable]])} +} +list_of_variables <- get_gdx_variable_list(results_dir, filelist, filter_time_dependent=FALSE) +output$select_scenarios <- renderUI({create_scenario_selector(scenlist)}) +output$select_variable <- renderUI({create_variable_selector(list_of_variables, default_var="Q_EMI", use_picker=TRUE)}) +output$select_regions <- renderUI({create_region_selector(witch_regions, include_aggregates=c("World", "EU"), default_region="World")}) +variable_input <- reactive({return(input$variable_selected)}) + +# 
PERFORMANCE FIX: Move index selectors OUTSIDE renderPlot +# This prevents them from re-rendering every time the plot updates +# Only update when variable changes +set_info_reactive <- reactive({ + variable <- variable_input() + if(is.null(variable)) variable <- list_of_variables[1] + field_show <- input$field + afd <- get_witch(variable, , field=field_show) + extract_additional_sets(afd, file_group_columns) +}) + +output$choose_additional_set <- renderUI({ + set_info <- set_info_reactive() + variable <- variable_input() + sel <- input$additional_set_id_selected + + # Smart default: prefer "co2_ffi" for Q_EMI, otherwise first element + if(is.null(sel) || !all(sel %in% set_info$set_elements)){ + if(variable == "Q_EMI" && "co2_ffi" %in% set_info$set_elements) { + sel <- "co2_ffi" + } else { + sel <- set_info$set_elements[1] + } + } + + size_elements <- min(length(set_info$set_elements), 5) + selectInput(inputId="additional_set_id_selected", label="Index 1:", choices=set_info$set_elements, size=size_elements, selectize=FALSE, multiple=TRUE, selected=sel) +}) + +output$choose_additional_set2 <- renderUI({ + set_info <- set_info_reactive() + sel2 <- input$additional_set_id_selected2 + size_elements2 <- min(length(set_info$set_elements2), 5) + selectInput(inputId="additional_set_id_selected2", label="Index 2:", choices=set_info$set_elements2, size=size_elements2, selectize=FALSE, multiple=TRUE, selected=sel2) +}) + +output$varname <- renderText({ + var_text <- paste0("Variable: ", variable_input()) + if(!is.null(input$additional_set_id_selected) && input$additional_set_id_selected[1] != "na") { + var_text <- paste0(var_text, " - Element: ", str_trunc(paste(input$additional_set_id_selected, collapse=","), 20)) + } + if(!is.null(input$additional_set_id_selected2) && input$additional_set_id_selected2[1] != "na") { + var_text <- paste0(var_text, " - Element2: ", str_trunc(paste(input$additional_set_id_selected2, collapse=","), 20)) + } + if(!is.null(input$regions_selected) && 
length(input$regions_selected)==1) { + var_text <- paste0(var_text, " - Region: ", input$regions_selected[1]) + } + var_text +}) + +observeEvent(input$button_saveplotdata, { +variable <- input$variable_selected +print("Current plot saved in subdirectory 'graphs'") +saveplot(variable, width=14, height=7) +}) + +output$gdxompaRplot <- renderUI({ +show_historical <- input$add_historical +ylim_zero <- input$ylim_zero +field_show <- input$field +variable <- input$variable_selected +if(is.null(variable)) variable <- list_of_variables[1] +set_info <- set_info_reactive() +yearlim <- input$yearlim +additional_set_selected <- input$additional_set_id_selected +additional_set_selected2 <- input$additional_set_id_selected2 +regions <- input$regions_selected +scenarios <- input$scenarios_selected +if(is.null(regions)) regions <- display_regions +if(is.null(additional_set_selected)) additional_set_selected <- set_info$set_elements[1] +if((set_info$additional_set_id!="na" & additional_set_selected[1]=="na") | !(additional_set_selected[1] %in% set_info$set_elements)) additional_set_selected <- set_info$set_elements[1] +if(is.null(additional_set_selected2)) additional_set_selected2 <- set_info$set_elements2[1] +if((set_info$additional_set_id2!="na" & additional_set_selected2[1]=="na") | !(additional_set_selected2[1] %in% set_info$set_elements2)) additional_set_selected2 <- set_info$set_elements2[1] + +# Check if variable has time dimension +afd <- get_witch(variable, , field=field_show) +has_time <- "t" %in% names(afd) + +if(!has_time) { + # Show dynamic table instead of plot + filtered_data <- afd + # Order pathdir factor according to results_dir vector + if("pathdir" %in% names(filtered_data) && length(results_dir) > 1) { + pathdir_levels <- basename(results_dir) + filtered_data$pathdir <- factor(filtered_data$pathdir, levels=pathdir_levels) + } + if(set_info$additional_set_id != "na") { + filtered_data[[set_info$additional_set_id]] <- 
tolower(filtered_data[[set_info$additional_set_id]]) + filtered_data <- subset(filtered_data, get(set_info$additional_set_id) %in% additional_set_selected) + } + if(set_info$additional_set_id2 != "na") { + filtered_data[[set_info$additional_set_id2]] <- tolower(filtered_data[[set_info$additional_set_id2]]) + filtered_data <- subset(filtered_data, get(set_info$additional_set_id2) %in% additional_set_selected2) + } + if(!is.null(regions) && "n" %in% names(filtered_data)) { + filtered_data <- subset(filtered_data, n %in% regions) + } + if(!is.null(scenarios)) { + filtered_data <- subset(filtered_data, file %in% scenarios) + } + return(DT::datatable(filtered_data, options = list(pageLength = 25, scrollX = TRUE), rownames = FALSE)) +} else { + # Show plot as usual + plot_data <- prepare_plot_data(variable, field_show, yearlim, scenarios, set_info$additional_set_id, additional_set_selected, set_info$additional_set_id2, additional_set_selected2, regions, growth_rate_flag=FALSE, time_filter=TRUE, compute_aggregates=TRUE, verbose=verbose) + p <- create_gdx_plot(plot_data$data, variable, plot_data$unit_conv, regions, yearlim, ylim_zero, region_palette, results_dir, show_historical) + if(!is.null(p)) { + return(renderPlot({print(p)}, height = 600)) + } +} +}) + +output$Diagnostics <- renderPlot({ +yearlim <- input$yearlim +scenarios <- input$scenarios_selected +diagnostics_plots(scenplot=scenarios) +}) + +output$energymixplot <- renderPlot({ +yearlim <- input$yearlim +regions <- input$regions_selected +scenarios <- input$scenarios_selected +mix_plot_type_selected <- input$mix_plot_type_selected +mix_y_value_selected <- input$mix_y_value_selected +# FIX: Handle EU region properly - convert to actual EU region list +plot_region <- regions[1] +if(plot_region == "EU") { + eu <- tryCatch(get_witch("eu"), error = function(e) NULL) + eu_regions <- if(is.null(eu) || nrow(eu)==0) c("europe") else unique(eu$n) + plot_region <- eu_regions[1] # Use first EU region for mix plot +} 
+Primary_Energy_Mix(PES_y=mix_y_value_selected, regions=plot_region, years=seq(yearlim[1], yearlim[2], 1), plot_type=mix_plot_type_selected, scenplot=scenarios) +}) + +output$electricitymixplot <- renderPlot({ +yearlim <- input$yearlim +regions <- input$regions_selected +scenarios <- input$scenarios_selected +mix_plot_type_selected <- input$mix_plot_type_selected +mix_y_value_selected <- input$mix_y_value_selected +# FIX: Handle EU region properly - convert to actual EU region list +plot_region <- regions[1] +if(plot_region == "EU") { + eu <- tryCatch(get_witch("eu"), error = function(e) NULL) + eu_regions <- if(is.null(eu) || nrow(eu)==0) c("europe") else unique(eu$n) + plot_region <- eu_regions[1] # Use first EU region for mix plot +} +Electricity_Mix(Electricity_y=mix_y_value_selected, regions=plot_region, years=seq(yearlim[1], yearlim[2], 1), plot_type=mix_plot_type_selected, scenplot=scenarios) +}) + +output$investmentplot <- renderPlot({ +scenarios <- input$scenarios_selected +Investment_Plot(regions="World", scenplot=scenarios) +}) + +output$policycostplot <- renderPlot({ +yearlim <- input$yearlim +regions <- input$regions_selected +scenarios <- input$scenarios_selected +Policy_Cost(discount_rate=5, regions=regions, bauscen=scenarios[1], show_numbers=TRUE, tmax=yeartot(yearlim[2])) +}) + +output$intensityplot <- renderPlot({ +yearlim <- input$yearlim +regions <- input$regions_selected +scenarios <- input$scenarios_selected +Intensity_Plot(years=c(yearlim[2], yearlim[2]-50), regions=regions, year0=2010, scenplot=scenarios, animate_plot=FALSE) +}) + +}) diff --git a/gdxcompaR/witch/ui.R b/inst/gdxcompaR/witch/ui.R similarity index 62% rename from gdxcompaR/witch/ui.R rename to inst/gdxcompaR/witch/ui.R index ae4ba61..1af35ca 100644 --- a/gdxcompaR/witch/ui.R +++ b/inst/gdxcompaR/witch/ui.R @@ -5,7 +5,7 @@ #only if deployed online deploy_online <<- F -if(!exists("witch_folder")){ +if(!exists("results_dir")){ load(file="allvariables.Rdata", envir = .GlobalEnv) 
#Install and load packages require_package <- function(package){ @@ -31,48 +31,35 @@ sidebar_ui <- sidebarPanel( value = c(1990,2100), step = 5), div(style="display:inline-block", - checkboxInput("time_filter", - "Time filter", - value = TRUE)), - div(style="display:inline-block", - checkboxInput("add_historical", - "Historical", - value = TRUE)), + checkboxInput("add_historical", + "Show historical", + value = if(exists("add_historical")) add_historical else TRUE)), div(style="display:inline-block", checkboxInput("ylim_zero", "ymin=0", value = FALSE)), - div(style="display:inline-block",radioButtons("field", "", choiceNames = c("l","up","lo"), choiceValues = c("l","up","lo"), inline = TRUE)), + tags$div(style="display:inline-block", + tags$label("Show:", style="display:inline-block; margin-right: 5px;"), + div(style="display:inline-block", radioButtons("field", "", choiceNames = c("l","up","lo"), choiceValues = c("l","up","lo"), inline = TRUE)) + ), div(style="display:inline-block",actionButton("button_saveplotdata", "Save Plot")) ) tabs_ui <- tabsetPanel(type = "tabs", id = "tabs", - - tabPanel("gdxcompaR", id = "gdxcompaR", h3(textOutput("varname")),plotOutput("gdxompaRplot", width = "100%", height = "80vh")), - tabPanel("gdxcompaRly (BETA)", id = "gdxcompaRly", plotlyOutput("gdxompaRplotly", width = "100%", height = "80vh")), + + tabPanel("gdxcompaR", id = "gdxcompaR", h3(textOutput("varname")),uiOutput("gdxompaRplot")), tabPanel("Diagnostics", id = "Diagnostics", h2("Diagnostics of model runs"),plotOutput("Diagnostics", width = "100%", height = "80vh")), - tabPanel("Energy Mix", id = "Energy Mix", - div(style="display:inline-block",selectInput("mix_y_value_selected", "Plot value or share:", c("value", "share") , size=1, selectize = F, multiple = F, selected = "value")), + tabPanel("Energy Mix", id = "Energy Mix", + div(style="display:inline-block",selectInput("mix_y_value_selected", "Plot value or share:", c("value", "share") , size=1, selectize = F, multiple 
= F, selected = "value")), div(style="display:inline-block",selectInput("mix_plot_type_selected", "Plot Type:", c("area", "line", "bar") , size=1, selectize = F, multiple = F, selected = "area")), h2("Energy Mix"),plotOutput("energymixplot", width = "100%", height = "80vh")), - tabPanel("Electricity Mix", id = "Electricity Mix", - div(style="display:inline-block",selectInput("mix_y_value_selected", "Plot value or share:", c("value", "share") , size=1, selectize = F, multiple = F, selected = "value")), + tabPanel("Electricity Mix", id = "Electricity Mix", + div(style="display:inline-block",selectInput("mix_y_value_selected", "Plot value or share:", c("value", "share") , size=1, selectize = F, multiple = F, selected = "value")), div(style="display:inline-block",selectInput("mix_plot_type_selected", "Plot Type:", c("area", "line", "bar") , size=1, selectize = F, multiple = F, selected = "area")), h2("Electricity Mix"),plotOutput("electricitymixplot", width = "100%", height = "80vh")), tabPanel("Investment", id = "Investment", h2("Investment"),plotOutput("investmentplot", width = "100%", height = "80vh")), tabPanel("Policy Cost", id = "Policy Cost", h2("Policy Cost"),p("Select BAU scenario under 'scenarios'."),plotOutput("policycostplot", width = "100%", height = "80vh")), - tabPanel("Intensity Plot", id = "Intensity Plot", h2("Energy and Carbon Intensity"),plotOutput("intensityplot", width = "100%", height = "80vh")), - tabPanel("Impact Map", id = "Impact Map", h2("GDP Impact [% loss wrt BAU]"),plotOutput("impactmap", width = "100%", height = "80vh")), - tabPanel("Climate", id = "climate", h2("The Climate"),plotOutput("climate_plot", width = "100%", height = "80vh")), - tabPanel("SCC", id = "SCC", - div(style="display:inline-block",selectInput("scc_normalization_region", "Normalization region:", c("World", witch_regions) , size=1, selectize = F, multiple = F, selected = "World")), - h2("Social Cost of Carbon"),plotOutput("SCC_plot", width = "100%", height = "80vh")), 
- tabPanel("Inequality", id = "Inequality", - div(style="display:inline-block",selectInput("inequality_plot_type_selected", "Plot Type:", c("quantiles", "gini", "lorenz_curve", "distribution") , size=1, selectize = F, multiple = F, selected = "Quantiles")), - div(style="display:inline-block",selectInput("inequality_value_share", "Plot value or share:", c("value", "share") , size=1, selectize = F, multiple = F, selected = "value")), - h2("Inequality Plots"),plotOutput("inequalityplot", width = "100%", height = "80vh")) - - + tabPanel("Intensity Plot", id = "Intensity Plot", h2("Energy and Carbon Intensity"),plotOutput("intensityplot", width = "100%", height = "80vh")) ) ui <- fluidPage( diff --git a/man/figures/logo.png b/man/figures/logo.png new file mode 100644 index 0000000..619a0b4 Binary files /dev/null and b/man/figures/logo.png differ diff --git a/man/run_fidelio.Rd b/man/run_fidelio.Rd new file mode 100644 index 0000000..b765ae9 --- /dev/null +++ b/man/run_fidelio.Rd @@ -0,0 +1,54 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/witchplot.R +\name{run_fidelio} +\alias{run_fidelio} +\title{Launch FIDELIO Model Interactive Visualization} +\usage{ +run_fidelio( + results_dir = "./", + restrict_files = "results_", + exclude_files = "", + removepattern = "results_", + add_historical = TRUE, + deploy_online = FALSE, + figure_format = "png", + write_plotdata_csv = FALSE, + launch = TRUE, + ... 
+) +} +\arguments{ +\item{results_dir}{Path(s) to results directory containing GDX files (default: "./")} + +\item{restrict_files}{Pattern to filter GDX files (default: "results_")} + +\item{exclude_files}{Pattern to exclude GDX files (default: "")} + +\item{removepattern}{Pattern to remove from scenario names (default: "results_")} + +\item{add_historical}{Logical, add historical data where available (default: TRUE)} + +\item{deploy_online}{Logical, whether to deploy online (default: FALSE)} + +\item{figure_format}{Output format for figures (default: "png")} + +\item{write_plotdata_csv}{Logical, save plot data as CSV (default: FALSE)} + +\item{launch}{Logical, launch Shiny app immediately (default: TRUE)} + +\item{...}{Additional options passed to session configuration} +} +\value{ +Invisibly returns NULL. Launches Shiny application if launch=TRUE. +} +\description{ +Loads FIDELIO model GDX result files and launches an interactive Shiny application +for analyzing economic impacts and input-output model results. +} +\examples{ +\dontrun{ + run_fidelio() + run_fidelio(results_dir = "results") +} + +} diff --git a/man/run_iiasadb.Rd b/man/run_iiasadb.Rd new file mode 100644 index 0000000..1490110 --- /dev/null +++ b/man/run_iiasadb.Rd @@ -0,0 +1,77 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/witchplot.R +\name{run_iiasadb} +\alias{run_iiasadb} +\title{Launch IIASA Database Comparison Viewer} +\usage{ +run_iiasadb( + results_dir = "./", + reg_id = c("r5"), + iamc_filename = NULL, + iamc_databasename = NULL, + add_historical = TRUE, + deploy_online = FALSE, + figure_format = "png", + write_plotdata_csv = FALSE, + launch = TRUE, + ... +) +} +\arguments{ +\item{results_dir}{Path(s) to director(ies) containing IAMC format files.
Can be a vector for multiple directories (default: "./")} + +\item{reg_id}{Regional aggregation(s) to display, e.g., c("witch20", "global") (default: c("r5"))} + +\item{iamc_filename}{Specific IAMC file to load (CSV, XLSX, or CSV.ZIP). If NULL, loads all CSV/XLSX files in results_dir (default: NULL)} + +\item{iamc_databasename}{Name of IIASA database to connect to (e.g., "ENGAGE"). Alternative to iamc_filename (default: NULL)} + +\item{add_historical}{Logical, add historical data where available (default: TRUE)} + +\item{deploy_online}{Logical, whether to deploy online (default: FALSE)} + +\item{figure_format}{Output format for figures (default: "png")} + +\item{write_plotdata_csv}{Logical, save plot data as CSV (default: FALSE)} + +\item{launch}{Logical, launch Shiny app immediately (default: TRUE)} + +\item{...}{Additional options passed to session configuration} + +\item{year0}{Base year for the model (default: 2005)} + +\item{tstep}{Time step in years (default: 5)} + +\item{map_var_hist}{Data frame mapping IAMC variables to historical data sources. If NULL, uses default mapping.} +} +\value{ +Invisibly returns NULL. Launches Shiny application if launch=TRUE. +} +\description{ +Loads IAM scenario data in IAMC format (CSV/XLSX files or IIASA database connection) +and launches an interactive Shiny application for comparing scenarios. +} +\details{ +By default (iamc_filename=NULL), automatically discovers and loads all CSV and XLSX files +in the results_dir. Files are combined into a single dataset for comparison. +Supports multiple directories - pass as a vector to load and compare across directories.
+} +\examples{ +\dontrun{ + # Auto-load all CSV/XLSX files in current directory + run_iiasadb() + + # Load specific file + run_iiasadb(iamc_filename = "scenarios.csv") + + # Load from custom directory + run_iiasadb(results_dir = "EIEE-MIP") + + # Compare across multiple directories + run_iiasadb(results_dir = c("results_v1", "results_v2")) + + # Connect to IIASA database + run_iiasadb(iamc_databasename = "ENGAGE") +} + +} diff --git a/man/run_rice.Rd b/man/run_rice.Rd new file mode 100644 index 0000000..0ea78da --- /dev/null +++ b/man/run_rice.Rd @@ -0,0 +1,79 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/witchplot.R +\name{run_rice} +\alias{run_rice} +\title{Launch RICE50+ Model Interactive Visualization} +\usage{ +run_rice( + results_dir = "./", + reg_id = "ed58", + year0 = 2015, + tstep = 5, + restrict_files = "results_", + exclude_files = "", + removepattern = "results_", + add_historical = TRUE, + deploy_online = FALSE, + figure_format = "png", + write_plotdata_csv = FALSE, + launch = TRUE, + ... 
+) +} +\arguments{ +\item{results_dir}{Path(s) to results directory containing GDX files (default: "./")} + +\item{reg_id}{Regional aggregation ID, e.g., "ed58" for 58 regions (default: "ed58")} + +\item{year0}{Base year for the model (default: 2015)} + +\item{tstep}{Time step in years (default: 5)} + +\item{restrict_files}{Pattern to filter GDX files (default: "results_")} + +\item{exclude_files}{Pattern to exclude GDX files (default: "")} + +\item{removepattern}{Pattern to remove from scenario names (default: "")} + +\item{add_historical}{Logical, add historical data where available (default: TRUE)} + +\item{deploy_online}{Logical, whether to deploy online (default: FALSE)} + +\item{figure_format}{Output format for figures: "png", "pdf", "svg" (default: "png")} + +\item{write_plotdata_csv}{Logical, save plot data as CSV (default: FALSE)} + +\item{launch}{Logical, launch Shiny app immediately (default: TRUE)} + +\item{...}{Additional options passed to session configuration. Useful options include: +\itemize{ + \item \code{file_separate}: Vector to split scenario names into multiple columns. Format: c("type", "separator", "col1", "col2", ...). + Type can be "separate" (split all), "first" (first element), or "last" (last element). + Example: \code{file_separate = c("separate", "_", "model", "scenario", "carbon_price")} splits "SSP2_1p5C_high" into three columns. + \item \code{nice_region_names}: Named vector to rename regions for display. Example: \code{c("usa_te" = "USA", "eur" = "Europe")} + \item \code{restrict_regions}: Character vector of regions to display (filters out others) +}} +} +\value{ +Invisibly returns NULL. Launches Shiny application if launch=TRUE. +} +\description{ +Loads RICE50+ model GDX result files and launches an interactive Shiny application +for scenario comparison and visualization with regional disaggregation. 
+} +\examples{ +\dontrun{ + # Basic usage + run_rice() + + # Disable historical data + run_rice(add_historical = FALSE) + + # Custom regional aggregation + run_rice(reg_id = "ed57", year0 = 2020, tstep = 10) + + # Specify custom paths + run_rice(results_dir = "results") +} + +} diff --git a/man/run_witch.Rd b/man/run_witch.Rd new file mode 100644 index 0000000..1280873 --- /dev/null +++ b/man/run_witch.Rd @@ -0,0 +1,33 @@ +\name{run_witch} +\alias{run_witch} +\title{Run WITCH Model Visualization App} +\description{ +Launch interactive Shiny application for WITCH model results visualization and comparison. +} +\usage{ +run_witch(folder = NULL, main_folder = NULL, subdir = "", + restrict_files = "results_", exclude_files = "", + removepattern = "", yearmin = 1980, yearmax = 2100, ...) +} +\arguments{ +\item{folder}{Path to WITCH model folder (required)} +\item{main_folder}{Main results directory (defaults to folder)} +\item{subdir}{Subdirectories to scan (default: "")} +\item{restrict_files}{File pattern filter (default: "results_")} +\item{exclude_files}{Files to exclude (default: "")} +\item{removepattern}{Pattern to remove from names (default: "")} +\item{yearmin}{Minimum year for visualization (default: 1980)} +\item{yearmax}{Maximum year for visualization (default: 2100)} +\item{...}{Additional options passed to options()} +} +\value{ +Launches Shiny application (no return value) +} +\examples{ +\dontrun{ +library(witchplot) +run_witch(folder="../witch", yearmin=2000, yearmax=2100) +} +} +\author{Johannes Emmerling and WITCH Team} +\seealso{\code{\link{run_rice}}, \code{\link{run_fidelio}}, \code{\link{run_iiasadb}}} diff --git a/plotgdx_fidelio.R b/plotgdx_fidelio.R deleted file mode 100644 index 981f9ae..0000000 --- a/plotgdx_fidelio.R +++ /dev/null @@ -1,40 +0,0 @@ -rm(list = ls()) -witch_folder = "../fidelio" #Where you're RICE/DICE/RICE50x code is located (FIDELIO is in EUR of 2010) (EUR2010 to USD2005 = 1.33/1.10774) -#main directory of your results files 
-main_folder <- witch_folder # by default, the witch source folder -subdir = c("") #can be multiple directories - -reg_id = "fidelio46" #for historical data folder -year0 = 2015 -tstep = 1 - -restrict_files = c("results") #to all scenarios matching partly one of its arguments -exclude_files = c("") -removepattern = c("") - -yearmin = 1980 -yearmax = 2050 - -#Initialize default options, load all witch and other functionsget -source('R/witch_functions.R') - -#mapping of variables to historical and validation statistics and unit conversion from WITCH historical to model -map_var_hist <- fread("varname_model, set_model, element_model, var_witch, set_witch, element_witch, conv -Y, , , SOCECON, *, gdp-ppp, 1 -E, , , Q_EMI, e, co2, 1/(44/12) -EIND, , , Q_EMI, e, co2ffi, 1/(44/12) -ELAND, , , Q_EMI, e, co2lu, 1/(44/12) -pop, , , l, , , 1e3 -K, , , K, g, fg, 1 -I, , , I, g, fg, 1 -GDP_VA_VAL_t, , , ykali, , , 1e6/1.09 -") -#compute numerical conversion factor -map_var_hist <- map_var_hist %>% rowwise() %>% mutate(conv=eval(parse(text = conv))) %>% as.data.table() - - -#gdxcompaR(Standard gdxcompaR based on typical variables, otherwise edit in gdxcompaR/server.R) -runApp(appDir = "gdxcompaR/fidelio") - -get_plot_witch("GDP_VA_VAL_t") -get_witch("GDP_VA_VAL_t") diff --git a/plotgdx_iiasadb.R b/plotgdx_iiasadb.R deleted file mode 100644 index 346e9ec..0000000 --- a/plotgdx_iiasadb.R +++ /dev/null @@ -1,75 +0,0 @@ -rm(list = ls()) -witch_folder = "../witch" #Where you're WITCH code is located -main_folder <- witch_folder # by default, the witch source folder -subdir = c("EIEE-MIP") #can be multiple directories - -reg_id <- c("witch20", "global") #choose the aggregations that has the historical data at the aggregation closes to the iamc data - -#set or an iamc_filename OR iamc_databasename -iamc_filename <- "EIEE_MIP_2023_11_23.csv" #IIASADB snapshot file to read in main_folder/subdir/ -#iamc_databasename <- "navigate_internal" #IIASADB database name to read - - 
-source('R/witch_functions.R') - - -#mapping of variables to historical and validation statistics and unit conversion from WITCH historical to model -map_var_hist <- fread("varname_model, set_model, element_model, var_witch, set_witch, element_witch, conv -Primary Energy, , , TPES, , , 0.0036 -Emissions|CO2, , , Q_EMI, e, co2, 1e3*(44/12) -Emissions|CO2|Energy, , , Q_EMI, e, co2ffi, 1e3*(44/12) -Emissions|CH4, , , Q_EMI, e, ch4, 1e3*(44/12)/28 -Population, , , l, , , 1 -GDP|MER, , , Q, iq, y, 1e3 -E, ghg, co2, Q_EMI, e, co2, 44/12 -") -#compute numerical conversion factor -map_var_hist <- map_var_hist %>% rowwise() %>% mutate(conv=eval(parse(text = conv))) %>% as.data.table() - - - -if(exists("iamc_databasename")){ - if(file.exists("gdxcompaR/iiasadb/iiasadb_snapshot.Rdata")){ - input <- menu(c("Yes", "No"), title="There is a snapshot saved. Do you want to load it locally?") - if(input==1) load("gdxcompaR/iiasadb/iiasadb_snapshot.Rdata") - }else{ - iiasadb_snapshot <- get_iiasadb(database = iamc_databasename, varlist = "*", region="World", modlist = "*", scenlist = "*", add_metadata = F) - #convert to IAMC standard format - names(iiasadb_snapshot) <- toupper(names(iiasadb_snapshot)) - iiasadb_snapshot <- iiasadb_snapshot %>% select(MODEL, SCENARIO, REGION, VARIABLE, UNIT, YEAR, VALUE) %>% dplyr::rename(value=VALUE) %>% filter(!is.na(value)) - } -}else{ -#IIASADB from a xlsx/csv/zipped csv file in the subfolder specified above -# IIASADB snapshot file to read -if(str_detect(iamc_filename, ".xlsx$")){iiasadb_snapshot <- read.xlsx(file.path(main_folder, subdir, iamc_filename), sheet = 1);names(iiasadb_snapshot) <- toupper(names(iiasadb_snapshot))} -#from zipped CSV files (old iiasadb snapshots) -if(str_detect(iamc_filename, ".csv.zip$")){iiasadb_snapshot <- fread(cmd=paste0('unzip -cq "', file.path(file.path(main_folder, subdir, iamc_filename)),'" ', gsub(".zip","",basename(file.path(main_folder, subdir, iamc_filename)))), header=T, quote="\"", sep=",", check.names = 
FALSE);names(iiasadb_snapshot) <- toupper(names(iiasadb_snapshot))} -#from zipped CSV files (old iiasadb snapshots) -if(str_detect(iamc_filename, ".csv$")){iiasadb_snapshot <- fread(file.path(main_folder, subdir, iamc_filename), header=T, quote="\"", sep=",", check.names = FALSE);names(iiasadb_snapshot) <- toupper(names(iiasadb_snapshot))} -#convert to iiasadb long format -iiasadb_snapshot <- iiasadb_snapshot %>% mutate(across(matches("^\\d{4}$"), ~suppressWarnings(as.numeric(.x)))) -iiasadb_snapshot <- iiasadb_snapshot %>% pivot_longer(cols = -c(MODEL, SCENARIO, REGION, VARIABLE, UNIT), names_to = "YEAR") %>% mutate(YEAR=as.integer(YEAR)) %>% as.data.frame() -} -#to avoid casing issues, for now always use upper case for regions -iiasadb_snapshot <- iiasadb_snapshot %>% mutate(REGION=toupper(REGION)) -if(!exists("iiasadb_snapshot")) stop("Please check you specified a correct iiasadb file or connection.") - -#use only a subset of the data -#iiasadb_snapshot <- iiasadb_snapshot %>% filter(REGION %in% c("WORLD", "EU27", "EUROPE", "ITALY", "JAPAN")) - -#function to get historical values for all data where map_var_hist is defined -iiasadb_with_historical = list() -for(varname in map_var_hist$varname_model){ - if(nrow(iiasadb_snapshot %>% filter(VARIABLE==varname))>0) iiasadb_with_historical[[varname]] <- add_historical_values(iiasadb_snapshot %>% filter(VARIABLE==varname), varname = varname, check_calibration = T, iiasadb = T, verbose = F) -} -iiasadb_historical <- rbindlist(iiasadb_with_historical) %>% filter(str_detect(SCENARIO, "historical")) %>% as.data.frame() - -#store also in the shiny folder for online deployment -save(iiasadb_snapshot, iiasadb_historical, file = "gdxcompaR/iiasadb/iiasadb_snapshot.Rdata") - -#launch gdxcompaR -runApp(appDir = "gdxcompaR/iiasadb") - - -stop("Just run the App") - diff --git a/plotgdx_rice.R b/plotgdx_rice.R deleted file mode 100644 index 144cd2d..0000000 --- a/plotgdx_rice.R +++ /dev/null @@ -1,41 +0,0 @@ -rm(list = ls()) 
-witch_folder = "../RICE50x" #Where you're RICE/DICE/RICE50x code is located -#main directory of your results files -main_folder <- witch_folder # by default, the witch source folder -subdir = c("") #can be multiple directories - -reg_id = "ed58" #for historical data folder -year0 = 2015 -tstep = 5 - -restrict_files = c("results_") #to all scenarios matching partly at least one of its arguments -exclude_files = c("") -removepattern = c("") - -yearmin = 1980 -yearmax = 2300 - -#Initialize default options, load all witch and other functionsget -source('R/witch_functions.R') - -#mapping of variables to historical and validation statistics and unit conversion from WITCH to MODEL units -map_var_hist <- fread("varname_model, set_model, element_model, var_witch, set_witch, element_witch, conv -Y, , , SOCECON, *, gdp-ppp, 1 -EIND, ghg, co2, Q_EMI, e, co2_ffi, 44/12 -ELAND, , , Q_EMI, e, co2lu, 44/12 -E, ghg, ch4, Q_EMI, e, ch4, 1e3*44/12/25 -E, ghg, n2o, Q_EMI, e, n2o, 1e3*44/12/298 -E, ghg, co2, Q_EMI, e, co2, 44/12 -pop, , , l, , , 1 -K, , , K, g, fg, 1 -I, , , I, g, fg, 1 -") -#compute numerical conversion factor -map_var_hist <- map_var_hist %>% rowwise() %>% mutate(conv=eval(parse(text = conv))) %>% as.data.table() - - -#gdxcompaR(Standard gdxcompaR based on typical variables, otherwise edit in gdxcompaR/server.R) -runApp(appDir = "gdxcompaR/rice") - -get_plot_witch("E") -Y <- get_witch("Y") diff --git a/plotgdx_witch.R b/plotgdx_witch.R deleted file mode 100644 index e145b17..0000000 --- a/plotgdx_witch.R +++ /dev/null @@ -1,27 +0,0 @@ -rm(list = ls()) -witch_folder = "../witch" #Where you're WITCH code is located -#main directory of your results files -main_folder <- witch_folder # by default, the witch source folder -#main_folder <- "C:/Users/Emmerling/Documents/Dropbox (CMCC)/EIEE/WITCH_CODING/WITCH_RUNS_2018/submission_cdlinks/2019_04_15" -subdir = c("") #can be multiple directories - - -restrict_files = c("results_") #to all scenarios matching partly one of its 
arguments -exclude_files = c("") -removepattern = c("") - -yearmin = 1980 -yearmax = 2100 - -#If you want to have significant separations or parts of file names, specify file_separate <- c(type="first|last|separate", sep="_", names="c("file_new")) -#file_separate <- c("last", "_", c("specification")) -#Name scenarios (also subsets to the ones given (otherwise it takes gdx filename) as a mapping -#scenlist <- c("results_ssp2_asia_curpol"="Current policies") - -#c(lsf.str()) #show all available functions - -#Initialize default options, load all witch and other functionsget -source('R/witch_functions.R') - -#gdxcompaR (Standard gdxcompaR based on typical variables, otherwise edit in gdxcompaR/server.R) -runApp(appDir = "gdxcompaR/witch") diff --git a/tests/testthat.R b/tests/testthat.R new file mode 100644 index 0000000..9728f91 --- /dev/null +++ b/tests/testthat.R @@ -0,0 +1,3 @@ +library(testthat) +library(witchplot) +test_check("witchplot") diff --git a/witch-plot.Rproj b/witchplot.Rproj similarity index 59% rename from witch-plot.Rproj rename to witchplot.Rproj index 8e3c2eb..eaa6b81 100644 --- a/witch-plot.Rproj +++ b/witchplot.Rproj @@ -11,3 +11,8 @@ Encoding: UTF-8 RnwWeave: Sweave LaTeX: pdfLaTeX + +BuildType: Package +PackageUseDevtools: Yes +PackageInstallArgs: --no-multiarch --with-keep.source +PackageRoxygenize: rd,collate,namespace