
Commit

Re-do lagged targets
n8layman committed Dec 16, 2024
1 parent 482e754 commit 01b1e2b
Showing 13 changed files with 5,142 additions and 4,430 deletions.
Binary file modified .env
Binary file not shown.
2 changes: 1 addition & 1 deletion R/calculate_weather_anomalies.R
@@ -52,7 +52,7 @@ calculate_weather_anomalies <- function(nasa_weather_transformed,

  # Open dataset to transformed data
  weather_transformed_dataset <- arrow::open_dataset(nasa_weather_transformed) |>
-   filter(date == model_dates_selected) |> collect()
+   filter(date == lubridate::as_date(model_dates_selected)) |> collect()

  # Open dataset to historical weather data
  historical_means <- arrow::open_dataset(weather_historical_means) |> filter(doy == lubridate::yday(model_dates_selected)) |> collect()
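As context for the change above, a minimal standalone sketch (not part of the commit; toy data and an assumed selector value): lubridate::as_date() normalises a selector that may arrive as a character, Date, or POSIXct, so the equality filter against a Date column behaves the same in every case.

```r
library(dplyr)
library(lubridate)

# Hypothetical selector: upstream targets may pass this as a character
# ("2024-12-16"), a Date, or a POSIXct timestamp.
model_dates_selected <- "2024-12-16"

# Toy stand-in for the transformed weather dataset.
weather <- tibble::tibble(
  date        = seq(as_date("2024-12-01"), as_date("2024-12-31"), by = "day"),
  temperature = rnorm(31)
)

# as_date() coerces the selector to Date before the comparison.
weather |> filter(date == as_date(model_dates_selected))
```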
17 changes: 13 additions & 4 deletions R/calculate_weather_historical_means.R
@@ -39,12 +39,21 @@ calculate_weather_historical_means <- function(nasa_weather_transformed,
  weather_historical_means <- map_vec(1:366, .progress = TRUE, function(i) {
    filename <- file.path(weather_historical_means_directory,
                          glue::glue("weather_historical_mean_doy_{i}.parquet"))
-   nasa_weather_data |>
+
+   mean_vals <- nasa_weather_data |>
      filter(doy == i) |>
      group_by(x, y, doy) |>
-     summarize(across(matches("temperature|precipitation|humidity"), ~mean(.x)),
-               across(matches("temperature|precipitation|humidity"), ~sd(.x), .names = "{.col}_sd")) |>
-     arrow::write_parquet(filename)
+     summarize(across(matches("temperature|precipitation|humidity"), ~mean(.x, na.rm = T)),
+               .groups = "drop")
+
+   sd_vals <- nasa_weather_data |>
+     filter(doy == i) |>
+     group_by(x, y, doy) |>
+     summarize(across(matches("temperature|precipitation|humidity"), ~sd(.x, na.rm = T),
+                      .names = "{.col}_sd"),
+               .groups = "drop")
+
+   mean_vals |> left_join(sd_vals) |> arrow::write_parquet(filename, compression = "gzip", compression_level = 5)

    filename
  })
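A self-contained sketch of the two-pass summary introduced above (not part of the commit; toy data and column names assumed): group means and group standard deviations are computed separately, each with NAs dropped, then joined back on the grouping keys before the parquet write.

```r
library(dplyr)

# Toy stand-in for nasa_weather_data: two grid cells on one day of year.
nasa_weather_data <- tibble::tibble(
  x             = rep(c(10, 11), each = 3),
  y             = 20,
  doy           = 1,
  temperature   = c(25, 26, NA, 30, 31, 32),
  precipitation = c(0, 2, 1, 5, NA, 3)
)

mean_vals <- nasa_weather_data |>
  group_by(x, y, doy) |>
  summarize(across(matches("temperature|precipitation|humidity"), ~mean(.x, na.rm = TRUE)),
            .groups = "drop")

sd_vals <- nasa_weather_data |>
  group_by(x, y, doy) |>
  summarize(across(matches("temperature|precipitation|humidity"), ~sd(.x, na.rm = TRUE),
                   .names = "{.col}_sd"),
            .groups = "drop")

# Join on the shared keys; spelling out `by` avoids the join-column message.
mean_vals |> left_join(sd_vals, by = c("x", "y", "doy"))
```

In the target itself the joined result is then written with arrow::write_parquet(filename, compression = "gzip", compression_level = 5).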
3 changes: 2 additions & 1 deletion R/get_remote_rasters.R
@@ -89,7 +89,8 @@ get_remote_rasters <- function(urls,
  }

  # Extract the raster file to a temporary directory
- system2("unrar", c("e", "-o+", rar_file, here::here(output_dir), ">/dev/null"))
+ # Capture persistent File CRC error. If it's truly corrupted it won't open in the next step
+ try(archive::archive_extract(rar_file, output_dir), silent = T)

  # Load the raster data
  unpacked_raster <- terra::rast(raster_file)
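A sketch of the new extraction pattern with hypothetical paths (not part of the commit): archive::archive_extract() replaces the external unrar call, try() lets the recurring File CRC error pass, and the terra::rast() call that follows serves as the real integrity check.

```r
# Hypothetical paths for illustration only.
rar_file    <- "data/raw/soil_raster.rar"
output_dir  <- "data/extracted"
raster_file <- file.path(output_dir, "soil_raster.tif")

dir.create(output_dir, showWarnings = FALSE, recursive = TRUE)

# Some archives raise a File CRC error even though the payload extracts fine;
# try() swallows that so the pipeline can continue.
try(archive::archive_extract(rar_file, output_dir), silent = TRUE)

# If the archive really is corrupted, terra::rast() errors and stops the target here.
unpacked_raster <- terra::rast(raster_file)
```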
13 changes: 7 additions & 6 deletions R/preprocess_soil.R
@@ -98,12 +98,13 @@ preprocess_soil <- function(soil_directory_dataset,
  # Figure out where key is for the units are in HWSD2
  # NCL: This is confusing but keeping to match previous work
  soil_preprocessed <- soil_preprocessed |> mutate(soil_texture = if_else(soil_texture == 5, 1, # clay (heavy) + clay loam
-                                                                 if_else(soil_texture == 7, 2, # silty clay + silty loam aka
-                                                                 if_else(soil_texture == 8, 3, # clay + sandy clay
-                                                                 if_else(soil_texture == 9, 4, # silty clay loam
-                                                                 if_else(soil_texture == 10, 5, # clay loam + sandy clay loam BUT SEE RULE 1!!!
-                                                                 if_else(soil_texture == 11, 6, # silt sandy + loam
-                                                                 if_else(soil_texture == 12, 7, 0))))))) |>
+                                                                 if_else(soil_texture == 7, 2, # silt loam + silty clay
+                                                                 if_else(soil_texture == 8, 3, # sandy clay + clay
+                                                                 if_else(soil_texture == 9, 4, # loam + silty clay loam
+                                                                 if_else(soil_texture == 10, 5, # sandy clay loam SEE RULE 1!!!
+                                                                 if_else(soil_texture == 11, 6, # sandy loam + silt
+                                                                 if_else(soil_texture == 12, 7, # loamy sand + silt loam
+                                                                 0))))))) |>
                                                   as.factor()) # loamy sand + silt loam


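The same HWSD2 texture recode can also be written as a lookup with dplyr::case_match() (dplyr >= 1.1.0); a sketch on a toy code vector, equivalent to the nested if_else() chain above (not part of the commit):

```r
library(dplyr)

# Toy vector of HWSD2 texture codes (assumed inputs).
soil <- tibble::tibble(soil_texture = c(5, 7, 8, 9, 10, 11, 12, 3))

soil |>
  mutate(soil_texture = case_match(soil_texture,
                                   5  ~ 1,  # clay (heavy) + clay loam
                                   7  ~ 2,  # silt loam + silty clay
                                   8  ~ 3,  # sandy clay + clay
                                   9  ~ 4,  # loam + silty clay loam
                                   10 ~ 5,  # sandy clay loam, SEE RULE 1
                                   11 ~ 6,  # sandy loam + silt
                                   12 ~ 7,  # loamy sand + silt loam
                                   .default = 0) |>
           as.factor())
```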
179 changes: 133 additions & 46 deletions README.Rmd

Large diffs are not rendered by default.

816 changes: 0 additions & 816 deletions README.html

This file was deleted.

