mirror of https://git.gfz-potsdam.de/naaice/poet.git (synced 2025-12-15 12:28:22 +01:00)

add ai adaptations

This commit is contained in:
parent 83b1389b93
commit 3b3ea6e595

0     .gitmodules (vendored)
@@ -33,6 +33,11 @@ set(TUG_ENABLE_TESTING OFF CACHE BOOL "" FORCE)
 add_subdirectory(ext/tug EXCLUDE_FROM_ALL)
 add_subdirectory(ext/iphreeqc EXCLUDE_FROM_ALL)
+
+# AI/NAA specific includes TODO: add option flags
+add_subdirectory(ext/ai-surrogate EXCLUDE_FROM_ALL)
+
+
 option(POET_ENABLE_TESTING "Build test suite for POET" OFF)
 
 if (POET_ENABLE_TESTING)
@@ -63,6 +63,11 @@ set_valid_predictions <- function(temp_field, prediction, validity) {
     return(temp_field)
 }
 
+get_invalid_values <- function(df, validity) {
+    return(df[validity == 0, ])
+}
+
+
 training_step <- function(model, predictor, target, validity) {
     msgm("Training:")
1     ai-surrogate-poet (Submodule)

@@ -0,0 +1 @@
+Subproject commit 1a2dfc6a48fb82b86d142c297db539315d135797
@@ -1,18 +1,5 @@
 ## Time-stamp: "Last modified 2024-05-30 13:27:06 delucia"
 
-## load a pretrained model from tensorflow file
-## Use the global variable "ai_surrogate_base_path" when using file paths
-## relative to the input script
-initiate_model <- function() {
-    require(keras3)
-    require(tensorflow)
-    init_model <- normalizePath(paste0(ai_surrogate_base_path,
-                                       "barite_50ai_all.keras"))
-    Model <- keras3::load_model(init_model)
-    msgm("Loaded model:")
-    print(str(Model))
-    return(Model)
-}
 
 scale_min_max <- function(x, min, max, backtransform) {
     if (backtransform) {
@@ -22,6 +9,22 @@ scale_min_max <- function(x, min, max, backtransform) {
     }
 }
 
+scale_standardizer <- function(x, mean, scale, backtransform) {
+    if (backtransform) {
+        return(x * scale + mean)
+    }
+    else {
+        return((x - mean) / scale)
+    }
+}
+
+standard <- list(mean = c(H = 111.01243361730982, O = 55.50673140754027, Ba = 0.0016161137065825058,
+                          Cl = 0.0534503766678322, S = 0.00012864849674669584, Sr = 0.0252377348949622,
+                          Barite_kin = 0.05292312117000998, Celestite_kin = 0.9475491659328229),
+                 scale = c(H = 1.0, O = 0.00048139729680698453, Ba = 0.008945717576237102, Cl = 0.03587363709464328,
+                           S = 0.00012035100591827131, Sr = 0.01523052668095922, Barite_kin = 0.21668648247230615,
+                           Celestite_kin = 0.21639449682671968))
+
 minmax <- list(min = c(H = 111.012433592824, O = 55.5062185549492, Charge = -3.1028354471876e-08,
                        Ba = 1.87312878574393e-141, Cl = 0, `S(6)` = 4.24227510643685e-07,
                        Sr = 0.00049382996130541, Barite = 0.000999542409828586, Celestite = 0.244801877115968),
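For readers following the scaling change: preprocess/postprocess now standardize with these hard-coded per-species mean and scale values instead of the min-max bounds, and the same vectors are later pushed into the C++ Standardizer in this commit. A minimal C++ sketch of the two transforms and their back-transforms, assuming the usual min-max formula for the part of scale_min_max not shown in this hunk (function names here are illustrative, not the project's API):

#include <cassert>
#include <cstddef>
#include <vector>

// Hypothetical helpers mirroring the R scalers above; not the project's
// Standardizer<ai_type_t> API, just the same arithmetic.
std::vector<float> standardize(const std::vector<float> &x,
                               const std::vector<float> &mean,
                               const std::vector<float> &scale,
                               bool backtransform) {
  assert(x.size() == mean.size() && x.size() == scale.size());
  std::vector<float> out(x.size());
  for (std::size_t i = 0; i < x.size(); ++i)
    out[i] = backtransform ? x[i] * scale[i] + mean[i]     // undo scaling
                           : (x[i] - mean[i]) / scale[i];  // apply scaling
  return out;
}

// Assumed min-max variant (max > min); the R back-transform branch is not
// visible in this hunk.
std::vector<float> min_max(const std::vector<float> &x,
                           const std::vector<float> &min,
                           const std::vector<float> &max,
                           bool backtransform) {
  std::vector<float> out(x.size());
  for (std::size_t i = 0; i < x.size(); ++i)
    out[i] = backtransform ? x[i] * (max[i] - min[i]) + min[i]
                           : (x[i] - min[i]) / (max[i] - min[i]);
  return out;
}

A round trip standardize(standardize(x, mean, scale, false), mean, scale, true) recovers x up to floating-point error, which is what the preprocess/postprocess pair relies on.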
@@ -30,14 +33,19 @@ minmax <- list(min = c(H = 111.012433592824, O = 55.5062185549492, Charge = -3.1
                        Sr = 0.0558680070692722, Barite = 0.756779139057097, Celestite = 1.00075422160624
                        ))
 
+ai_surrogate_species_input = c("H", "O", "Ba", "Cl", "S", "Sr", "Barite_kin", "Celestite_kin")
+ai_surrogate_species_output = c("O", "Ba", "S", "Sr", "Barite_kin", "Celestite_kin")
+
+
+
 preprocess <- function(df) {
     if (!is.data.frame(df))
         df <- as.data.frame(df, check.names = FALSE)
 
     as.data.frame(lapply(colnames(df),
-                         function(x) scale_min_max(x=df[x],
-                                                   min=minmax$min[x],
-                                                   max=minmax$max[x],
+                         function(x) scale_standardizer(x=df[x],
+                                                        mean=standard$mean[x],
+                                                        scale=standard$scale[x],
                                                         backtransform=FALSE)),
                          check.names = FALSE)
 }
@@ -47,23 +55,25 @@ postprocess <- function(df) {
         df <- as.data.frame(df, check.names = FALSE)
 
     as.data.frame(lapply(colnames(df),
-                         function(x) scale_min_max(x=df[x],
-                                                   min=minmax$min[x],
-                                                   max=minmax$max[x],
+                         function(x) scale_standardizer(x=df[x],
+                                                        mean=standard$mean[x],
+                                                        scale=standard$scale[x],
                                                         backtransform=TRUE)),
                          check.names = FALSE)
 }
 
 mass_balance <- function(predictors, prediction) {
-    dBa <- abs(prediction$Ba + prediction$Barite -
-               predictors$Ba - predictors$Barite)
-    dSr <- abs(prediction$Sr + prediction$Celestite -
-               predictors$Sr - predictors$Celestite)
-    return(dBa + dSr)
+    dBa <- abs(prediction$Ba + prediction$Barite_kin -
+               predictors$Ba - predictors$Barite_kin)
+    dSr <- abs(prediction$Sr + prediction$Celestite_kin -
+               predictors$Sr - predictors$Celestite_kin)
+    dS <- abs(prediction$S + prediction$Celestite_kin + prediction$Barite_kin -
+              predictors$S - predictors$Celestite_kin - predictors$Barite_kin)
+    return(dBa + dSr + dS)
 }
 
 validate_predictions <- function(predictors, prediction) {
-    epsilon <- 1E-7
+    epsilon <- 1E-5
    mb <- mass_balance(predictors, prediction)
    msgm("Mass balance mean:", mean(mb))
    msgm("Mass balance variance:", var(mb))
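The acceptance test now balances Ba, Sr, and S against the kinetic phases Barite_kin and Celestite_kin, and the tolerance is relaxed from 1E-7 to 1E-5. A minimal C++ sketch of the same per-cell check, assuming the validity flag is simply mass balance < epsilon (the tail of validate_predictions is not shown in this hunk); the Cell struct and field layout are illustrative only:

#include <cmath>
#include <cstdint>
#include <vector>

// Illustrative per-cell record; the real code keeps these values in R data
// frames, not in a C++ struct.
struct Cell {
  double Ba, Sr, S, Barite_kin, Celestite_kin;
};

// Returns 1 for cells whose predicted totals conserve mass within epsilon,
// 0 otherwise -- the same rule as mass_balance()/validate_predictions() above.
std::vector<std::uint8_t> validate(const std::vector<Cell> &predictors,
                                   const std::vector<Cell> &prediction,
                                   double epsilon = 1e-5) {
  std::vector<std::uint8_t> validity(predictors.size(), 0);
  for (std::size_t i = 0; i < predictors.size(); ++i) {
    const Cell &p = predictors[i], &q = prediction[i];
    double dBa = std::abs(q.Ba + q.Barite_kin - p.Ba - p.Barite_kin);
    double dSr = std::abs(q.Sr + q.Celestite_kin - p.Sr - p.Celestite_kin);
    double dS  = std::abs(q.S + q.Celestite_kin + q.Barite_kin -
                          p.S - p.Celestite_kin - p.Barite_kin);
    validity[i] = (dBa + dSr + dS) < epsilon ? 1 : 0;
  }
  return validity;
}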
@@ -72,19 +82,3 @@ validate_predictions <- function(predictors, prediction) {
         sum(ret))
     return(ret)
 }
-
-training_step <- function(model, predictor, target, validity) {
-    msgm("Starting incremental training:")
-
-    ## x <- as.matrix(predictor)
-    ## y <- as.matrix(target[colnames(x)])
-
-    history <- model %>% keras3::fit(x = data.matrix(predictor),
-                                     y = data.matrix(target),
-                                     epochs = 10, verbose=1)
-
-    keras3::save_model(model,
-                       filepath = paste0(out_dir, "/current_model.keras"),
-                       overwrite=TRUE)
-    return(model)
-}
190     src/poet.cpp
@@ -43,6 +43,12 @@
 #include <set>
 #include <string>
 
+#include <Model.hpp>
+#include <NAABackend.hpp>
+#include <PythonBackend.hpp>
+#include <TrainingBackend.hpp>
+#include <TrainingData.hpp>
+
 #include <CLI/CLI.hpp>
 
 #include <poet.hpp>
@@ -145,8 +151,16 @@ int parseInitValues(int argc, char **argv, RuntimeParameters &params) {
       ->check(CLI::PositiveNumber)
       ->default_val(RuntimeParameters::INTERP_BUCKET_ENTRIES_DEFAULT);
 
-  app.add_flag("--ai-surrogate", params.use_ai_surrogate,
-               "Enable AI surrogate for chemistry module");
+  auto *ai_option_group =
+      app.add_option_group("ai_surrogate", "AI Surrogate related options");
+
+  ai_option_group->add_flag("--ai", params.ai,
+                            "Enable AI surrogate for chemistry module");
+  ai_option_group
+      ->add_option("--ai-backend", params.ai_backend,
+                   "Desired ai backend (0: python (keras), 1: naa, 2: cuda)")
+      ->check(CLI::PositiveNumber)
+      ->default_val(RuntimeParameters::AI_BACKEND_DEFAULT);
 
   app.add_flag("--rds", params.as_rds,
               "Save output as .rds file instead of default .qs2");
@@ -191,7 +205,7 @@ int parseInitValues(int argc, char **argv, RuntimeParameters &params) {
   MSG("Output format/extension is " + params.out_ext);
   MSG("Work Package Size: " + std::to_string(params.work_package_size));
   MSG("DHT is " + BOOL_PRINT(params.use_dht));
-  MSG("AI Surrogate is " + BOOL_PRINT(params.use_ai_surrogate));
+  MSG("AI Surrogate is " + BOOL_PRINT(params.ai));
 
   if (params.use_dht) {
     // MSG("DHT strategy is " + std::to_string(simparams.dht_strategy));
@@ -236,7 +250,6 @@ int parseInitValues(int argc, char **argv, RuntimeParameters &params) {
   // R["dht_log"] = simparams.dht_log;
 
   try {
-
     Rcpp::List init_params_(ReadRObj_R(init_file));
     params.init_params = init_params_;
 
@@ -290,6 +303,41 @@ static Rcpp::List RunMasterLoop(RInsidePOET &R, const RuntimeParameters &params,
   }
   R["TMP_PROPS"] = Rcpp::wrap(chem.getField().GetProps());
 
+  std::unique_ptr<AIContext> ai_ctx = nullptr;
+
+  if (params.ai) {
+
+    ai_ctx = std::make_unique<AIContext>(
+        "/mnt/scratch/signer/poet/bench/barite/barite_trained.weights.h5");
+    R.parseEval(
+        "mean <- as.numeric(standard$mean[ai_surrogate_species_output])");
+    R.parseEval(
+        "scale <- as.numeric(standard$scale[ai_surrogate_species_output])");
+
+    std::vector<float> mean = R["mean"];
+    std::vector<float> scale = R["scale"];
+
+    ai_ctx->scaler.set_scaler(mean, scale);
+
+    // initialize training backends only if retraining is desired
+    if (params.ai_backend == PYTHON_BACKEND) {
+      MSG("AI Surrogate with Python/keras backend enabled.")
+      // auto model = Python<ai_type_t>();
+    } else if (params.ai_backend == NAA_BACKEND) {
+      MSG("AI Surrogate with NAA backend enabled.")
+      ai_ctx->training_backend =
+          std::make_unique<NAABackend<ai_type_t>>(20 * params.batch_size);
+    }
+
+    if (!params.disable_retraining) {
+      ai_ctx->training_backend->training_thread(
+          ai_ctx->design_buffer, ai_ctx->results_buffer, ai_ctx->model,
+          ai_ctx->meta_params, ai_ctx->scaler, ai_ctx->data_semaphore_write,
+          ai_ctx->data_semaphore_read, ai_ctx->model_semaphore,
+          ai_ctx->training_is_running, 1);
+    }
+  }
+
   /* SIMULATION LOOP */
   double dSimTime{0};
   for (uint32_t iter = 1; iter < maxiter + 1; iter++) {
@@ -311,7 +359,7 @@ static Rcpp::List RunMasterLoop(RInsidePOET &R, const RuntimeParameters &params,
     chem.getField().update(diffusion.getField());
 
     // MSG("Chemistry start");
-    if (params.use_ai_surrogate) {
+    if (params.ai) {
       double ai_start_t = MPI_Wtime();
       // Save current values from the tug field as predictor for the ai step
       R["TMP"] = Rcpp::wrap(chem.getField().AsVector());
@@ -319,42 +367,63 @@ static Rcpp::List RunMasterLoop(RInsidePOET &R, const RuntimeParameters &params,
          std::string("predictors <- setNames(data.frame(matrix(TMP, nrow=" +
                      std::to_string(chem.getField().GetRequestedVecSize()) +
                      ")), TMP_PROPS)"));
-      R.parseEval("predictors <- predictors[ai_surrogate_species]");
 
      // Apply preprocessing
      MSG("AI Preprocessing");
+      R.parseEval("predictors <- predictors[ai_surrogate_species_input]");
+
      R.parseEval("predictors_scaled <- preprocess(predictors)");
+      std::vector<std::vector<float>> predictors_scaled =
+          R["predictors_scaled"];
 
      // Predict
      MSG("AI Prediction");
-      R.parseEval(
-          "aipreds_scaled <- prediction_step(model, predictors_scaled)");
+      // FIXME: double/float conversion
+      std::vector<float> predictions_scaled = ai_ctx->model.predict(
+          predictors_scaled, params.batch_size, ai_ctx->model_semaphore);
 
      // Apply postprocessing
      MSG("AI Postprocessing");
-      R.parseEval("aipreds <- postprocess(aipreds_scaled)");
+      int n_samples = R.parseEval("nrow(predictors)");
+      int n_output_features = ai_ctx->model.weight_matrices.back().cols();
+      std::cout << "n_output_features: " << n_output_features << std::endl;
+      std::vector<double> predictions_scaled_double(predictions_scaled.begin(),
+                                                    predictions_scaled.end());
+      R["TMP"] = predictions_scaled_double;
+      R["n_samples"] = n_samples;
+      R["n_output"] = n_output_features;
+
+      R.parseEval("predictions_scaled <- setNames(data.frame(matrix(TMP, "
+                  "nrow=n_samples, ncol=n_output, byrow=TRUE)), "
+                  "ai_surrogate_species_output)");
+      // R.parseEval("print(head(predictions_scaled))");
+      R.parseEval("predictions <- postprocess(predictions_scaled)");
+      // R.parseEval("print(head(predictions))");
 
      // Validate prediction and write valid predictions to chem field
      MSG("AI Validation");
-      R.parseEval(
-          "validity_vector <- validate_predictions(predictors, aipreds)");
+      // FIXME: (mass balance plausible?)
+      R.parseEval("validity_vector <- validate_predictions(predictors, "
+                  "predictions) ");
+
+      R.parseEval("print(head(validity_vector))");
 
      MSG("AI Marking accepted");
      chem.set_ai_surrogate_validity_vector(R.parseEval("validity_vector"));
 
      MSG("AI TempField");
-      std::vector<std::vector<double>> RTempField =
-          R.parseEval("set_valid_predictions(predictors,\
-                       aipreds,\
+      R.parseEval("print(ai_surrogate_species_output)");
+      // R.parseEval("print(head(predictors))");
+      std::vector<std::vector<double>> RTempField = R.parseEval(
+          "set_valid_predictions(predictors[ai_surrogate_species_output],\
+           predictions,\
                       validity_vector)");
 
      MSG("AI Set Field");
-      Field predictions_field =
-          Field(R.parseEval("nrow(predictors)"), RTempField,
-                R.parseEval("colnames(predictors)"));
+      Field predictions_field = Field(
+          R.parseEval("nrow(predictors)"), RTempField,
+          R.parseEval(
+              "colnames(predictors[ai_surrogate_species_output])")); // FIXME:
+                                                                     // is this
+                                                                     // correct?
 
      MSG("AI Update");
      chem.getField().update(predictions_field);
 
      double ai_end_t = MPI_Wtime();
      R["ai_prediction_time"] = ai_end_t - ai_start_t;
    }
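The prediction now comes back from the C++ model as a single flat float vector in row-major order and is reshaped on the R side with matrix(TMP, nrow = n_samples, ncol = n_output, byrow = TRUE). A short sketch of that layout, with illustrative helper names, to make the indexing explicit:

#include <cstddef>
#include <vector>

// Row-major access into a flat prediction buffer: sample i, output feature j.
// Matches matrix(TMP, nrow = n_samples, ncol = n_output, byrow = TRUE) on the
// R side, where column j corresponds to ai_surrogate_species_output[j].
inline float prediction_at(const std::vector<float> &flat, std::size_t n_output,
                           std::size_t i, std::size_t j) {
  return flat[i * n_output + j];
}

// Example: splitting the flat buffer into per-sample rows (illustrative only).
std::vector<std::vector<float>> as_rows(const std::vector<float> &flat,
                                        std::size_t n_samples,
                                        std::size_t n_output) {
  std::vector<std::vector<float>> rows(n_samples,
                                       std::vector<float>(n_output));
  for (std::size_t i = 0; i < n_samples; ++i)
    for (std::size_t j = 0; j < n_output; ++j)
      rows[i][j] = flat[i * n_output + j];
  return rows;
}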
@@ -362,7 +431,7 @@ static Rcpp::List RunMasterLoop(RInsidePOET &R, const RuntimeParameters &params,
     chem.simulate(dt);
 
     /* AI surrogate iterative training */
-    if (params.use_ai_surrogate) {
+    if (params.ai == true && params.disable_retraining == false) {
      double ai_start_t = MPI_Wtime();
 
      R["TMP"] = Rcpp::wrap(chem.getField().AsVector());
@@ -370,14 +439,55 @@ static Rcpp::List RunMasterLoop(RInsidePOET &R, const RuntimeParameters &params,
          std::string("targets <- setNames(data.frame(matrix(TMP, nrow=" +
                      std::to_string(chem.getField().GetRequestedVecSize()) +
                      ")), TMP_PROPS)"));
-      R.parseEval("targets <- targets[ai_surrogate_species]");
 
-      R.parseEval("target_scaled <- preprocess(targets)");
+      // TODO: Check how to get the correct columns
+      R.parseEval("predictors_retraining <- "
+                  "get_invalid_values(predictors_scaled, validity_vector)");
+      R.parseEval("targets_retraining <- "
+                  "get_invalid_values(targets[ai_surrogate_species_output], "
+                  "validity_vector)");
+      R.parseEval("targets_retraining <- preprocess(targets_retraining)");
+
+      std::vector<std::vector<float>> predictors_retraining =
+          R["predictors_retraining"];
+      std::vector<std::vector<float>> targets_retraining =
+          R["targets_retraining"];
+
+      MSG("AI: add invalid data to buffer");
+
+      ai_ctx->data_semaphore_write.acquire();
+
+      std::cout << "size of predictors " << predictors_retraining[0].size()
+                << std::endl;
+      std::cout << "size of targets " << targets_retraining[0].size()
+                << std::endl;
+
+      ai_ctx->design_buffer.addData(predictors_retraining);
+      ai_ctx->results_buffer.addData(targets_retraining);
+
+      size_t elements_design_buffer =
+          ai_ctx->design_buffer.getSize() /
+          (predictors_retraining.size() * sizeof(float));
+      size_t elements_results_buffer =
+          ai_ctx->results_buffer.getSize() /
+          (targets_retraining.size() * sizeof(float));
+
+      std::cout << "design_buffer_size: " << elements_design_buffer
+                << std::endl;
+      std::cout << "results_buffer_size: " << elements_results_buffer
+                << std::endl;
+
+      if (elements_design_buffer >= 20 * params.batch_size &&
+          elements_results_buffer >= 20 * params.batch_size &&
+          ai_ctx->training_is_running == false) {
+        ai_ctx->data_semaphore_read.release();
+      } else if (ai_ctx->training_is_running == true) {
+        MSG("Training is currently running");
+        ai_ctx->data_semaphore_write.release();
+      } else {
+        MSG("Not enough data for retraining");
+        ai_ctx->data_semaphore_write.release();
+      }
 
-      MSG("AI: incremental training");
-      R.parseEval("model <- training_step(model, predictors_scaled, "
-                  "target_scaled, validity_vector)");
      double ai_end_t = MPI_Wtime();
      R["ai_training_time"] = ai_end_t - ai_start_t;
    }
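Retraining data is handed to the background training thread through the two binary semaphores and the atomic flag declared in AIContext further below: the master loop acquires data_semaphore_write before touching the buffers and releases data_semaphore_read only once roughly 20 * batch_size samples have accumulated and no training run is in flight; otherwise it gives the write semaphore straight back. A self-contained C++20 sketch of that handshake, with plain std::vector buffers and a trivial trainer standing in for TrainingData and the TrainingBackend thread (<semaphore> is included explicitly here):

#include <atomic>
#include <iostream>
#include <semaphore>
#include <thread>
#include <vector>

std::binary_semaphore data_write{1};  // free to modify the buffers
std::binary_semaphore data_read{0};   // signaled when a batch is ready
std::atomic_bool training_running{false};

std::vector<float> design_buffer;     // stand-ins for TrainingData<float>
std::vector<float> results_buffer;

// Consumer: waits until the producer signals a full batch, "trains", then
// hands the buffers back by releasing the write semaphore.
void trainer(int rounds) {
  for (int r = 0; r < rounds; ++r) {
    data_read.acquire();              // wait for the "enough data" signal
    training_running = true;
    std::cout << "training on " << design_buffer.size() << " values\n";
    design_buffer.clear();
    results_buffer.clear();
    training_running = false;
    data_write.release();             // give the buffers back to the producer
  }
}

// Producer step: mirrors the master-loop logic in the hunk above.
void add_batch(const std::vector<float> &x, const std::vector<float> &y,
               std::size_t threshold) {
  data_write.acquire();
  design_buffer.insert(design_buffer.end(), x.begin(), x.end());
  results_buffer.insert(results_buffer.end(), y.begin(), y.end());

  if (design_buffer.size() >= threshold && !training_running) {
    data_read.release();              // wake the trainer; it releases data_write
  } else {
    data_write.release();             // not enough data yet, keep producing
  }
}

int main() {
  const std::size_t threshold = 8;
  std::thread t(trainer, 2);
  for (int i = 0; i < 6; ++i)
    add_batch(std::vector<float>(3, 1.0f), std::vector<float>(3, 2.0f),
              threshold);
  t.join();
}

In the diff the threshold test divides the buffer's byte size by the row size in bytes; the sketch expresses the same idea directly in element counts.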
@@ -402,6 +512,10 @@ static Rcpp::List RunMasterLoop(RInsidePOET &R, const RuntimeParameters &params,
 
   std::cout << std::endl;
 
+  if (!params.disable_retraining) {
+    ai_ctx->training_backend->stop_training(ai_ctx->data_semaphore_read);
+  }
+
   Rcpp::List chem_profiling;
   chem_profiling["simtime"] = chem.GetChemistryTime();
   chem_profiling["loop"] = chem.GetMasterLoopTime();
@@ -593,7 +707,7 @@ int main(int argc, char *argv[]) {
                                run_params.interp_bucket_entries,
                                run_params.interp_size,
                                run_params.interp_min_entries,
-                               run_params.use_ai_surrogate};
+                               run_params.ai};
 
   chemistry.masterEnableSurrogates(surr_setup);
 
@@ -613,13 +727,15 @@ int main(int argc, char *argv[]) {
     R["out_ext"] = run_params.out_ext;
     R["out_dir"] = run_params.out_dir;
 
-    if (run_params.use_ai_surrogate) {
+    if (run_params.ai) {
      /* Incorporate ai surrogate from R */
      R.parseEvalQ(ai_surrogate_r_library);
-      /* Use dht species for model input and output */
-      R["ai_surrogate_species"] =
-          init_list.getChemistryInit().dht_species.getNames();
 
+      const auto &names = init_list.getChemistryInit().dht_species.getNames();
+      for (const auto &name : names) {
+        std::cout << name << " ";
+      }
+      std::cout << "\n"; //
      const std::string ai_surrogate_input_script =
          init_list.getChemistryInit().ai_surrogate_input_script;
 
@@ -627,8 +743,6 @@ int main(int argc, char *argv[]) {
      R.parseEvalQ(ai_surrogate_input_script);
 
-      MSG("AI: initialize AI model");
-      R.parseEval("model <- initiate_model()");
      R.parseEval("gpu_info()");
    }
 
    MSG("Init done on process with rank " + std::to_string(MY_RANK));
 
@@ -22,12 +22,21 @@
 
 #pragma once
 
+#include <atomic>
 #include <cstdint>
 #include <string>
 #include <vector>
 
+#include <MetaParameter.hpp>
+#include <Model.hpp>
+#include <Standardizer.hpp>
+#include <TrainingBackend.hpp>
+#include <TrainingData.hpp>
+
 #include <Rcpp.h>
 
+using ai_type_t = float;
+
 static const char *poet_version = "@POET_VERSION@";
 
 // using the Raw string literal to avoid escaping the quotes
@@ -38,6 +47,8 @@ static const inline std::string ai_surrogate_r_library =
     R"(@R_AI_SURROGATE_LIB@)";
 static const inline std::string r_runtime_parameters = "mysetup";
 
+enum BACKEND_TYPE { PYTHON_BACKEND = 1, NAA_BACKEND, CUDA_BACKEND };
+
 struct RuntimeParameters {
   std::string out_dir;
   std::vector<double> timesteps;
@@ -68,5 +79,27 @@
   static constexpr std::uint32_t INTERP_BUCKET_ENTRIES_DEFAULT = 20;
   std::uint32_t interp_bucket_entries = INTERP_BUCKET_ENTRIES_DEFAULT;
 
-  bool use_ai_surrogate = false;
+  // configuration for ai surrogate approach
+  bool ai = false;
+  bool disable_retraining = false;
+  static constexpr std::uint8_t AI_BACKEND_DEFAULT = 1;
+  std::uint8_t ai_backend = 1; // 1 - python, 2 - naa, 3 - cuda
+  bool train_only_invalid = true;
+  int batch_size = 1000;
 };
+
+struct AIContext {
+  TrainingData<ai_type_t> design_buffer;
+  TrainingData<ai_type_t> results_buffer;
+  Model<ai_type_t> model;
+  MetaParameter<ai_type_t> meta_params;
+  Standardizer<ai_type_t> scaler;
+
+  std::binary_semaphore data_semaphore_write{1};
+  std::binary_semaphore data_semaphore_read{0};
+  std::binary_semaphore model_semaphore{1};
+  std::atomic_bool training_is_running = false;
+  std::unique_ptr<TrainingBackend<ai_type_t>> training_backend;
+
+  AIContext(const std::string &weights_path) : model(weights_path) {}
+};
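AIContext bundles the training buffers, the model, the scaler, and the synchronization primitives; data_semaphore_write starts at 1 and data_semaphore_read at 0, which is exactly the handshake sketched earlier. Backend selection compares the numeric --ai-backend value against BACKEND_TYPE, whose values start at 1 in this header (note that the --ai-backend help string above numbers them from 0). A small standalone sketch of that dispatch; the enum is redeclared locally only so the example compiles on its own:

#include <cstdint>
#include <iostream>

// Mirrors the header's enum (values start at 1); redeclared here only for a
// self-contained example.
enum BACKEND_TYPE : std::uint8_t { PYTHON_BACKEND = 1, NAA_BACKEND, CUDA_BACKEND };

// Illustrative dispatch on RuntimeParameters::ai_backend; in the hunks shown
// above the real code constructs a NAABackend<ai_type_t> for NAA_BACKEND and,
// for the Python case, currently only logs a message.
void select_backend(std::uint8_t ai_backend) {
  switch (ai_backend) {
  case PYTHON_BACKEND:
    std::cout << "AI Surrogate with Python/keras backend enabled.\n";
    break;
  case NAA_BACKEND:
    std::cout << "AI Surrogate with NAA backend enabled.\n";
    break;
  case CUDA_BACKEND:
    std::cout << "CUDA backend selected (not handled in the hunks shown above).\n";
    break;
  default:
    std::cout << "Unknown ai_backend value: " << int(ai_backend) << "\n";
  }
}

int main() { select_backend(1); }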