Skip to contents

Generic S3 Function for Exporting an rpwf_augment() Object into the Database

Usage

rpwf_export_db(obj, db_con)

# S3 method for rpwf_workflow_set
rpwf_export_db(obj, db_con)

# S3 method for rpwf_data_set
rpwf_export_db(obj, db_con)

Arguments

obj

An augmented rpwf_workflow_set() or rpwf_data_set() object.

db_con

An rpwf_connect_db() object.

Value

The number of rows exported to the database.

Examples

# Create the database
board <- pins::board_temp()
tmp_dir <- tempdir()
db_con <- rpwf_connect_db(paste(tmp_dir, "db.SQLite", sep = "/"), board)

# Create a `workflow_set`
d <- mtcars
d$target <- stats::rbinom(nrow(d), 1, 0.5)
m1 <- parsnip::boost_tree() |>
  parsnip::set_engine("xgboost") |>
  parsnip::set_mode("classification") |>
  set_py_engine("xgboost", "XGBClassifier", "my_xgboost_model")
r1 <- d |>
  recipes::recipe(target ~ .)
wf <- rpwf_workflow_set(list(r1), list(m1), "neg_log_loss")

to_export <- wf |>
  rpwf_augment(db_con, dials::grid_latin_hypercube, size = 10)
#> No hyper param tuning specified
#> No pandas idx added. Use update_roles() with 'pd.index' for one
rpwf_write_grid(to_export)
#> No grid generated
rpwf_write_df(to_export)
#> Creating new version '20221219T051124Z-a8d18'
#> Writing to pin 'df.0dda107e15c6150535a8d13a54848e37.parquet'

# Before exporting
DBI::dbGetQuery(db_con$con, "SELECT * FROM wflow_tbl;")
#> [1] wflow_id             model_tag            recipe_tag          
#> [4] costs                model_type_id        py_base_learner_args
#> [7] grid_id              df_id                random_state        
#> <0 rows> (or 0-length row.names)
# After exporting
rpwf_export_db(to_export, db_con)
#> Exporting workflows to db...
#> [1] 1
DBI::dbGetQuery(db_con$con, "SELECT * FROM wflow_tbl;")
#>   wflow_id        model_tag recipe_tag        costs model_type_id
#> 1        1 my_xgboost_model       <NA> neg_log_loss             5
#>   py_base_learner_args grid_id df_id random_state
#> 1                 <NA>       1     1         1004