Categorical Regression Splines.
Calls crs::crs() from package crs.
Parameters
| Id | Type | Default | Levels | Range |
|----|------|---------|--------|-------|
| degree | integer | 3 | - | \([0, \infty)\) |
| segments | integer | 1 | - | \([1, \infty)\) |
| include | integer | - | - | \((-\infty, \infty)\) |
| lambda | untyped | - | - | - |
| lambda.discrete | logical | FALSE | TRUE, FALSE | - |
| lambda.discrete.num | integer | 100 | - | \([0, \infty)\) |
| cv | character | nomad | nomad, exhaustive, none | - |
| cv.threshold | integer | 10000 | - | \([0, \infty)\) |
| cv.func | character | cv.ls | cv.ls, cv.gcv, cv.aic | - |
| kernel | logical | TRUE | TRUE, FALSE | - |
| degree.max | integer | 10 | - | \([0, \infty)\) |
| segments.max | integer | 10 | - | \([1, \infty)\) |
| degree.min | integer | 0 | - | \([0, \infty)\) |
| segments.min | integer | 1 | - | \([1, \infty)\) |
| cv.df.min | integer | 1 | - | \((-\infty, \infty)\) |
| complexity | character | degree-knots | degree-knots, degree, knots | - |
| knots | character | quantiles | quantiles, uniform, auto | - |
| basis | character | auto | auto, additive, tensor, glp | - |
| prune | logical | FALSE | TRUE, FALSE | - |
| restarts | integer | 0 | - | \([0, \infty)\) |
| nmulti | integer | 5 | - | \([0, \infty)\) |
| singular.ok | logical | FALSE | TRUE, FALSE | - |
| deriv | integer | 0 | - | \([0, \infty)\) |
| data.return | logical | FALSE | TRUE, FALSE | - |
| model.return | logical | FALSE | TRUE, FALSE | - |
| random.seed | integer | - | - | \((-\infty, \infty)\) |
| tau | numeric | - | - | \([0, 1]\) |
| initial.mesh.size.real | untyped | - | - | - |
| initial.mesh.size.integer | untyped | - | - | - |
| max.bb.eval | untyped | - | - | - |
| min.mesh.size.real | untyped | - | - | - |
| min.mesh.size.integer | untyped | - | - | - |
| min.poll.size.real | untyped | - | - | - |
| min.poll.size.integer | untyped | - | - | - |
| opts | untyped | - | - | - |
See also
as.data.table(mlr_learners) for a table of available Learners in the running session (depending on the loaded packages). Chapter in the mlr3book: https://mlr3book.mlr-org.com/basics.html#learners
mlr3learners for a selection of recommended learners.
mlr3cluster for unsupervised clustering learners.
mlr3pipelines to combine learners with pre- and postprocessing steps.
mlr3tuning for tuning of hyperparameters, mlr3tuningspaces for established default tuning spaces.
Super classes
mlr3::Learner -> mlr3::LearnerRegr -> LearnerRegrCrs
Methods
Inherited methods
mlr3::Learner$base_learner(), mlr3::Learner$configure(), mlr3::Learner$encapsulate(), mlr3::Learner$format(), mlr3::Learner$help(), mlr3::Learner$predict(), mlr3::Learner$predict_newdata(), mlr3::Learner$print(), mlr3::Learner$reset(), mlr3::Learner$selected_features(), mlr3::Learner$train(), mlr3::LearnerRegr$predict_newdata_fast()
Examples
# Define the Learner
learner = lrn("regr.crs")
print(learner)
#>
#> ── <LearnerRegrCrs> (regr.crs): Regression Splines ─────────────────────────────
#> • Model: -
#> • Parameters: list()
#> • Packages: mlr3 and crs
#> • Predict Types: [response] and se
#> • Feature Types: integer, numeric, factor, and ordered
#> • Encapsulation: none (fallback: -)
#> • Properties: weights
#> • Other settings: use_weights = 'use'
# Define a Task
task = tsk("mtcars")
# Create train and test set
ids = partition(task)
# Train the learner on the training ids
learner$train(task, row_ids = ids$train)
#> Calling NOMAD (Nonsmooth Optimization by Mesh Adaptive Direct Search)
#>
#> starting point # 0: ( 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 )
#> starting point # 1: ( 0 0 1 7 5 1 6 0 0 1 4 5 8 7 1 3 6 6 1 1 )
#> starting point # 2: ( 0 0 1 9 8 1 1 3 0 3 8 6 3 6 4 5 2 6 7 7 )
#> starting point # 3: ( 0 3 0 5 1 0 8 8 0 6 3 2 6 2 9 9 8 2 9 9 )
#> starting point # 4: ( 0 1 0 0 6 1 4 5 0 9 7 9 4 3 6 7 5 9 4 3 )
#>
#>
fv = 7.558038
fv = 26.06681
fv = 14.22838
fv = 45.82841
fv = 30.62477
fv = 9.958206
fv = 78605.95
fv = 10.70206
fv = 11.42287
fv = 363.0511
fv = 1.340781e+154
fv = 10.21936
fv = 1.340781e+154
fv = 1.340781e+154
fv = 28.11733
fv = 1.340781e+154
fv = 13.58281
fv = 193.0666
fv = 1.340781e+154
fv = 1.340781e+154
fv = 20.86694
fv = 1.340781e+154
fv = 11.164
fv = 1.340781e+154
fv = 23.90894
fv = 106.1359
#> Warning: number of rows of result is not a multiple of vector length (arg 2)
#>
fv = 54405115830
fv = 14.90272
fv = 8.734768
fv = 26.38662
fv = 12.5527
fv = 16.82241
fv = 9.585874
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 13.07504
fv = 8.829401
fv = 1.340781e+154
fv = 12.26707
fv = 13.77721
fv = 1.340781e+154
fv = 1.340781e+154
fv = 25.16853
run # 0: f=7.558037916
#>
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 8.798629
fv = 41.10885
fv = 1.340781e+154
fv = 1.340781e+154
fv = 7.951348
fv = 41.10885
fv = 1.340781e+154
fv = 1.340781e+154
fv = 41.10885
fv = 1.340781e+154
fv = 12.83352
fv = 9.00064
fv = 2183560
fv = 1.340781e+154
fv = 17.1831
fv = 6.949189
fv = 13.31135
fv = 12.79046
fv = 685551
fv = 123.7117
fv = 10.02364
fv = 9.195121
fv = 11.19827
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 7.499547
fv = 6.949189
fv = 8.215707
fv = 1.340781e+154
fv = 1.340781e+154
fv = 9.427604
fv = 6.949189
fv = 1.340781e+154
fv = 1.340781e+154
fv = 12.68065
fv = 15.04884
fv = 1.340781e+154
fv = 32.03497
fv = 274.2527
fv = 8.311329
fv = 8.10029
fv = 17.16075
fv = 6.380238
fv = 1.340781e+154
fv = 1.340781e+154
fv = 38.12414
fv = 1.340781e+154
fv = 25.16853
fv = 1.340781e+154
fv = 4.798179
fv = 7.913082
fv = 1.340781e+154
fv = 13.20002
fv = 1.340781e+154
fv = 1.340781e+154
fv = 25.16853
fv = 1.340781e+154
fv = 8.426614
fv = 1.340781e+154
fv = 8.287091
fv = 4.798179
fv = 8.287091
fv = 1.69095e+12
fv = 424529940781
fv = 23.39119
fv = 9.239898
fv = 1.340781e+154
fv = 424529940781
fv = 6.159627
fv = 51.20784
fv = 51.20784
fv = 1.340781e+154
fv = 9.619153
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 28.51122
fv = 1.340781e+154
fv = 274.3971
fv = 4.798179
fv = 1.340781e+154
fv = 8.080709
fv = 13.89286
fv = 4.381798
fv = 1.340781e+154
fv = 8.779469
fv = 4.381798
fv = 13.62238
fv = 9.73131
fv = 1.340781e+154
fv = 1.340781e+154
fv = 12.324
fv = 4.381798
fv = 121539372159
fv = 5.639023
fv = 13.95032
fv = 10.68802
fv = 18.77828
fv = 9.787976
fv = 8.145489
fv = 72548448556
fv = 1.340781e+154
fv = 1920602972
fv = 1.340781e+154
fv = 4.381798
fv = 194.1259
fv = 12.11341
fv = 1.340781e+154
fv = 11.91328
fv = 65.45587
fv = 4.381798
fv = 7.69968
fv = 4.381798
fv = 6.152503
fv = 10.78573
fv = 1.340781e+154
fv = 8.451022
fv = 5.123214
fv = 33.23405
fv = 119.3567
fv = 4.381798
fv = 1.340781e+154
fv = 4.381798
fv = 4.381798
fv = 25.78331
fv = 12.92725
fv = 1.340781e+154
fv = 10.60214
fv = 10.78573
fv = 31.2492
fv = 30.40718
fv = 4.574743
fv = 4.381798
fv = 4.381798
fv = 4.381798
fv = 4.381798
fv = 5.8559e+11
fv = 4.381798
fv = 4.54387
fv = 6.370446
fv = 4.798179
fv = 9.907011
run # 1: f=4.381797558
#>
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 15901.42
fv = 2910.483
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 17825.22
fv = 2910.483
fv = 1.340781e+154
fv = 1363.062
fv = 329760658
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 43916.97
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 12314.72
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 43916.97
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1363.062
fv = 803767.6
fv = 31.04301
fv = 23.26372
fv = 23.26372
fv = 5377.117
fv = 1.340781e+154
fv = 1.340781e+154
fv = 781.242
fv = 1.340781e+154
fv = 1.340781e+154
fv = 23.26372
fv = 1.340781e+154
fv = 12.25577
fv = 1.340781e+154
fv = 41.68135
fv = 18.72599
fv = 1.340781e+154
fv = 1.340781e+154
fv = 39.36971
fv = 22.05733
fv = 16.81845
fv = 1.340781e+154
fv = 64264.93
fv = 26.29493
fv = 29.81887
fv = 29.81887
fv = 23.26372
fv = 1.340781e+154
fv = 8.698723
fv = 25228.54
fv = 1.340781e+154
fv = 29.23536
fv = 17.95363
fv = 1.340781e+154
fv = 5.889964
fv = 25228.54
fv = 1.340781e+154
fv = 11.85482
fv = 1.340781e+154
fv = 17.95363
fv = 12.25577
fv = 8.698723
fv = 1.340781e+154
fv = 17.95363
fv = 1.340781e+154
fv = 10.13297
fv = 13.21761
fv = 13.00105
fv = 1.340781e+154
fv = 3210.312
fv = 12773644
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 19184.02
fv = 1.340781e+154
fv = 1.340781e+154
fv = 14.12739
fv = 1506731699
fv = 6.984611
fv = 1.340781e+154
fv = 17.95363
fv = 14.93745
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 18853.16
fv = 14.93745
fv = 8.698723
fv = 1.340781e+154
fv = 25228.54
fv = 8.538091
fv = 54.36112
fv = 5.889964
fv = 1.340781e+154
fv = 1.340781e+154
fv = 13.70781
fv = 5.889964
fv = 1.340781e+154
fv = 8.837393
fv = 35.51944
fv = 1.340781e+154
fv = 23.12948
fv = 5.889964
fv = 4.94954
fv = 4.94954
fv = 1.340781e+154
fv = 12.25577
fv = 4.94954
fv = 4.94954
fv = 252.9006
fv = 1.340781e+154
fv = 4.94954
fv = 4.94954
fv = 106.5829
fv = 10.80349
fv = 1.340781e+154
fv = 1.340781e+154
fv = 21.10818
fv = 235.9834
fv = 1039338
fv = 4.94954
fv = 1.340781e+154
fv = 4.94954
fv = 4.94954
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 4.94954
fv = 4.94954
fv = 8.17483
fv = 21.17723
fv = 1.340781e+154
fv = 1194.009
fv = 1.340781e+154
fv = 4.94954
fv = 1.340781e+154
fv = 1.340781e+154
fv = 4.94954
fv = 68.51275
fv = 15491.26
fv = 31.68291
fv = 1.340781e+154
fv = 22.62609
fv = 4.94954
fv = 1.340781e+154
fv = 252.9006
fv = 14.00246
fv = 10.80349
fv = 21.10818
fv = 17.95363
run # 2: f=4.949540433
#>
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 8.790252
fv = 16.70308
fv = 1.340781e+154
fv = 1.340781e+154
fv = 8.790252
fv = 18.79549
fv = 106.6415
fv = 316.9966
fv = 175.1425
fv = 1.340781e+154
fv = 662140.8
fv = 8.790252
fv = 1.340781e+154
fv = 11.92715
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 8.790252
fv = 1.340781e+154
fv = 8.790252
fv = 1.340781e+154
fv = 48.34343
fv = 306196.8
fv = 1.340781e+154
fv = 8.790252
fv = 1.340781e+154
fv = 153.9621
fv = 398.8618
fv = 1.340781e+154
fv = 8.790252
fv = 11.62714
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 8.790252
fv = 1.340781e+154
fv = 1.340781e+154
fv = 8.790252
fv = 1.340781e+154
fv = 1.340781e+154
fv = 23.55222
fv = 171.4944
fv = 45.44064
fv = 95.64312
fv = 357.5171
fv = 14.83462
fv = 129.8115
fv = 1.340781e+154
fv = 1.340781e+154
fv = 8.790252
fv = 190.9313
fv = 8.790252
fv = 7862.907
fv = 90.5404
fv = 168.6895
fv = 8.790252
fv = 2136.229
fv = 8.790252
fv = 404.685
fv = 4041.846
fv = 8.790252
fv = 8.790252
fv = 40.40833
run # 3: f=8.790251523
#>
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
fv = 1.340781e+154
run # 4: f=1.340780793e+154
#>
#> bb eval : 561
#> best : 4.381797558
#> worst : 1.340780793e+154
#> solution: x = ( 0 0 0 0 0 2 1 0 0 2 1 7 1 3 1 1 7 6 1 1 ) f(x) = 4.381797558
#>
#>
fv = 4.381798
#> Warning: optimal degree equals search maximum (1): rerun with larger degree.max optimal degree equals search maximum (4): rerun with larger degree.max optimal degree equals search maximum (2): rerun with larger degree.max optimal degree equals search maximum (10): rerun with larger degree.max optimal degree equals search maximum (10): rerun with larger degree.max optimal degree equals search maximum (2): rerun with larger degree.max optimal degree equals search maximum (10): rerun with larger degree.max optimal degree equals search maximum (10): rerun with larger degree.max optimal degree equals search maximum (1): rerun with larger degree.max optimal degree equals search maximum (10): rerun with larger degree.max
#> Working...
print(learner$model)
#> Call:
#> crs.formula(formula = formula, data = data, weights = private$.get_weights(task))
# Make predictions for the test rows
predictions = learner$predict(task, row_ids = ids$test)
#> Working...
#> Warning: some 'x' values beyond boundary knots may cause ill-conditioned bases
#>
# Score the predictions
predictions$score()
#> regr.mse
#> 116.7078