From 31df06be50d70dd5f032d06642b8821be5ca9d8b Mon Sep 17 00:00:00 2001 From: topepo Date: Wed, 24 Jan 2024 10:44:58 +0000 Subject: [PATCH] =?UTF-8?q?Deploying=20to=20gh-pages=20from=20@=20tidymode?= =?UTF-8?q?ls/parsnip@0ff77bc0c09744e5f3d2836d0bdc42bc6d27e6b0=20?= =?UTF-8?q?=F0=9F=9A=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- dev/apple-touch-icon-120x120.png | Bin 9769 -> 9769 bytes dev/apple-touch-icon-152x152.png | Bin 13321 -> 13321 bytes dev/apple-touch-icon-180x180.png | Bin 15780 -> 15780 bytes dev/apple-touch-icon-60x60.png | Bin 4010 -> 4010 bytes dev/apple-touch-icon-76x76.png | Bin 5226 -> 5226 bytes dev/apple-touch-icon.png | Bin 15780 -> 15780 bytes dev/articles/Examples.html | 624 +++++++++++++++---------------- dev/articles/Submodels.html | 2 +- dev/favicon-16x16.png | Bin 1112 -> 1112 bytes dev/favicon-32x32.png | Bin 2096 -> 2096 bytes dev/news/index.html | 2 +- dev/pkgdown.yml | 2 +- dev/search.json | 2 +- 13 files changed, 316 insertions(+), 316 deletions(-) diff --git a/dev/apple-touch-icon-120x120.png b/dev/apple-touch-icon-120x120.png index cdbbd5cd80b5f0b9ce48914498ccd8ff6ba328a4..5db6ab34ad4792fc0a1293a1eee6b229f976a854 100644 GIT binary patch delta 69 zcmZ4Kv(jfm6^8_uCbQo)J&uh{pH)OmLJSS8Obo3IjI|96tPBiF^t|>??o^e!d8fzOESQ!``ZmQTdxl>gZLt@kY L18kFbspbO!7pN9@ diff --git a/dev/apple-touch-icon-152x152.png b/dev/apple-touch-icon-152x152.png index 9d9557c43ea9f39bdc9cd34f97cd6a5fd774643e..0b0c3fae321b18001e94686afb480e10596ffbe2 100644 GIT binary patch delta 69 zcmeCo=**Z<#Ua6^$#P-S?$sNcE*puMgcurFnHX9bm}naqSQ!{>>?|>!oM|kJA#vc! 
Knf}SMjPn67n->iL delta 69 zcmeCo=**Z<#Uak8U^eaS%NHA)E*ps$hZvez85>!dnrIssSQ!|wsk)g=&NP@@!m9LJSS8Obo3IOtlRRtPBjq|JAoMFfgc=xJHzu jB$lLF<>sekrd2W+85o)98W`#tp=(H6eB64nsck+0T=W~* delta 93 zcmZ2dy`*|V6^A&Vg89Q!H|sVw<=MCzhZvez85>!dnra&uSQ!{B2=Z-UU|>)!ag8WR jNi0dV%FR#7OsixtGB7gHH89jQLf3GS|EtYpQ`>w1pokot diff --git a/dev/apple-touch-icon-60x60.png b/dev/apple-touch-icon-60x60.png index 19eb7249db2c70ed90a97c7be8ed906c399dea6a..a5a324acb7f53d022a58ff8186d94a9597f39556 100644 GIT binary patch delta 69 zcmZ1_ze;{W6^8_uCgXw|{wf=rO87)fLJSS8Obo3I47CjmtPBhuZ;YNYnTuZ*Lt@gK K#~PEZ`SStF_!MdY delta 69 zcmZ1_ze;{W6^A&Vg3-hUx*s<-mGFrehZvez85>!d8fY6BSQ!|6%1Tn7%*8K@A<=Ap KYxZPo{(Jz{0Td4a diff --git a/dev/apple-touch-icon-76x76.png b/dev/apple-touch-icon-76x76.png index 55a93bd5dbc97c7aab73b76f79313e160abd3442..2a0c961f491094d7effb17514b4fe999c128c2af 100644 GIT binary patch delta 69 zcmaE*@k(Pt6^8_uCX?kwxp^C#OoT;DLJSS8Obo3IjI<35tPBh;MLKd$J|!%RA+ea{ K=hn%;h4TT?KosQw delta 69 zcmaE*@k(Pt6^A&Vg7LHEvgsR}OoT;@Lkvx44GeXS&^3g6-(5D@)HWXgA#WPY delta 93 zcmZ2dy`*|V6^A&Vf<U|>)!ag8WR jNi0dV%FR#7OsixtGB7gHH89jQLf7!%`TK>FO>Of5hn5|D diff --git a/dev/articles/Examples.html b/dev/articles/Examples.html index 300e0b0df..5e3495609 100644 --- a/dev/articles/Examples.html +++ b/dev/articles/Examples.html @@ -154,7 +154,7 @@

## dplyr::filter() masks stats::filter() ## dplyr::lag() masks stats::lag() ## recipes::step() masks stats::step() - ## Use suppressPackageStartupMessages() to eliminate package startup messages + ## Learn how to get started at https://www.tidymodels.org/start/
   tidymodels_prefer()
   data(Chicago)
@@ -1088,124 +1088,124 @@ 

linreg_reg_fit <- linreg_reg_spec %>% fit(ridership ~ ., data = Chicago_train)

  ## Epoch 1/20
   ## 
-  1/178 [..............................] - ETA: 38s - loss: 17.4307
- 70/178 [==========>...................] - ETA: 0s - loss: 10.6130 
-138/178 [======================>.......] - ETA: 0s - loss: 10.5614
-178/178 [==============================] - 0s 747us/step - loss: 10.0142
+  1/178 [..............................] - ETA: 37s - loss: 4.0178
+ 72/178 [===========>..................] - ETA: 0s - loss: 9.2526 
+141/178 [======================>.......] - ETA: 0s - loss: 9.9516
+178/178 [==============================] - 0s 734us/step - loss: 10.0162
   ## Epoch 2/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 8.4581
- 68/178 [==========>...................] - ETA: 0s - loss: 9.6488
-136/178 [=====================>........] - ETA: 0s - loss: 10.2243
-178/178 [==============================] - 0s 835us/step - loss: 9.9035
+  1/178 [..............................] - ETA: 0s - loss: 8.8066
+ 68/178 [==========>...................] - ETA: 0s - loss: 9.5041
+137/178 [======================>.......] - ETA: 0s - loss: 9.4180
+178/178 [==============================] - 0s 832us/step - loss: 9.9176
   ## Epoch 3/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 12.3253
- 69/178 [==========>...................] - ETA: 0s - loss: 9.2844 
-138/178 [======================>.......] - ETA: 0s - loss: 9.5535
-178/178 [==============================] - 0s 743us/step - loss: 9.8282
+  1/178 [..............................] - ETA: 0s - loss: 13.9874
+ 70/178 [==========>...................] - ETA: 0s - loss: 9.7820 
+139/178 [======================>.......] - ETA: 0s - loss: 10.1092
+178/178 [==============================] - 0s 751us/step - loss: 9.8406
   ## Epoch 4/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 6.8442
- 70/178 [==========>...................] - ETA: 0s - loss: 9.5057
-139/178 [======================>.......] - ETA: 0s - loss: 10.0353
-178/178 [==============================] - 0s 740us/step - loss: 9.7801
+  1/178 [..............................] - ETA: 0s - loss: 2.4863
+ 70/178 [==========>...................] - ETA: 0s - loss: 9.8285
+139/178 [======================>.......] - ETA: 0s - loss: 9.5838
+178/178 [==============================] - 0s 737us/step - loss: 9.7912
   ## Epoch 5/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 5.8707
- 70/178 [==========>...................] - ETA: 0s - loss: 9.3942
-139/178 [======================>.......] - ETA: 0s - loss: 9.5975
-178/178 [==============================] - 0s 742us/step - loss: 9.7271
+  1/178 [..............................] - ETA: 0s - loss: 3.7076
+ 70/178 [==========>...................] - ETA: 0s - loss: 10.5940
+139/178 [======================>.......] - ETA: 0s - loss: 9.9238 
+178/178 [==============================] - 0s 740us/step - loss: 9.7440
   ## Epoch 6/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 14.4505
- 69/178 [==========>...................] - ETA: 0s - loss: 9.6746 
-123/178 [===================>..........] - ETA: 0s - loss: 9.7878
-178/178 [==============================] - 0s 807us/step - loss: 9.6887
+  1/178 [..............................] - ETA: 0s - loss: 17.2106
+ 70/178 [==========>...................] - ETA: 0s - loss: 9.4003 
+124/178 [===================>..........] - ETA: 0s - loss: 9.9788
+178/178 [==============================] - 0s 803us/step - loss: 9.7044
   ## Epoch 7/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 6.0710
- 70/178 [==========>...................] - ETA: 0s - loss: 8.7914
-139/178 [======================>.......] - ETA: 0s - loss: 9.5653
-178/178 [==============================] - 0s 736us/step - loss: 9.6613
+  1/178 [..............................] - ETA: 0s - loss: 8.6231
+ 70/178 [==========>...................] - ETA: 0s - loss: 9.7931
+139/178 [======================>.......] - ETA: 0s - loss: 9.8011
+178/178 [==============================] - 0s 741us/step - loss: 9.6681
   ## Epoch 8/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 3.1123
- 71/178 [==========>...................] - ETA: 0s - loss: 9.0030
-141/178 [======================>.......] - ETA: 0s - loss: 9.4412
-178/178 [==============================] - 0s 733us/step - loss: 9.6261
+  1/178 [..............................] - ETA: 0s - loss: 8.8271
+ 70/178 [==========>...................] - ETA: 0s - loss: 9.4372
+140/178 [======================>.......] - ETA: 0s - loss: 9.6402
+178/178 [==============================] - 0s 733us/step - loss: 9.6429
   ## Epoch 9/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 5.1978
- 70/178 [==========>...................] - ETA: 0s - loss: 9.6703
-137/178 [======================>.......] - ETA: 0s - loss: 9.8151
-178/178 [==============================] - 0s 745us/step - loss: 9.6121
+  1/178 [..............................] - ETA: 0s - loss: 9.8849
+ 70/178 [==========>...................] - ETA: 0s - loss: 10.2799
+140/178 [======================>.......] - ETA: 0s - loss: 9.7851 
+178/178 [==============================] - 0s 734us/step - loss: 9.6176
   ## Epoch 10/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 9.9975
- 71/178 [==========>...................] - ETA: 0s - loss: 9.9440
-140/178 [======================>.......] - ETA: 0s - loss: 9.2790
-178/178 [==============================] - 0s 735us/step - loss: 9.5944
+  1/178 [..............................] - ETA: 0s - loss: 9.8390
+ 70/178 [==========>...................] - ETA: 0s - loss: 9.2099
+139/178 [======================>.......] - ETA: 0s - loss: 9.9742
+178/178 [==============================] - 0s 739us/step - loss: 9.6048
   ## Epoch 11/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 6.5948
- 70/178 [==========>...................] - ETA: 0s - loss: 10.5874
-140/178 [======================>.......] - ETA: 0s - loss: 9.8003 
-178/178 [==============================] - 0s 734us/step - loss: 9.5830
+  1/178 [..............................] - ETA: 0s - loss: 12.1220
+ 70/178 [==========>...................] - ETA: 0s - loss: 8.8306 
+137/178 [======================>.......] - ETA: 0s - loss: 9.4540
+178/178 [==============================] - 0s 748us/step - loss: 9.5852
   ## Epoch 12/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 5.6675
- 70/178 [==========>...................] - ETA: 0s - loss: 10.0886
-140/178 [======================>.......] - ETA: 0s - loss: 10.0484
-178/178 [==============================] - 0s 738us/step - loss: 9.5807
+  1/178 [..............................] - ETA: 0s - loss: 8.0364
+ 71/178 [==========>...................] - ETA: 0s - loss: 9.9407
+140/178 [======================>.......] - ETA: 0s - loss: 9.5596
+178/178 [==============================] - 0s 734us/step - loss: 9.5785
   ## Epoch 13/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 16.1351
- 69/178 [==========>...................] - ETA: 0s - loss: 9.5628 
-139/178 [======================>.......] - ETA: 0s - loss: 9.1581
-178/178 [==============================] - 0s 739us/step - loss: 9.5583
+  1/178 [..............................] - ETA: 0s - loss: 18.1718
+ 71/178 [==========>...................] - ETA: 0s - loss: 9.1830 
+140/178 [======================>.......] - ETA: 0s - loss: 9.3422
+178/178 [==============================] - 0s 733us/step - loss: 9.5692
   ## Epoch 14/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 3.0510
- 70/178 [==========>...................] - ETA: 0s - loss: 8.9868
-140/178 [======================>.......] - ETA: 0s - loss: 9.0619
-178/178 [==============================] - 0s 738us/step - loss: 9.5608
+  1/178 [..............................] - ETA: 0s - loss: 7.5960
+ 70/178 [==========>...................] - ETA: 0s - loss: 9.4781
+139/178 [======================>.......] - ETA: 0s - loss: 9.5747
+178/178 [==============================] - 0s 735us/step - loss: 9.5480
   ## Epoch 15/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 7.4849
- 70/178 [==========>...................] - ETA: 0s - loss: 9.5764
-140/178 [======================>.......] - ETA: 0s - loss: 9.0276
-178/178 [==============================] - 0s 734us/step - loss: 9.5466
+  1/178 [..............................] - ETA: 0s - loss: 1.3175
+ 69/178 [==========>...................] - ETA: 0s - loss: 9.2704
+139/178 [======================>.......] - ETA: 0s - loss: 9.5494
+178/178 [==============================] - 0s 737us/step - loss: 9.5444
   ## Epoch 16/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 2.8376
- 71/178 [==========>...................] - ETA: 0s - loss: 9.8700
-140/178 [======================>.......] - ETA: 0s - loss: 9.9571
-178/178 [==============================] - 0s 734us/step - loss: 9.5364
+  1/178 [..............................] - ETA: 0s - loss: 8.1835
+ 71/178 [==========>...................] - ETA: 0s - loss: 10.3360
+140/178 [======================>.......] - ETA: 0s - loss: 9.8695 
+178/178 [==============================] - 0s 730us/step - loss: 9.5391
   ## Epoch 17/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 7.4454
- 70/178 [==========>...................] - ETA: 0s - loss: 10.0004
-139/178 [======================>.......] - ETA: 0s - loss: 9.5371 
-178/178 [==============================] - 0s 738us/step - loss: 9.5360
+  1/178 [..............................] - ETA: 0s - loss: 6.5206
+ 71/178 [==========>...................] - ETA: 0s - loss: 9.7326
+140/178 [======================>.......] - ETA: 0s - loss: 9.3318
+178/178 [==============================] - 0s 734us/step - loss: 9.5301
   ## Epoch 18/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 20.2563
- 71/178 [==========>...................] - ETA: 0s - loss: 9.3077 
-140/178 [======================>.......] - ETA: 0s - loss: 9.4210
-178/178 [==============================] - 0s 736us/step - loss: 9.5309
+  1/178 [..............................] - ETA: 0s - loss: 17.5520
+ 70/178 [==========>...................] - ETA: 0s - loss: 9.0890 
+140/178 [======================>.......] - ETA: 0s - loss: 9.4896
+178/178 [==============================] - 0s 734us/step - loss: 9.5319
   ## Epoch 19/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 12.4499
- 70/178 [==========>...................] - ETA: 0s - loss: 10.3009
-139/178 [======================>.......] - ETA: 0s - loss: 9.9400 
-178/178 [==============================] - 0s 740us/step - loss: 9.5303
+  1/178 [..............................] - ETA: 0s - loss: 6.6445
+ 70/178 [==========>...................] - ETA: 0s - loss: 9.1370
+137/178 [======================>.......] - ETA: 0s - loss: 9.4737
+178/178 [==============================] - 0s 747us/step - loss: 9.5278
   ## Epoch 20/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 1.9899
- 70/178 [==========>...................] - ETA: 0s - loss: 8.8502
-139/178 [======================>.......] - ETA: 0s - loss: 9.6911
-178/178 [==============================] - 0s 735us/step - loss: 9.5231
+ 1/178 [..............................] - ETA: 0s - loss: 10.1576 + 70/178 [==========>...................] - ETA: 0s - loss: 10.9734 +139/178 [======================>.......] - ETA: 0s - loss: 9.4202 +178/178 [==============================] - 0s 738us/step - loss: 9.5204
   linreg_reg_fit
  ## parsnip model object
@@ -1230,10 +1230,10 @@ 

## 1 20.4 ## 2 20.6 ## 3 20.9 - ## 4 20.6 - ## 5 18.9 - ## 6 7.44 - ## 7 7.08

+ ## 4 20.7 + ## 5 19.0 + ## 6 7.45 + ## 7 7.09
With the "stan" engine

@@ -1521,84 +1521,84 @@

logreg_cls_fit <- logreg_cls_spec %>% fit(Class ~ ., data = data_train)
  ## Epoch 1/20
   ## 
- 1/25 [>.............................] - ETA: 7s - loss: 0.9321
-25/25 [==============================] - 0s 850us/step - loss: 0.8988
+ 1/25 [>.............................] - ETA: 7s - loss: 0.9612
+25/25 [==============================] - 0s 839us/step - loss: 0.9000
   ## Epoch 2/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.9018
-25/25 [==============================] - 0s 834us/step - loss: 0.8877
+ 1/25 [>.............................] - ETA: 0s - loss: 0.8651
+25/25 [==============================] - 0s 824us/step - loss: 0.8891
   ## Epoch 3/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.9434
-25/25 [==============================] - 0s 858us/step - loss: 0.8770
+ 1/25 [>.............................] - ETA: 0s - loss: 0.9680
+25/25 [==============================] - 0s 821us/step - loss: 0.8785
   ## Epoch 4/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.8521
-25/25 [==============================] - 0s 835us/step - loss: 0.8662
+ 1/25 [>.............................] - ETA: 0s - loss: 0.8629
+25/25 [==============================] - 0s 819us/step - loss: 0.8680
   ## Epoch 5/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.9705
-25/25 [==============================] - 0s 862us/step - loss: 0.8558
+ 1/25 [>.............................] - ETA: 0s - loss: 0.7740
+25/25 [==============================] - 0s 843us/step - loss: 0.8579
   ## Epoch 6/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.8390
-25/25 [==============================] - 0s 881us/step - loss: 0.8457
+ 1/25 [>.............................] - ETA: 0s - loss: 0.8865
+25/25 [==============================] - 0s 845us/step - loss: 0.8479
   ## Epoch 7/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.8311
-25/25 [==============================] - 0s 866us/step - loss: 0.8357
+ 1/25 [>.............................] - ETA: 0s - loss: 0.8074
+25/25 [==============================] - 0s 848us/step - loss: 0.8383
   ## Epoch 8/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.7976
-25/25 [==============================] - 0s 883us/step - loss: 0.8259
+ 1/25 [>.............................] - ETA: 0s - loss: 0.8382
+25/25 [==============================] - 0s 852us/step - loss: 0.8288
   ## Epoch 9/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.8802
-25/25 [==============================] - 0s 864us/step - loss: 0.8165
+ 1/25 [>.............................] - ETA: 0s - loss: 0.8655
+25/25 [==============================] - 0s 844us/step - loss: 0.8195
   ## Epoch 10/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.7910
-25/25 [==============================] - 0s 867us/step - loss: 0.8071
+ 1/25 [>.............................] - ETA: 0s - loss: 0.7916
+25/25 [==============================] - 0s 843us/step - loss: 0.8105
   ## Epoch 11/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.8217
-25/25 [==============================] - 0s 863us/step - loss: 0.7980
+ 1/25 [>.............................] - ETA: 0s - loss: 0.8338
+25/25 [==============================] - 0s 840us/step - loss: 0.8017
   ## Epoch 12/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.8311
-25/25 [==============================] - 0s 863us/step - loss: 0.7893
+ 1/25 [>.............................] - ETA: 0s - loss: 0.7643
+25/25 [==============================] - 0s 846us/step - loss: 0.7931
   ## Epoch 13/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.8042
-25/25 [==============================] - 0s 869us/step - loss: 0.7804
+ 1/25 [>.............................] - ETA: 0s - loss: 0.7616
+25/25 [==============================] - 0s 844us/step - loss: 0.7845
   ## Epoch 14/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.7928
-25/25 [==============================] - 0s 873us/step - loss: 0.7718
+ 1/25 [>.............................] - ETA: 0s - loss: 0.7430
+25/25 [==============================] - 0s 846us/step - loss: 0.7762
   ## Epoch 15/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.7808
-25/25 [==============================] - 0s 857us/step - loss: 0.7634
+ 1/25 [>.............................] - ETA: 0s - loss: 0.7676
+25/25 [==============================] - 0s 850us/step - loss: 0.7684
   ## Epoch 16/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.7588
-25/25 [==============================] - 0s 856us/step - loss: 0.7553
+ 1/25 [>.............................] - ETA: 0s - loss: 0.7518
+25/25 [==============================] - 0s 851us/step - loss: 0.7603
   ## Epoch 17/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.7480
-25/25 [==============================] - 0s 869us/step - loss: 0.7471
+ 1/25 [>.............................] - ETA: 0s - loss: 0.7961
+25/25 [==============================] - 0s 835us/step - loss: 0.7524
   ## Epoch 18/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.7732
-25/25 [==============================] - 0s 861us/step - loss: 0.7392
+ 1/25 [>.............................] - ETA: 0s - loss: 0.7796
+25/25 [==============================] - 0s 838us/step - loss: 0.7448
   ## Epoch 19/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.7394
-25/25 [==============================] - 0s 868us/step - loss: 0.7314
+ 1/25 [>.............................] - ETA: 0s - loss: 0.7352
+25/25 [==============================] - 0s 842us/step - loss: 0.7373
   ## Epoch 20/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6844
-25/25 [==============================] - 0s 847us/step - loss: 0.7239
+ 1/25 [>.............................] - ETA: 0s - loss: 0.7264 +25/25 [==============================] - 0s 851us/step - loss: 0.7300
   logreg_cls_fit
  ## parsnip model object
@@ -1624,16 +1624,16 @@ 

  ## # A tibble: 10 × 3
   ##    .pred_class .pred_Class1 .pred_Class2
   ##    <fct>              <dbl>        <dbl>
-  ##  1 Class1             0.509       0.491 
-  ##  2 Class1             0.836       0.164 
-  ##  3 Class1             0.521       0.479 
-  ##  4 Class1             0.828       0.172 
-  ##  5 Class1             0.638       0.362 
-  ##  6 Class1             0.534       0.466 
-  ##  7 Class1             0.737       0.263 
-  ##  8 Class1             0.525       0.475 
-  ##  9 Class1             0.989       0.0108
-  ## 10 Class2             0.492       0.508
+ ## 1 Class1 0.501 0.499 + ## 2 Class1 0.834 0.166 + ## 3 Class1 0.512 0.488 + ## 4 Class1 0.829 0.171 + ## 5 Class1 0.635 0.365 + ## 6 Class1 0.531 0.469 + ## 7 Class1 0.734 0.266 + ## 8 Class1 0.516 0.484 + ## 9 Class1 0.990 0.0103 + ## 10 Class2 0.490 0.510

With the "LiblineaR" engine

@@ -2072,124 +2072,124 @@

mlp_reg_fit <- mlp_reg_spec %>% fit(ridership ~ ., data = Chicago_train)
  ## Epoch 1/20
   ## 
-  1/178 [..............................] - ETA: 33s - loss: 195.9154
- 70/178 [==========>...................] - ETA: 0s - loss: 213.1526 
-130/178 [====================>.........] - ETA: 0s - loss: 213.3227
-178/178 [==============================] - 0s 783us/step - loss: 209.6472
+  1/178 [..............................] - ETA: 33s - loss: 230.9207
+ 69/178 [==========>...................] - ETA: 0s - loss: 216.2306 
+136/178 [=====================>........] - ETA: 0s - loss: 213.2218
+178/178 [==============================] - 0s 759us/step - loss: 209.6853
   ## Epoch 2/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 199.8269
- 68/178 [==========>...................] - ETA: 0s - loss: 194.0793
-135/178 [=====================>........] - ETA: 0s - loss: 191.9669
-178/178 [==============================] - 0s 761us/step - loss: 190.0307
+  1/178 [..............................] - ETA: 0s - loss: 226.3140
+ 68/178 [==========>...................] - ETA: 0s - loss: 191.2608
+135/178 [=====================>........] - ETA: 0s - loss: 191.1377
+178/178 [==============================] - 0s 765us/step - loss: 190.0487
   ## Epoch 3/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 158.2600
- 68/178 [==========>...................] - ETA: 0s - loss: 182.6862
-135/178 [=====================>........] - ETA: 0s - loss: 183.1960
-178/178 [==============================] - 0s 761us/step - loss: 180.1448
+  1/178 [..............................] - ETA: 0s - loss: 167.0655
+ 67/178 [==========>...................] - ETA: 0s - loss: 183.1242
+132/178 [=====================>........] - ETA: 0s - loss: 182.3996
+178/178 [==============================] - 0s 773us/step - loss: 180.1502
   ## Epoch 4/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 215.8296
- 67/178 [==========>...................] - ETA: 0s - loss: 173.9482
-134/178 [=====================>........] - ETA: 0s - loss: 172.0008
-178/178 [==============================] - 0s 767us/step - loss: 171.5592
+  1/178 [..............................] - ETA: 0s - loss: 202.8846
+ 68/178 [==========>...................] - ETA: 0s - loss: 173.8117
+135/178 [=====================>........] - ETA: 0s - loss: 171.4261
+178/178 [==============================] - 0s 763us/step - loss: 171.5603
   ## Epoch 5/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 215.5383
- 68/178 [==========>...................] - ETA: 0s - loss: 167.2482
-135/178 [=====================>........] - ETA: 0s - loss: 164.8201
-178/178 [==============================] - 0s 766us/step - loss: 163.5937
+  1/178 [..............................] - ETA: 0s - loss: 187.1993
+ 68/178 [==========>...................] - ETA: 0s - loss: 164.3662
+133/178 [=====================>........] - ETA: 0s - loss: 163.5835
+178/178 [==============================] - 0s 772us/step - loss: 163.5888
   ## Epoch 6/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 135.6939
- 68/178 [==========>...................] - ETA: 0s - loss: 155.2889
-135/178 [=====================>........] - ETA: 0s - loss: 156.1454
-178/178 [==============================] - 0s 762us/step - loss: 156.0912
+  1/178 [..............................] - ETA: 0s - loss: 187.5711
+ 68/178 [==========>...................] - ETA: 0s - loss: 156.8455
+134/178 [=====================>........] - ETA: 0s - loss: 156.9154
+178/178 [==============================] - 0s 766us/step - loss: 156.0869
   ## Epoch 7/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 144.8098
- 68/178 [==========>...................] - ETA: 0s - loss: 152.5993
-135/178 [=====================>........] - ETA: 0s - loss: 151.6861
-178/178 [==============================] - 0s 765us/step - loss: 148.9920
+  1/178 [..............................] - ETA: 0s - loss: 141.4862
+ 68/178 [==========>...................] - ETA: 0s - loss: 149.5145
+134/178 [=====================>........] - ETA: 0s - loss: 149.3982
+178/178 [==============================] - 0s 768us/step - loss: 148.9828
   ## Epoch 8/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 107.1714
- 68/178 [==========>...................] - ETA: 0s - loss: 144.1254
-135/178 [=====================>........] - ETA: 0s - loss: 142.7666
-178/178 [==============================] - 0s 763us/step - loss: 142.2492
+  1/178 [..............................] - ETA: 0s - loss: 114.8170
+ 67/178 [==========>...................] - ETA: 0s - loss: 140.4720
+134/178 [=====================>........] - ETA: 0s - loss: 142.2585
+178/178 [==============================] - 0s 769us/step - loss: 142.2271
   ## Epoch 9/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 128.3298
- 68/178 [==========>...................] - ETA: 0s - loss: 139.5679
-135/178 [=====================>........] - ETA: 0s - loss: 136.7252
-178/178 [==============================] - 0s 762us/step - loss: 135.8328
+  1/178 [..............................] - ETA: 0s - loss: 132.7867
+ 68/178 [==========>...................] - ETA: 0s - loss: 136.8206
+135/178 [=====================>........] - ETA: 0s - loss: 137.1821
+178/178 [==============================] - 0s 759us/step - loss: 135.8030
   ## Epoch 10/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 150.5847
- 68/178 [==========>...................] - ETA: 0s - loss: 126.4342
-135/178 [=====================>........] - ETA: 0s - loss: 129.5998
-178/178 [==============================] - 0s 760us/step - loss: 129.7127
+  1/178 [..............................] - ETA: 0s - loss: 138.3583
+ 68/178 [==========>...................] - ETA: 0s - loss: 133.8351
+135/178 [=====================>........] - ETA: 0s - loss: 130.3492
+178/178 [==============================] - 0s 764us/step - loss: 129.6855
   ## Epoch 11/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 114.1486
- 67/178 [==========>...................] - ETA: 0s - loss: 125.5023
-134/178 [=====================>........] - ETA: 0s - loss: 125.7385
-178/178 [==============================] - 0s 767us/step - loss: 123.8771
+  1/178 [..............................] - ETA: 0s - loss: 131.2519
+ 67/178 [==========>...................] - ETA: 0s - loss: 126.0124
+134/178 [=====================>........] - ETA: 0s - loss: 125.5755
+178/178 [==============================] - 0s 764us/step - loss: 123.8528
   ## Epoch 12/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 90.9601
- 68/178 [==========>...................] - ETA: 0s - loss: 120.7318
-134/178 [=====================>........] - ETA: 0s - loss: 119.0738
-178/178 [==============================] - 0s 767us/step - loss: 118.3144
+  1/178 [..............................] - ETA: 0s - loss: 152.5505
+ 68/178 [==========>...................] - ETA: 0s - loss: 122.7017
+135/178 [=====================>........] - ETA: 0s - loss: 118.2214
+178/178 [==============================] - 0s 763us/step - loss: 118.2895
   ## Epoch 13/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 106.5899
- 68/178 [==========>...................] - ETA: 0s - loss: 114.7508
-135/178 [=====================>........] - ETA: 0s - loss: 113.2992
-178/178 [==============================] - 0s 763us/step - loss: 113.0014
+  1/178 [..............................] - ETA: 0s - loss: 120.8791
+ 65/178 [=========>....................] - ETA: 0s - loss: 112.7203
+132/178 [=====================>........] - ETA: 0s - loss: 113.8766
+178/178 [==============================] - 0s 774us/step - loss: 112.9745
   ## Epoch 14/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 118.1168
- 68/178 [==========>...................] - ETA: 0s - loss: 109.2904
-134/178 [=====================>........] - ETA: 0s - loss: 107.8045
-178/178 [==============================] - 0s 766us/step - loss: 107.9151
+  1/178 [..............................] - ETA: 0s - loss: 117.3056
+ 68/178 [==========>...................] - ETA: 0s - loss: 108.3989
+135/178 [=====================>........] - ETA: 0s - loss: 108.3585
+178/178 [==============================] - 0s 761us/step - loss: 107.8919
   ## Epoch 15/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 95.7403
- 67/178 [==========>...................] - ETA: 0s - loss: 102.9026
-134/178 [=====================>........] - ETA: 0s - loss: 103.2705
-178/178 [==============================] - 0s 766us/step - loss: 103.0387
+  1/178 [..............................] - ETA: 0s - loss: 99.4304
+ 67/178 [==========>...................] - ETA: 0s - loss: 103.0506
+134/178 [=====================>........] - ETA: 0s - loss: 103.0960
+178/178 [==============================] - 0s 767us/step - loss: 103.0151
   ## Epoch 16/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 93.6622
- 65/178 [=========>....................] - ETA: 0s - loss: 100.8347
-132/178 [=====================>........] - ETA: 0s - loss: 98.9483 
-178/178 [==============================] - 0s 777us/step - loss: 98.3335
+  1/178 [..............................] - ETA: 0s - loss: 89.3936
+ 67/178 [==========>...................] - ETA: 0s - loss: 99.6111
+132/178 [=====================>........] - ETA: 0s - loss: 98.9638
+178/178 [==============================] - 0s 786us/step - loss: 98.3087
   ## Epoch 17/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 86.5690
- 68/178 [==========>...................] - ETA: 0s - loss: 96.3766
-134/178 [=====================>........] - ETA: 0s - loss: 94.5167
-178/178 [==============================] - 0s 766us/step - loss: 93.7449
+  1/178 [..............................] - ETA: 0s - loss: 111.9582
+ 66/178 [==========>...................] - ETA: 0s - loss: 96.8132 
+133/178 [=====================>........] - ETA: 0s - loss: 94.5995
+178/178 [==============================] - 0s 768us/step - loss: 93.7212
   ## Epoch 18/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 97.1584
- 67/178 [==========>...................] - ETA: 0s - loss: 88.8328
-134/178 [=====================>........] - ETA: 0s - loss: 90.4486
-178/178 [==============================] - 0s 769us/step - loss: 89.2406
+  1/178 [..............................] - ETA: 0s - loss: 101.5892
+ 68/178 [==========>...................] - ETA: 0s - loss: 90.6691 
+135/178 [=====================>........] - ETA: 0s - loss: 88.8466
+178/178 [==============================] - 0s 761us/step - loss: 89.2178
   ## Epoch 19/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 78.2105
- 67/178 [==========>...................] - ETA: 0s - loss: 85.0607
-134/178 [=====================>........] - ETA: 0s - loss: 85.4981
-178/178 [==============================] - 0s 765us/step - loss: 84.8999
+  1/178 [..............................] - ETA: 0s - loss: 94.9479
+ 68/178 [==========>...................] - ETA: 0s - loss: 86.5822
+135/178 [=====================>........] - ETA: 0s - loss: 85.5444
+178/178 [==============================] - 0s 759us/step - loss: 84.8844
   ## Epoch 20/20
   ## 
-  1/178 [..............................] - ETA: 0s - loss: 74.8593
- 68/178 [==========>...................] - ETA: 0s - loss: 80.9710
-135/178 [=====================>........] - ETA: 0s - loss: 80.7750
-178/178 [==============================] - 0s 763us/step - loss: 80.7053
+ 1/178 [..............................] - ETA: 0s - loss: 77.3532 + 68/178 [==========>...................] - ETA: 0s - loss: 83.4353 +132/178 [=====================>........] - ETA: 0s - loss: 81.5463 +178/178 [==============================] - 0s 780us/step - loss: 80.6915
   mlp_reg_fit
  ## parsnip model object
@@ -2216,8 +2216,8 @@ 

## 3 7.50 ## 4 7.50 ## 5 7.50 - ## 6 6.86 - ## 7 6.69

+ ## 6 6.90 + ## 7 6.74

Classification Example (keras)

@@ -2251,84 +2251,84 @@

mlp_cls_fit <- mlp_cls_spec %>% fit(Class ~ ., data = data_train)
  ## Epoch 1/20
   ## 
- 1/25 [>.............................] - ETA: 5s - loss: 0.7017
-25/25 [==============================] - 0s 860us/step - loss: 0.6993
+ 1/25 [>.............................] - ETA: 5s - loss: 0.6948
+25/25 [==============================] - 0s 855us/step - loss: 0.6990
   ## Epoch 2/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6943
-25/25 [==============================] - 0s 845us/step - loss: 0.6934
+ 1/25 [>.............................] - ETA: 0s - loss: 0.7013
+25/25 [==============================] - 0s 835us/step - loss: 0.6929
   ## Epoch 3/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6993
-25/25 [==============================] - 0s 845us/step - loss: 0.6876
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6962
+25/25 [==============================] - 0s 824us/step - loss: 0.6872
   ## Epoch 4/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6779
-25/25 [==============================] - 0s 841us/step - loss: 0.6824
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6825
+25/25 [==============================] - 0s 849us/step - loss: 0.6818
   ## Epoch 5/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6796
-25/25 [==============================] - 0s 865us/step - loss: 0.6774
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6802
+25/25 [==============================] - 0s 858us/step - loss: 0.6770
   ## Epoch 6/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6739
-25/25 [==============================] - 0s 891us/step - loss: 0.6728
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6805
+25/25 [==============================] - 0s 859us/step - loss: 0.6723
   ## Epoch 7/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6699
-25/25 [==============================] - 0s 873us/step - loss: 0.6683
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6707
+25/25 [==============================] - 0s 885us/step - loss: 0.6679
   ## Epoch 8/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6708
-25/25 [==============================] - 0s 874us/step - loss: 0.6641
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6636
+25/25 [==============================] - 0s 985us/step - loss: 0.6636
   ## Epoch 9/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6659
-25/25 [==============================] - 0s 873us/step - loss: 0.6598
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6627
+25/25 [==============================] - 0s 884us/step - loss: 0.6594
   ## Epoch 10/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6567
-25/25 [==============================] - 0s 879us/step - loss: 0.6556
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6575
+25/25 [==============================] - 0s 858us/step - loss: 0.6553
   ## Epoch 11/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6423
-25/25 [==============================] - 0s 862us/step - loss: 0.6515
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6481
+25/25 [==============================] - 0s 860us/step - loss: 0.6510
   ## Epoch 12/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6556
-25/25 [==============================] - 0s 872us/step - loss: 0.6473
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6327
+25/25 [==============================] - 0s 860us/step - loss: 0.6470
   ## Epoch 13/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6443
-25/25 [==============================] - 0s 880us/step - loss: 0.6431
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6626
+25/25 [==============================] - 0s 866us/step - loss: 0.6429
   ## Epoch 14/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6518
-25/25 [==============================] - 0s 893us/step - loss: 0.6389
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6334
+25/25 [==============================] - 0s 859us/step - loss: 0.6387
   ## Epoch 15/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6341
-25/25 [==============================] - 0s 881us/step - loss: 0.6346
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6530
+25/25 [==============================] - 0s 868us/step - loss: 0.6345
   ## Epoch 16/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6137
-25/25 [==============================] - 0s 868us/step - loss: 0.6301
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6354
+25/25 [==============================] - 0s 870us/step - loss: 0.6300
   ## Epoch 17/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6139
-25/25 [==============================] - 0s 886us/step - loss: 0.6260
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6479
+25/25 [==============================] - 0s 858us/step - loss: 0.6257
   ## Epoch 18/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6361
-25/25 [==============================] - 0s 885us/step - loss: 0.6214
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6021
+25/25 [==============================] - 0s 869us/step - loss: 0.6213
   ## Epoch 19/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6043
-25/25 [==============================] - 0s 879us/step - loss: 0.6176
+ 1/25 [>.............................] - ETA: 0s - loss: 0.6017
+25/25 [==============================] - 0s 869us/step - loss: 0.6167
   ## Epoch 20/20
   ## 
- 1/25 [>.............................] - ETA: 0s - loss: 0.6076
-25/25 [==============================] - 0s 864us/step - loss: 0.6127
+ 1/25 [>.............................] - ETA: 0s - loss: 0.5676 +25/25 [==============================] - 0s 867us/step - loss: 0.6122
   mlp_cls_fit
  ## parsnip model object
@@ -2354,16 +2354,16 @@ 

  ## # A tibble: 10 × 3
   ##    .pred_class .pred_Class1 .pred_Class2
   ##    <fct>              <dbl>        <dbl>
-  ##  1 Class1             0.538        0.462
-  ##  2 Class1             0.662        0.338
-  ##  3 Class1             0.595        0.405
-  ##  4 Class2             0.477        0.523
-  ##  5 Class2             0.474        0.526
-  ##  6 Class2             0.410        0.590
-  ##  7 Class1             0.552        0.448
-  ##  8 Class1             0.555        0.445
-  ##  9 Class1             0.745        0.255
-  ## 10 Class2             0.373        0.627
+ ## 1 Class1 0.542 0.458 + ## 2 Class1 0.667 0.333 + ## 3 Class1 0.600 0.400 + ## 4 Class2 0.481 0.519 + ## 5 Class2 0.478 0.522 + ## 6 Class2 0.413 0.587 + ## 7 Class1 0.557 0.443 + ## 8 Class1 0.559 0.441 + ## 9 Class1 0.749 0.251 + ## 10 Class2 0.376 0.624

@@ -2516,84 +2516,84 @@

mr_cls_fit <- mr_cls_spec %>% fit(island ~ ., data = penguins_train)

## Epoch 1/20
 ## 
-1/11 [=>............................] - ETA: 2s - loss: 3.3637
-11/11 [==============================] - 0s 964us/step - loss: 4.0321
+1/11 [=>............................] - ETA: 2s - loss: 3.4629
+11/11 [==============================] - 0s 943us/step - loss: 4.0391
 ## Epoch 2/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 3.4470
-11/11 [==============================] - 0s 932us/step - loss: 3.7883
+1/11 [=>............................] - ETA: 0s - loss: 3.5155
+11/11 [==============================] - 0s 892us/step - loss: 3.7981
 ## Epoch 3/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 3.6557
-11/11 [==============================] - 0s 919us/step - loss: 3.5551
+1/11 [=>............................] - ETA: 0s - loss: 3.6371
+11/11 [==============================] - 0s 880us/step - loss: 3.5654
 ## Epoch 4/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 3.0647
-11/11 [==============================] - 0s 902us/step - loss: 3.3361
+1/11 [=>............................] - ETA: 0s - loss: 3.3243
+11/11 [==============================] - 0s 897us/step - loss: 3.3467
 ## Epoch 5/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 2.2162
-11/11 [==============================] - 0s 904us/step - loss: 3.1318
+1/11 [=>............................] - ETA: 0s - loss: 3.5013
+11/11 [==============================] - 0s 886us/step - loss: 3.1436
 ## Epoch 6/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 3.7893
-11/11 [==============================] - 0s 899us/step - loss: 2.9391
+1/11 [=>............................] - ETA: 0s - loss: 2.7714
+11/11 [==============================] - 0s 908us/step - loss: 2.9455
 ## Epoch 7/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 3.1412
-11/11 [==============================] - 0s 905us/step - loss: 2.7622
+1/11 [=>............................] - ETA: 0s - loss: 3.0290
+11/11 [==============================] - 0s 902us/step - loss: 2.7662
 ## Epoch 8/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 2.7488
-11/11 [==============================] - 0s 919us/step - loss: 2.6005
+1/11 [=>............................] - ETA: 0s - loss: 2.6760
+11/11 [==============================] - 0s 881us/step - loss: 2.6074
 ## Epoch 9/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 2.7304
-11/11 [==============================] - 0s 894us/step - loss: 2.4571
+1/11 [=>............................] - ETA: 0s - loss: 2.3547
+11/11 [==============================] - 0s 885us/step - loss: 2.4582
 ## Epoch 10/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 3.2580
-11/11 [==============================] - 0s 911us/step - loss: 2.3275
+1/11 [=>............................] - ETA: 0s - loss: 2.0500
+11/11 [==============================] - 0s 896us/step - loss: 2.3305
 ## Epoch 11/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 1.9137
-11/11 [==============================] - 0s 929us/step - loss: 2.2151
+1/11 [=>............................] - ETA: 0s - loss: 2.0776
+11/11 [==============================] - 0s 924us/step - loss: 2.2132
 ## Epoch 12/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 2.0605
-11/11 [==============================] - 0s 951us/step - loss: 2.1127
+1/11 [=>............................] - ETA: 0s - loss: 1.8689
+11/11 [==============================] - 0s 918us/step - loss: 2.1143
 ## Epoch 13/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 1.9462
-11/11 [==============================] - 0s 960us/step - loss: 2.0275
+1/11 [=>............................] - ETA: 0s - loss: 1.5654
+11/11 [==============================] - 0s 936us/step - loss: 2.0278
 ## Epoch 14/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 1.7585
-11/11 [==============================] - 0s 918us/step - loss: 1.9550
+1/11 [=>............................] - ETA: 0s - loss: 1.9938
+11/11 [==============================] - 0s 912us/step - loss: 1.9540
 ## Epoch 15/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 1.6617
-11/11 [==============================] - 0s 914us/step - loss: 1.8890
+1/11 [=>............................] - ETA: 0s - loss: 1.9354
+11/11 [==============================] - 0s 909us/step - loss: 1.8917
 ## Epoch 16/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 1.6871
-11/11 [==============================] - 0s 924us/step - loss: 1.8379
+1/11 [=>............................] - ETA: 0s - loss: 1.7670
+11/11 [==============================] - 0s 920us/step - loss: 1.8379
 ## Epoch 17/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 1.6374
-11/11 [==============================] - 0s 925us/step - loss: 1.7913
+1/11 [=>............................] - ETA: 0s - loss: 1.6599
+11/11 [==============================] - 0s 933us/step - loss: 1.7922
 ## Epoch 18/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 1.9343
-11/11 [==============================] - 0s 941us/step - loss: 1.7521
+1/11 [=>............................] - ETA: 0s - loss: 2.1965
+11/11 [==============================] - 0s 914us/step - loss: 1.7527
 ## Epoch 19/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 1.7206
-11/11 [==============================] - 0s 939us/step - loss: 1.7162
+1/11 [=>............................] - ETA: 0s - loss: 1.5248
+11/11 [==============================] - 0s 933us/step - loss: 1.7162
 ## Epoch 20/20
 ## 
-1/11 [=>............................] - ETA: 0s - loss: 1.9875
-11/11 [==============================] - 0s 905us/step - loss: 1.6881
+1/11 [=>............................] - ETA: 0s - loss: 1.9486 +11/11 [==============================] - 0s 927us/step - loss: 1.6877
 mr_cls_fit
## parsnip model object
@@ -2619,11 +2619,11 @@ 

## # A tibble: 5 × 4
 ##   .pred_class .pred_Biscoe .pred_Dream .pred_Torgersen
 ##   <fct>              <dbl>       <dbl>           <dbl>
-## 1 Torgersen       0.286         0.0175      0.697     
-## 2 Dream           0.000112      1.00        0.00000200
-## 3 Dream           0.318         0.474       0.207     
-## 4 Dream           0.0495        0.939       0.0113    
-## 5 Torgersen       0.304         0.0235      0.673
+## 1 Torgersen 0.285 0.0171 0.698 +## 2 Dream 0.000113 1.00 0.00000204 +## 3 Dream 0.320 0.470 0.210 +## 4 Dream 0.0502 0.938 0.0115 +## 5 Torgersen 0.303 0.0230 0.674

With the "nnet" engine

diff --git a/dev/articles/Submodels.html b/dev/articles/Submodels.html index 960e9c960..9bf07bae0 100644 --- a/dev/articles/Submodels.html +++ b/dev/articles/Submodels.html @@ -142,7 +142,7 @@ ## dplyr::filter() masks stats::filter() ## dplyr::lag() masks stats::lag() ## recipes::step() masks stats::step() -## Use tidymodels_prefer() to resolve common conflicts. +## Dig deeper into tidy modeling with R at https://www.tmwr.org
 data(attrition, package = "modeldata")
 
diff --git a/dev/favicon-16x16.png b/dev/favicon-16x16.png
index 27be507d6b84d656c51807e90917114a71b422bb..a08a04e6474d285bb468a553ecf8830536ef74aa 100644
GIT binary patch
delta 71
zcmcb?af4$+9}|ZJmnP4>OyilGr!ny{ikO5L8d#YaS{WK?8yHv_7|4BgoIZI6vn+;0
N{<@#qlV39D0|4bi6$}6X

delta 71
zcmcb?af4$+9}|Z-pCZ$`BpsQ})0p@eMT|oXO{|QKtqhE`4GgRd3=Ulv3z@uwSr$WL
NbJw~TlV39D0|3CB6!ZW9

diff --git a/dev/favicon-32x32.png b/dev/favicon-32x32.png
index 1bd76e562c3da2f84418afd1636f05a6e85faf21..95ae16fbe07e500d342e0bddc121ade37e059e4f 100644
GIT binary patch
delta 69
zcmdlWut8u#6^8_uChy8TX8w&$zt}`fLJSS8Obo3IjkOI7tPBixw0J2`p1>}PA#w8N
Lr}>i)vgZQ;v0xPP

delta 69
zcmdlWut8u#6^A&VBJ<5unXZjZzt}{KLkvx
 

parsnip (development version)

  • Fixed bug in fitting some model types with the "spark" engine (#1045).

  • -
  • Fixed issue in mlp() metadata where the stop_iter engine argument had been mistakenly protected for the "brulee" engine. (#1050)

  • +
  • Fixed issues in metadata for the "brulee" engine where several arguments were mistakenly protected. (#1050, #1054)

  • .filter_eval_time() was moved to the survival standalone file.

  • Improved errors and documentation related to special terms in formulas. See ?model_formula to learn more. (#770, #1014)

  • Improved errors in cases where the outcome column is mis-specified. (#1003)

  • diff --git a/dev/pkgdown.yml b/dev/pkgdown.yml index 6565cfd90..538eecc5c 100644 --- a/dev/pkgdown.yml +++ b/dev/pkgdown.yml @@ -5,7 +5,7 @@ articles: Examples: Examples.html Submodels: Submodels.html parsnip: parsnip.html -last_built: 2024-01-23T14:33Z +last_built: 2024-01-24T10:41Z urls: reference: https://parsnip.tidymodels.org/reference article: https://parsnip.tidymodels.org/articles diff --git a/dev/search.json b/dev/search.json index e7c874fd5..192417da0 100644 --- a/dev/search.json +++ b/dev/search.json @@ -1 +1 @@ -[{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"our-pledge","dir":"","previous_headings":"","what":"Our Pledge","title":"Contributor Covenant Code of Conduct","text":"members, contributors, leaders pledge make participation community harassment-free experience everyone, regardless age, body size, visible invisible disability, ethnicity, sex characteristics, gender identity expression, level experience, education, socio-economic status, nationality, personal appearance, race, caste, color, religion, sexual identity orientation. 
pledge act interact ways contribute open, welcoming, diverse, inclusive, healthy community.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"our-standards","dir":"","previous_headings":"","what":"Our Standards","title":"Contributor Covenant Code of Conduct","text":"Examples behavior contributes positive environment community include: Demonstrating empathy kindness toward people respectful differing opinions, viewpoints, experiences Giving gracefully accepting constructive feedback Accepting responsibility apologizing affected mistakes, learning experience Focusing best just us individuals, overall community Examples unacceptable behavior include: use sexualized language imagery, sexual attention advances kind Trolling, insulting derogatory comments, personal political attacks Public private harassment Publishing others’ private information, physical email address, without explicit permission conduct reasonably considered inappropriate professional setting","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"enforcement-responsibilities","dir":"","previous_headings":"","what":"Enforcement Responsibilities","title":"Contributor Covenant Code of Conduct","text":"Community leaders responsible clarifying enforcing standards acceptable behavior take appropriate fair corrective action response behavior deem inappropriate, threatening, offensive, harmful. Community leaders right responsibility remove, edit, reject comments, commits, code, wiki edits, issues, contributions aligned Code Conduct, communicate reasons moderation decisions appropriate.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"scope","dir":"","previous_headings":"","what":"Scope","title":"Contributor Covenant Code of Conduct","text":"Code Conduct applies within community spaces, also applies individual officially representing community public spaces. 
Examples representing community include using official e-mail address, posting via official social media account, acting appointed representative online offline event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"enforcement","dir":"","previous_headings":"","what":"Enforcement","title":"Contributor Covenant Code of Conduct","text":"Instances abusive, harassing, otherwise unacceptable behavior may reported community leaders responsible enforcement codeofconduct@posit.co. complaints reviewed investigated promptly fairly. community leaders obligated respect privacy security reporter incident.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"enforcement-guidelines","dir":"","previous_headings":"","what":"Enforcement Guidelines","title":"Contributor Covenant Code of Conduct","text":"Community leaders follow Community Impact Guidelines determining consequences action deem violation Code Conduct:","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"id_1-correction","dir":"","previous_headings":"Enforcement Guidelines","what":"1. Correction","title":"Contributor Covenant Code of Conduct","text":"Community Impact: Use inappropriate language behavior deemed unprofessional unwelcome community. Consequence: private, written warning community leaders, providing clarity around nature violation explanation behavior inappropriate. public apology may requested.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"id_2-warning","dir":"","previous_headings":"Enforcement Guidelines","what":"2. Warning","title":"Contributor Covenant Code of Conduct","text":"Community Impact: violation single incident series actions. Consequence: warning consequences continued behavior. interaction people involved, including unsolicited interaction enforcing Code Conduct, specified period time. 
includes avoiding interactions community spaces well external channels like social media. Violating terms may lead temporary permanent ban.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"id_3-temporary-ban","dir":"","previous_headings":"Enforcement Guidelines","what":"3. Temporary Ban","title":"Contributor Covenant Code of Conduct","text":"Community Impact: serious violation community standards, including sustained inappropriate behavior. Consequence: temporary ban sort interaction public communication community specified period time. public private interaction people involved, including unsolicited interaction enforcing Code Conduct, allowed period. Violating terms may lead permanent ban.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"id_4-permanent-ban","dir":"","previous_headings":"Enforcement Guidelines","what":"4. Permanent Ban","title":"Contributor Covenant Code of Conduct","text":"Community Impact: Demonstrating pattern violation community standards, including sustained inappropriate behavior, harassment individual, aggression toward disparagement classes individuals. Consequence: permanent ban sort public interaction within community.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"attribution","dir":"","previous_headings":"","what":"Attribution","title":"Contributor Covenant Code of Conduct","text":"Code Conduct adapted Contributor Covenant, version 2.1, available https://www.contributor-covenant.org/version/2/1/code_of_conduct.html. Community Impact Guidelines inspired [Mozilla’s code conduct enforcement ladder][https://github.com/mozilla/inclusion]. answers common questions code conduct, see FAQ https://www.contributor-covenant.org/faq. 
Translations available https://www.contributor-covenant.org/translations.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CONTRIBUTING.html","id":null,"dir":"","previous_headings":"","what":"Contributing to tidymodels","title":"Contributing to tidymodels","text":"detailed information contributing tidymodels packages, see development contributing guide.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CONTRIBUTING.html","id":"documentation","dir":"","previous_headings":"","what":"Documentation","title":"Contributing to tidymodels","text":"Typos grammatical errors documentation may edited directly using GitHub web interface, long changes made source file. YES ✅: edit roxygen comment .R file R/ directory. 🚫: edit .Rd file man/ directory. use roxygen2, Markdown syntax, documentation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CONTRIBUTING.html","id":"code","dir":"","previous_headings":"","what":"Code","title":"Contributing to tidymodels","text":"submit 🎯 pull request tidymodels package, always file issue confirm tidymodels team agrees idea happy basic proposal. tidymodels packages work together. package contains unit tests, integration tests tests using packages contained extratests. pull requests, recommend create fork repo usethis::create_from_github(), initiate new branch usethis::pr_init(). Look build status making changes. README contains badges continuous integration services used package. New code follow tidyverse style guide. can use styler package apply styles, please don’t restyle code nothing PR. user-facing changes, add bullet top NEWS.md current development version header describing changes made followed GitHub username, links relevant issue(s)/PR(s). use testthat. Contributions test cases included easier accept. contribution spans use one package, consider building extratests changes check breakages /adding new tests . 
Let us know PR ran extra tests.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CONTRIBUTING.html","id":"code-of-conduct","dir":"","previous_headings":"Code","what":"Code of Conduct","title":"Contributing to tidymodels","text":"project released Contributor Code Conduct. contributing project, agree abide terms.","code":""},{"path":"https://parsnip.tidymodels.org/dev/LICENSE.html","id":null,"dir":"","previous_headings":"","what":"MIT License","title":"MIT License","text":"Copyright (c) 2021 parsnip authors Permission hereby granted, free charge, person obtaining copy software associated documentation files (“Software”), deal Software without restriction, including without limitation rights use, copy, modify, merge, publish, distribute, sublicense, /sell copies Software, permit persons Software furnished , subject following conditions: copyright notice permission notice shall included copies substantial portions Software. SOFTWARE PROVIDED “”, WITHOUT WARRANTY KIND, EXPRESS IMPLIED, INCLUDING LIMITED WARRANTIES MERCHANTABILITY, FITNESS PARTICULAR PURPOSE NONINFRINGEMENT. EVENT SHALL AUTHORS COPYRIGHT HOLDERS LIABLE CLAIM, DAMAGES LIABILITY, WHETHER ACTION CONTRACT, TORT OTHERWISE, ARISING , CONNECTION SOFTWARE USE DEALINGS SOFTWARE.","code":""},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"bart-models","dir":"Articles","previous_headings":"","what":"bart() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. 
’ll bind together one tibble:","code":"library(tidymodels) ## ── Attaching packages ─────────────────────────── tidymodels 1.1.1.9000 ── ## ✔ broom 1.0.5 ✔ rsample 1.2.0 ## ✔ dials 1.2.0 ✔ tibble 3.2.1 ## ✔ dplyr 1.1.4 ✔ tidyr 1.3.0 ## ✔ infer 1.0.5 ✔ tune 1.1.2 ## ✔ modeldata 1.3.0 ✔ workflows 1.1.3 ## ✔ parsnip 1.1.1.9007 ✔ workflowsets 1.0.1 ## ✔ purrr 1.0.2 ✔ yardstick 1.3.0 ## ✔ recipes 1.0.9 ## ── Conflicts ─────────────────────────────────── tidymodels_conflicts() ── ## ✖ purrr::discard() masks scales::discard() ## ✖ dplyr::filter() masks stats::filter() ## ✖ dplyr::lag() masks stats::lag() ## ✖ recipes::step() masks stats::step() ## • Use suppressPackageStartupMessages() to eliminate package startup messages tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] bt_reg_spec <- bart(trees = 15) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"dbarts\") bt_reg_spec ## BART Model Specification (regression) ## ## Main Arguments: ## trees = 15 ## ## Computational engine: dbarts set.seed(1) bt_reg_fit <- bt_reg_spec %>% fit(ridership ~ ., data = Chicago_train) bt_reg_fit ## parsnip model object ## ## ## Call: ## `NULL`() predict(bt_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.1 ## 2 20.3 ## 3 21.3 ## 4 20.2 ## 5 19.4 ## 6 7.51 ## 7 6.44 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] bt_cls_spec <- bart(trees = 15) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"dbarts\") bt_cls_spec ## ## Call: ## NULL set.seed(1) bt_cls_fit <- bt_cls_spec %>% fit(Class ~ ., data = data_train) bt_cls_fit ## parsnip model object ## ## ## Call: ## `NULL`() bind_cols( predict(bt_cls_fit, data_test), 
predict(bt_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.352 0.648 ## 2 Class1 0.823 0.177 ## 3 Class1 0.497 0.503 ## 4 Class2 0.509 0.491 ## 5 Class2 0.434 0.566 ## 6 Class2 0.185 0.815 ## 7 Class1 0.663 0.337 ## 8 Class2 0.392 0.608 ## 9 Class1 0.967 0.033 ## 10 Class2 0.095 0.905"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"boost_tree-models","dir":"Articles","previous_headings":"","what":"boost_tree() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. 
’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] bt_reg_spec <- boost_tree(trees = 15) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"xgboost\") bt_reg_spec ## Boosted Tree Model Specification (regression) ## ## Main Arguments: ## trees = 15 ## ## Computational engine: xgboost set.seed(1) bt_reg_fit <- bt_reg_spec %>% fit(ridership ~ ., data = Chicago_train) bt_reg_fit ## parsnip model object ## ## ##### xgb.Booster ## raw: 51.4 Kb ## call: ## xgboost::xgb.train(params = list(eta = 0.3, max_depth = 6, gamma = 0, ## colsample_bytree = 1, colsample_bynode = 1, min_child_weight = 1, ## subsample = 1), data = x$data, nrounds = 15, watchlist = x$watchlist, ## verbose = 0, nthread = 1, objective = \"reg:squarederror\") ## params (as set within xgb.train): ## eta = \"0.3\", max_depth = \"6\", gamma = \"0\", colsample_bytree = \"1\", colsample_bynode = \"1\", min_child_weight = \"1\", subsample = \"1\", nthread = \"1\", objective = \"reg:squarederror\", validate_parameters = \"TRUE\" ## xgb.attributes: ## niter ## callbacks: ## cb.evaluation.log() ## # of features: 2 ## niter: 15 ## nfeatures : 2 ## evaluation_log: ## iter training_rmse ## 1 10.481475 ## 2 7.620929 ## --- ## 14 2.551943 ## 15 2.531085 predict(bt_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.6 ## 2 20.6 ## 3 20.2 ## 4 20.6 ## 5 19.3 ## 6 7.26 ## 7 5.92 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] bt_cls_spec <- boost_tree(trees = 15) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"xgboost\") bt_cls_spec ## Boosted Tree Model Specification (classification) ## 
## Main Arguments: ## trees = 15 ## ## Computational engine: xgboost set.seed(1) bt_cls_fit <- bt_cls_spec %>% fit(Class ~ ., data = data_train) bt_cls_fit ## parsnip model object ## ## ##### xgb.Booster ## raw: 40.8 Kb ## call: ## xgboost::xgb.train(params = list(eta = 0.3, max_depth = 6, gamma = 0, ## colsample_bytree = 1, colsample_bynode = 1, min_child_weight = 1, ## subsample = 1), data = x$data, nrounds = 15, watchlist = x$watchlist, ## verbose = 0, nthread = 1, objective = \"binary:logistic\") ## params (as set within xgb.train): ## eta = \"0.3\", max_depth = \"6\", gamma = \"0\", colsample_bytree = \"1\", colsample_bynode = \"1\", min_child_weight = \"1\", subsample = \"1\", nthread = \"1\", objective = \"binary:logistic\", validate_parameters = \"TRUE\" ## xgb.attributes: ## niter ## callbacks: ## cb.evaluation.log() ## # of features: 2 ## niter: 15 ## nfeatures : 2 ## evaluation_log: ## iter training_logloss ## 1 0.5524619 ## 2 0.4730697 ## --- ## 14 0.2523133 ## 15 0.2490712 bind_cols( predict(bt_cls_fit, data_test), predict(bt_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.220 0.780 ## 2 Class1 0.931 0.0689 ## 3 Class1 0.638 0.362 ## 4 Class1 0.815 0.185 ## 5 Class2 0.292 0.708 ## 6 Class2 0.120 0.880 ## 7 Class1 0.796 0.204 ## 8 Class2 0.392 0.608 ## 9 Class1 0.879 0.121 ## 10 Class2 0.0389 0.961 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] bt_cls_spec <- boost_tree(trees = 15) %>% set_mode(\"classification\") %>% set_engine(\"C5.0\") bt_cls_spec ## Boosted Tree Model Specification (classification) ## ## Main Arguments: ## trees = 15 ## ## Computational engine: C5.0 set.seed(1) bt_cls_fit <- bt_cls_spec %>% fit(Class ~ ., data = data_train) bt_cls_fit ## parsnip model object ## ## ## Call: ## C5.0.default(x = x, y = y, trials = 15, control ## = C50::C5.0Control(minCases = 2, sample = 0)) ## 
## Classification Tree ## Number of samples: 781 ## Number of predictors: 2 ## ## Number of boosting iterations: 15 requested; 6 used due to early stopping ## Average tree size: 3.2 ## ## Non-standard options: attempt to group attributes bind_cols( predict(bt_cls_fit, data_test), predict(bt_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.311 0.689 ## 2 Class1 0.863 0.137 ## 3 Class1 0.535 0.465 ## 4 Class2 0.336 0.664 ## 5 Class2 0.336 0.664 ## 6 Class2 0.137 0.863 ## 7 Class2 0.496 0.504 ## 8 Class2 0.311 0.689 ## 9 Class1 1 0 ## 10 Class2 0 1"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"decision_tree-models","dir":"Articles","previous_headings":"","what":"decision_tree() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. 
’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] dt_reg_spec <- decision_tree(tree_depth = 30) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"rpart\") dt_reg_spec ## Decision Tree Model Specification (regression) ## ## Main Arguments: ## tree_depth = 30 ## ## Computational engine: rpart set.seed(1) dt_reg_fit <- dt_reg_spec %>% fit(ridership ~ ., data = Chicago_train) dt_reg_fit ## parsnip model object ## ## n= 5691 ## ## node), split, n, deviance, yval ## * denotes terminal node ## ## 1) root 5691 244958.800 13.615560 ## 2) Quincy_Wells< 2.737 1721 22973.630 5.194394 ## 4) Clark_Lake< 5.07 1116 13166.830 4.260215 * ## 5) Clark_Lake>=5.07 605 7036.349 6.917607 * ## 3) Quincy_Wells>=2.737 3970 47031.540 17.266140 ## 6) Clark_Lake< 17.6965 1940 16042.090 15.418210 * ## 7) Clark_Lake>=17.6965 2030 18033.560 19.032140 * predict(dt_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 19.0 ## 2 19.0 ## 3 19.0 ## 4 19.0 ## 5 19.0 ## 6 6.92 ## 7 6.92 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] dt_cls_spec <- decision_tree(tree_depth = 30) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"rpart\") dt_cls_spec ## Decision Tree Model Specification (classification) ## ## Main Arguments: ## tree_depth = 30 ## ## Computational engine: rpart set.seed(1) dt_cls_fit <- dt_cls_spec %>% fit(Class ~ ., data = data_train) dt_cls_fit ## parsnip model object ## ## n= 781 ## ## node), split, n, loss, yval, (yprob) ## * denotes terminal node ## ## 1) root 781 348 Class1 (0.5544174 0.4455826) ## 2) B< 1.495535 400 61 Class1 (0.8475000 0.1525000) * ## 3) 
B>=1.495535 381 94 Class2 (0.2467192 0.7532808) ## 6) B< 2.079458 191 70 Class2 (0.3664921 0.6335079) ## 12) A>=2.572663 48 13 Class1 (0.7291667 0.2708333) * ## 13) A< 2.572663 143 35 Class2 (0.2447552 0.7552448) * ## 7) B>=2.079458 190 24 Class2 (0.1263158 0.8736842) * bind_cols( predict(dt_cls_fit, data_test), predict(dt_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.245 0.755 ## 2 Class1 0.848 0.152 ## 3 Class1 0.848 0.152 ## 4 Class1 0.729 0.271 ## 5 Class1 0.729 0.271 ## 6 Class2 0.126 0.874 ## 7 Class2 0.245 0.755 ## 8 Class2 0.245 0.755 ## 9 Class1 0.848 0.152 ## 10 Class2 0.126 0.874 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] dt_cls_spec <- decision_tree(min_n = 2) %>% set_mode(\"classification\") %>% set_engine(\"C5.0\") dt_cls_spec ## Decision Tree Model Specification (classification) ## ## Main Arguments: ## min_n = 2 ## ## Computational engine: C5.0 set.seed(1) dt_cls_fit <- dt_cls_spec %>% fit(Class ~ ., data = data_train) dt_cls_fit ## parsnip model object ## ## ## Call: ## C5.0.default(x = x, y = y, trials = 1, control ## = C50::C5.0Control(minCases = 2, sample = 0)) ## ## Classification Tree ## Number of samples: 781 ## Number of predictors: 2 ## ## Tree size: 4 ## ## Non-standard options: attempt to group attributes bind_cols( predict(dt_cls_fit, data_test), predict(dt_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.233 0.767 ## 2 Class1 0.847 0.153 ## 3 Class1 0.847 0.153 ## 4 Class1 0.727 0.273 ## 5 Class1 0.727 0.273 ## 6 Class2 0.118 0.882 ## 7 Class2 0.233 0.767 ## 8 Class2 0.233 0.767 ## 9 Class1 0.847 0.153 ## 10 Class2 0.118 0.882"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"gen_additive_mod-models","dir":"Articles","previous_headings":"","what":"gen_additive_mod() 
models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] gam_reg_spec <- gen_additive_mod(select_features = FALSE, adjust_deg_free = 10) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"mgcv\") gam_reg_spec ## GAM Model Specification (regression) ## ## Main Arguments: ## select_features = FALSE ## adjust_deg_free = 10 ## ## Computational engine: mgcv set.seed(1) gam_reg_fit <- gam_reg_spec %>% fit(ridership ~ Clark_Lake + Quincy_Wells, data = Chicago_train) gam_reg_fit ## parsnip model object ## ## ## Family: gaussian ## Link function: identity ## ## Formula: ## ridership ~ Clark_Lake + Quincy_Wells ## Total model degrees of freedom 3 ## ## GCV score: 9.505245 predict(gam_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.3 ## 2 20.5 ## 3 20.8 ## 4 20.5 ## 5 18.8 ## 6 7.45 ## 7 7.02 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] gam_cls_spec <- gen_additive_mod(select_features = FALSE, adjust_deg_free = 10) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"mgcv\") gam_cls_spec ## 
GAM Model Specification (classification) ## ## Main Arguments: ## select_features = FALSE ## adjust_deg_free = 10 ## ## Computational engine: mgcv set.seed(1) gam_cls_fit <- gam_cls_spec %>% fit(Class ~ A + B, data = data_train) gam_cls_fit ## parsnip model object ## ## ## Family: binomial ## Link function: logit ## ## Formula: ## Class ~ A + B ## Total model degrees of freedom 3 ## ## UBRE score: -0.07548008 bind_cols( predict(gam_cls_fit, data_test), predict(gam_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class1 0.518 0.482 ## 2 Class1 0.909 0.0913 ## 3 Class1 0.648 0.352 ## 4 Class1 0.610 0.390 ## 5 Class2 0.443 0.557 ## 6 Class2 0.206 0.794 ## 7 Class1 0.708 0.292 ## 8 Class1 0.567 0.433 ## 9 Class1 0.994 0.00582 ## 10 Class2 0.108 0.892"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"linear_reg-models","dir":"Articles","previous_headings":"","what":"linear_reg() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: ’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: ’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. 
can define model specific parameters: Now create model fit object: holdout data can predicted: ’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: ’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] linreg_reg_spec <- linear_reg() %>% set_engine(\"lm\") linreg_reg_spec ## Linear Regression Model Specification (regression) ## ## Computational engine: lm set.seed(1) linreg_reg_fit <- linreg_reg_spec %>% fit(ridership ~ ., data = Chicago_train) linreg_reg_fit ## parsnip model object ## ## ## Call: ## stats::lm(formula = ridership ~ ., data = data) ## ## Coefficients: ## (Intercept) Clark_Lake Quincy_Wells ## 1.6624 0.7738 0.2557 predict(linreg_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.3 ## 2 20.5 ## 3 20.8 ## 4 20.5 ## 5 18.8 ## 6 7.45 ## 7 7.02 library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] linreg_reg_spec <- linear_reg() %>% set_engine(\"glm\") linreg_reg_spec ## Linear Regression Model Specification (regression) ## ## Computational engine: glm set.seed(1) linreg_reg_fit <- linreg_reg_spec %>% fit(ridership ~ ., data = Chicago_train) linreg_reg_fit ## parsnip 
model object ## ## ## Call: stats::glm(formula = ridership ~ ., family = stats::gaussian, ## data = data) ## ## Coefficients: ## (Intercept) Clark_Lake Quincy_Wells ## 1.6624 0.7738 0.2557 ## ## Degrees of Freedom: 5690 Total (i.e. Null); 5688 Residual ## Null Deviance: 245000 ## Residual Deviance: 53530 AIC: 28910 predict(linreg_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.3 ## 2 20.5 ## 3 20.8 ## 4 20.5 ## 5 18.8 ## 6 7.45 ## 7 7.02 library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] linreg_reg_spec <- linear_reg(penalty = 0.1) %>% set_engine(\"glmnet\") linreg_reg_spec ## Linear Regression Model Specification (regression) ## ## Main Arguments: ## penalty = 0.1 ## ## Computational engine: glmnet set.seed(1) linreg_reg_fit <- linreg_reg_spec %>% fit(ridership ~ ., data = Chicago_train) linreg_reg_fit ## parsnip model object ## ## ## Call: glmnet::glmnet(x = maybe_matrix(x), y = y, family = \"gaussian\") ## ## Df %Dev Lambda ## 1 0 0.00 5.7970 ## 2 1 13.25 5.2820 ## 3 1 24.26 4.8130 ## 4 1 33.40 4.3850 ## 5 1 40.98 3.9960 ## 6 1 47.28 3.6410 ## 7 1 52.51 3.3170 ## 8 1 56.85 3.0220 ## 9 1 60.45 2.7540 ## 10 1 63.44 2.5090 ## 11 1 65.92 2.2860 ## 12 1 67.99 2.0830 ## 13 1 69.70 1.8980 ## 14 1 71.12 1.7300 ## 15 1 72.30 1.5760 ## 16 2 73.29 1.4360 ## 17 2 74.11 1.3080 ## 18 2 74.80 1.1920 ## 19 2 75.37 1.0860 ## 20 2 75.84 0.9897 ## 21 2 76.23 0.9018 ## 22 2 76.56 0.8217 ## 23 2 76.83 0.7487 ## 24 2 77.05 0.6822 ## 25 2 77.24 0.6216 ## 26 2 77.39 0.5664 ## 27 2 77.52 0.5160 ## 28 2 77.63 0.4702 ## 29 2 77.72 0.4284 ## 30 2 77.79 0.3904 ## 31 2 77.85 0.3557 ## 32 2 77.90 0.3241 ## 33 2 77.94 0.2953 ## 34 2 77.98 0.2691 ## 35 2 78.01 0.2452 ## 36 2 78.03 0.2234 ## 37 2 78.05 0.2035 ## 38 2 78.07 0.1855 ## 39 2 78.08 0.1690 ## 40 2 78.09 0.1540 ## 41 2 78.10 0.1403 ## 42 2 78.11 0.1278 ## 43 
2 78.12 0.1165 ## 44 2 78.12 0.1061 ## 45 2 78.13 0.0967 ## 46 2 78.13 0.0881 ## 47 2 78.13 0.0803 ## 48 2 78.14 0.0732 ## 49 2 78.14 0.0666 ## 50 2 78.14 0.0607 ## 51 2 78.14 0.0553 ## 52 2 78.14 0.0504 ## 53 2 78.14 0.0459 ## 54 2 78.15 0.0419 ## 55 2 78.15 0.0381 predict(linreg_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.2 ## 2 20.4 ## 3 20.7 ## 4 20.4 ## 5 18.7 ## 6 7.57 ## 7 7.15 library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] linreg_reg_spec <- linear_reg(penalty = 0.1) %>% set_engine(\"keras\") linreg_reg_spec ## Linear Regression Model Specification (regression) ## ## Main Arguments: ## penalty = 0.1 ## ## Computational engine: keras set.seed(1) linreg_reg_fit <- linreg_reg_spec %>% fit(ridership ~ ., data = Chicago_train) ## Epoch 1/20 ## 1/178 [..............................] - ETA: 38s - loss: 17.4307 70/178 [==========>...................] - ETA: 0s - loss: 10.6130 138/178 [======================>.......] - ETA: 0s - loss: 10.5614 178/178 [==============================] - 0s 747us/step - loss: 10.0142 ## Epoch 2/20 ## 1/178 [..............................] - ETA: 0s - loss: 8.4581 68/178 [==========>...................] - ETA: 0s - loss: 9.6488 136/178 [=====================>........] - ETA: 0s - loss: 10.2243 178/178 [==============================] - 0s 835us/step - loss: 9.9035 ## Epoch 3/20 ## 1/178 [..............................] - ETA: 0s - loss: 12.3253 69/178 [==========>...................] - ETA: 0s - loss: 9.2844 138/178 [======================>.......] - ETA: 0s - loss: 9.5535 178/178 [==============================] - 0s 743us/step - loss: 9.8282 ## Epoch 4/20 ## 1/178 [..............................] - ETA: 0s - loss: 6.8442 70/178 [==========>...................] - ETA: 0s - loss: 9.5057 139/178 [======================>.......] 
- ETA: 0s - loss: 10.0353 178/178 [==============================] - 0s 740us/step - loss: 9.7801 ## Epoch 5/20 ## 1/178 [..............................] - ETA: 0s - loss: 5.8707 70/178 [==========>...................] - ETA: 0s - loss: 9.3942 139/178 [======================>.......] - ETA: 0s - loss: 9.5975 178/178 [==============================] - 0s 742us/step - loss: 9.7271 ## Epoch 6/20 ## 1/178 [..............................] - ETA: 0s - loss: 14.4505 69/178 [==========>...................] - ETA: 0s - loss: 9.6746 123/178 [===================>..........] - ETA: 0s - loss: 9.7878 178/178 [==============================] - 0s 807us/step - loss: 9.6887 ## Epoch 7/20 ## 1/178 [..............................] - ETA: 0s - loss: 6.0710 70/178 [==========>...................] - ETA: 0s - loss: 8.7914 139/178 [======================>.......] - ETA: 0s - loss: 9.5653 178/178 [==============================] - 0s 736us/step - loss: 9.6613 ## Epoch 8/20 ## 1/178 [..............................] - ETA: 0s - loss: 3.1123 71/178 [==========>...................] - ETA: 0s - loss: 9.0030 141/178 [======================>.......] - ETA: 0s - loss: 9.4412 178/178 [==============================] - 0s 733us/step - loss: 9.6261 ## Epoch 9/20 ## 1/178 [..............................] - ETA: 0s - loss: 5.1978 70/178 [==========>...................] - ETA: 0s - loss: 9.6703 137/178 [======================>.......] - ETA: 0s - loss: 9.8151 178/178 [==============================] - 0s 745us/step - loss: 9.6121 ## Epoch 10/20 ## 1/178 [..............................] - ETA: 0s - loss: 9.9975 71/178 [==========>...................] - ETA: 0s - loss: 9.9440 140/178 [======================>.......] - ETA: 0s - loss: 9.2790 178/178 [==============================] - 0s 735us/step - loss: 9.5944 ## Epoch 11/20 ## 1/178 [..............................] - ETA: 0s - loss: 6.5948 70/178 [==========>...................] - ETA: 0s - loss: 10.5874 140/178 [======================>.......] 
- ETA: 0s - loss: 9.8003 178/178 [==============================] - 0s 734us/step - loss: 9.5830 ## Epoch 12/20 ## 1/178 [..............................] - ETA: 0s - loss: 5.6675 70/178 [==========>...................] - ETA: 0s - loss: 10.0886 140/178 [======================>.......] - ETA: 0s - loss: 10.0484 178/178 [==============================] - 0s 738us/step - loss: 9.5807 ## Epoch 13/20 ## 1/178 [..............................] - ETA: 0s - loss: 16.1351 69/178 [==========>...................] - ETA: 0s - loss: 9.5628 139/178 [======================>.......] - ETA: 0s - loss: 9.1581 178/178 [==============================] - 0s 739us/step - loss: 9.5583 ## Epoch 14/20 ## 1/178 [..............................] - ETA: 0s - loss: 3.0510 70/178 [==========>...................] - ETA: 0s - loss: 8.9868 140/178 [======================>.......] - ETA: 0s - loss: 9.0619 178/178 [==============================] - 0s 738us/step - loss: 9.5608 ## Epoch 15/20 ## 1/178 [..............................] - ETA: 0s - loss: 7.4849 70/178 [==========>...................] - ETA: 0s - loss: 9.5764 140/178 [======================>.......] - ETA: 0s - loss: 9.0276 178/178 [==============================] - 0s 734us/step - loss: 9.5466 ## Epoch 16/20 ## 1/178 [..............................] - ETA: 0s - loss: 2.8376 71/178 [==========>...................] - ETA: 0s - loss: 9.8700 140/178 [======================>.......] - ETA: 0s - loss: 9.9571 178/178 [==============================] - 0s 734us/step - loss: 9.5364 ## Epoch 17/20 ## 1/178 [..............................] - ETA: 0s - loss: 7.4454 70/178 [==========>...................] - ETA: 0s - loss: 10.0004 139/178 [======================>.......] - ETA: 0s - loss: 9.5371 178/178 [==============================] - 0s 738us/step - loss: 9.5360 ## Epoch 18/20 ## 1/178 [..............................] - ETA: 0s - loss: 20.2563 71/178 [==========>...................] - ETA: 0s - loss: 9.3077 140/178 [======================>.......] 
- ETA: 0s - loss: 9.4210 178/178 [==============================] - 0s 736us/step - loss: 9.5309 ## Epoch 19/20 ## 1/178 [..............................] - ETA: 0s - loss: 12.4499 70/178 [==========>...................] - ETA: 0s - loss: 10.3009 139/178 [======================>.......] - ETA: 0s - loss: 9.9400 178/178 [==============================] - 0s 740us/step - loss: 9.5303 ## Epoch 20/20 ## 1/178 [..............................] - ETA: 0s - loss: 1.9899 70/178 [==========>...................] - ETA: 0s - loss: 8.8502 139/178 [======================>.......] - ETA: 0s - loss: 9.6911 178/178 [==============================] - 0s 735us/step - loss: 9.5231 linreg_reg_fit ## parsnip model object ## ## Model: \"sequential\" ## __________________________________________________________________________ ## Layer (type) Output Shape Param # ## ========================================================================== ## dense (Dense) (None, 1) 3 ## dense_1 (Dense) (None, 1) 2 ## ========================================================================== ## Total params: 5 ## Trainable params: 5 ## Non-trainable params: 0 ## __________________________________________________________________________ predict(linreg_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.4 ## 2 20.6 ## 3 20.9 ## 4 20.6 ## 5 18.9 ## 6 7.44 ## 7 7.08 library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] linreg_reg_spec <- linear_reg() %>% set_engine(\"stan\") linreg_reg_spec ## Linear Regression Model Specification (regression) ## ## Computational engine: stan set.seed(1) linreg_reg_fit <- linreg_reg_spec %>% fit(ridership ~ ., data = Chicago_train) linreg_reg_fit ## parsnip model object ## ## stan_glm ## family: gaussian [identity] ## formula: ridership ~ . 
## observations: 5691 ## predictors: 3 ## ------ ## Median MAD_SD ## (Intercept) 1.7 0.1 ## Clark_Lake 0.8 0.0 ## Quincy_Wells 0.3 0.1 ## ## Auxiliary parameter(s): ## Median MAD_SD ## sigma 3.1 0.0 ## ## ------ ## * For help interpreting the printed output see ?print.stanreg ## * For info on the priors used see ?prior_summary.stanreg predict(linreg_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.3 ## 2 20.5 ## 3 20.8 ## 4 20.5 ## 5 18.8 ## 6 7.45 ## 7 7.02"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"logistic_reg-models","dir":"Articles","previous_headings":"","what":"logistic_reg() models","title":"Fitting and predicting with parsnip","text":"example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. 
’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] logreg_cls_spec <- logistic_reg() %>% set_engine(\"glm\") logreg_cls_spec ## Logistic Regression Model Specification (classification) ## ## Computational engine: glm set.seed(1) logreg_cls_fit <- logreg_cls_spec %>% fit(Class ~ ., data = data_train) logreg_cls_fit ## parsnip model object ## ## ## Call: stats::glm(formula = Class ~ ., family = stats::binomial, data = data) ## ## Coefficients: ## (Intercept) A B ## -3.755 -1.259 3.855 ## ## Degrees of Freedom: 780 Total (i.e. Null); 778 Residual ## Null Deviance: 1073 ## Residual Deviance: 662.1 AIC: 668.1 bind_cols( predict(logreg_cls_fit, data_test), predict(logreg_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class1 0.518 0.482 ## 2 Class1 0.909 0.0913 ## 3 Class1 0.648 0.352 ## 4 Class1 0.610 0.390 ## 5 Class2 0.443 0.557 ## 6 Class2 0.206 0.794 ## 7 Class1 0.708 0.292 ## 8 Class1 0.567 0.433 ## 9 Class1 0.994 0.00582 ## 10 Class2 0.108 0.892 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] logreg_cls_spec <- logistic_reg(penalty = 0.1) %>% set_engine(\"glmnet\") logreg_cls_spec ## Logistic Regression Model Specification (classification) ## ## Main Arguments: ## penalty = 0.1 ## ## Computational engine: glmnet set.seed(1) logreg_cls_fit <- logreg_cls_spec %>% fit(Class ~ ., data = data_train) logreg_cls_fit ## parsnip model object ## ## ## Call: glmnet::glmnet(x = maybe_matrix(x), y = y, family = \"binomial\") ## ## Df %Dev Lambda ## 1 0 0.00 0.308500 ## 2 1 4.76 0.281100 ## 3 1 8.75 0.256100 ## 4 1 12.13 0.233300 ## 5 1 15.01 0.212600 ## 6 1 17.50 0.193700 ## 7 1 19.64 0.176500 ## 8 1 21.49 0.160800 ## 9 1 23.10 0.146500 ## 10 1 24.49 0.133500 ## 11 1 25.71 0.121700 ## 12 1 26.76 0.110900 ## 
13 1 27.67 0.101000 ## 14 1 28.46 0.092030 ## 15 1 29.15 0.083860 ## 16 1 29.74 0.076410 ## 17 1 30.25 0.069620 ## 18 1 30.70 0.063430 ## 19 1 31.08 0.057800 ## 20 1 31.40 0.052660 ## 21 1 31.68 0.047990 ## 22 1 31.92 0.043720 ## 23 1 32.13 0.039840 ## 24 2 32.70 0.036300 ## 25 2 33.50 0.033070 ## 26 2 34.18 0.030140 ## 27 2 34.78 0.027460 ## 28 2 35.29 0.025020 ## 29 2 35.72 0.022800 ## 30 2 36.11 0.020770 ## 31 2 36.43 0.018930 ## 32 2 36.71 0.017250 ## 33 2 36.96 0.015710 ## 34 2 37.16 0.014320 ## 35 2 37.34 0.013050 ## 36 2 37.49 0.011890 ## 37 2 37.62 0.010830 ## 38 2 37.73 0.009868 ## 39 2 37.82 0.008992 ## 40 2 37.90 0.008193 ## 41 2 37.97 0.007465 ## 42 2 38.02 0.006802 ## 43 2 38.07 0.006198 ## 44 2 38.11 0.005647 ## 45 2 38.15 0.005145 ## 46 2 38.18 0.004688 ## 47 2 38.20 0.004272 ## 48 2 38.22 0.003892 ## 49 2 38.24 0.003547 ## 50 2 38.25 0.003231 ## 51 2 38.26 0.002944 ## 52 2 38.27 0.002683 ## 53 2 38.28 0.002444 ## 54 2 38.29 0.002227 ## 55 2 38.29 0.002029 ## 56 2 38.30 0.001849 ## 57 2 38.30 0.001685 ## 58 2 38.31 0.001535 ## 59 2 38.31 0.001399 ## 60 2 38.31 0.001275 ## 61 2 38.31 0.001161 ## 62 2 38.32 0.001058 ## 63 2 38.32 0.000964 ## 64 2 38.32 0.000879 ## 65 2 38.32 0.000800 bind_cols( predict(logreg_cls_fit, data_test), predict(logreg_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class1 0.530 0.470 ## 2 Class1 0.713 0.287 ## 3 Class1 0.616 0.384 ## 4 Class2 0.416 0.584 ## 5 Class2 0.417 0.583 ## 6 Class2 0.288 0.712 ## 7 Class1 0.554 0.446 ## 8 Class1 0.557 0.443 ## 9 Class1 0.820 0.180 ## 10 Class2 0.206 0.794 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] logreg_cls_spec <- logistic_reg(penalty = 0.1) %>% set_engine(\"keras\") logreg_cls_spec ## Logistic Regression Model Specification (classification) ## ## Main Arguments: ## penalty = 0.1 ## ## Computational engine: keras set.seed(1) 
logreg_cls_fit <- logreg_cls_spec %>% fit(Class ~ ., data = data_train) ## Epoch 1/20 ## 1/25 [>.............................] - ETA: 7s - loss: 0.9321 25/25 [==============================] - 0s 850us/step - loss: 0.8988 ## Epoch 2/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.9018 25/25 [==============================] - 0s 834us/step - loss: 0.8877 ## Epoch 3/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.9434 25/25 [==============================] - 0s 858us/step - loss: 0.8770 ## Epoch 4/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.8521 25/25 [==============================] - 0s 835us/step - loss: 0.8662 ## Epoch 5/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.9705 25/25 [==============================] - 0s 862us/step - loss: 0.8558 ## Epoch 6/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.8390 25/25 [==============================] - 0s 881us/step - loss: 0.8457 ## Epoch 7/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.8311 25/25 [==============================] - 0s 866us/step - loss: 0.8357 ## Epoch 8/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7976 25/25 [==============================] - 0s 883us/step - loss: 0.8259 ## Epoch 9/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.8802 25/25 [==============================] - 0s 864us/step - loss: 0.8165 ## Epoch 10/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7910 25/25 [==============================] - 0s 867us/step - loss: 0.8071 ## Epoch 11/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.8217 25/25 [==============================] - 0s 863us/step - loss: 0.7980 ## Epoch 12/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.8311 25/25 [==============================] - 0s 863us/step - loss: 0.7893 ## Epoch 13/20 ## 1/25 [>.............................] 
- ETA: 0s - loss: 0.8042 25/25 [==============================] - 0s 869us/step - loss: 0.7804 ## Epoch 14/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7928 25/25 [==============================] - 0s 873us/step - loss: 0.7718 ## Epoch 15/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7808 25/25 [==============================] - 0s 857us/step - loss: 0.7634 ## Epoch 16/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7588 25/25 [==============================] - 0s 856us/step - loss: 0.7553 ## Epoch 17/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7480 25/25 [==============================] - 0s 869us/step - loss: 0.7471 ## Epoch 18/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7732 25/25 [==============================] - 0s 861us/step - loss: 0.7392 ## Epoch 19/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7394 25/25 [==============================] - 0s 868us/step - loss: 0.7314 ## Epoch 20/20 ## 1/25 [>.............................] 
- ETA: 0s - loss: 0.6844 25/25 [==============================] - 0s 847us/step - loss: 0.7239 logreg_cls_fit ## parsnip model object ## ## Model: \"sequential_1\" ## __________________________________________________________________________ ## Layer (type) Output Shape Param # ## ========================================================================== ## dense_2 (Dense) (None, 1) 3 ## dense_3 (Dense) (None, 2) 4 ## ========================================================================== ## Total params: 7 ## Trainable params: 7 ## Non-trainable params: 0 ## __________________________________________________________________________ bind_cols( predict(logreg_cls_fit, data_test), predict(logreg_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class1 0.509 0.491 ## 2 Class1 0.836 0.164 ## 3 Class1 0.521 0.479 ## 4 Class1 0.828 0.172 ## 5 Class1 0.638 0.362 ## 6 Class1 0.534 0.466 ## 7 Class1 0.737 0.263 ## 8 Class1 0.525 0.475 ## 9 Class1 0.989 0.0108 ## 10 Class2 0.492 0.508 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] logreg_cls_spec <- logistic_reg(penalty = 0.1) %>% set_engine(\"LiblineaR\") logreg_cls_spec ## Logistic Regression Model Specification (classification) ## ## Main Arguments: ## penalty = 0.1 ## ## Computational engine: LiblineaR set.seed(1) logreg_cls_fit <- logreg_cls_spec %>% fit(Class ~ ., data = data_train) logreg_cls_fit ## parsnip model object ## ## $TypeDetail ## [1] \"L2-regularized logistic regression primal (L2R_LR)\" ## ## $Type ## [1] 0 ## ## $W ## A B Bias ## [1,] 1.219818 -3.759034 3.674861 ## ## $Bias ## [1] 1 ## ## $ClassNames ## [1] Class1 Class2 ## Levels: Class1 Class2 ## ## $NbClass ## [1] 2 ## ## attr(,\"class\") ## [1] \"LiblineaR\" bind_cols( predict(logreg_cls_fit, data_test), predict(logreg_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class 
.pred_Class1 .pred_Class2 ## ## 1 Class1 0.517 0.483 ## 2 Class1 0.904 0.0964 ## 3 Class1 0.645 0.355 ## 4 Class1 0.604 0.396 ## 5 Class2 0.442 0.558 ## 6 Class2 0.210 0.790 ## 7 Class1 0.702 0.298 ## 8 Class1 0.565 0.435 ## 9 Class1 0.993 0.00667 ## 10 Class2 0.112 0.888 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] logreg_cls_spec <- logistic_reg() %>% set_engine(\"stan\") logreg_cls_spec ## Logistic Regression Model Specification (classification) ## ## Computational engine: stan set.seed(1) logreg_cls_fit <- logreg_cls_spec %>% fit(Class ~ ., data = data_train) logreg_cls_fit ## parsnip model object ## ## stan_glm ## family: binomial [logit] ## formula: Class ~ . ## observations: 781 ## predictors: 3 ## ------ ## Median MAD_SD ## (Intercept) -3.8 0.3 ## A -1.3 0.2 ## B 3.9 0.3 ## ## ------ ## * For help interpreting the printed output see ?print.stanreg ## * For info on the priors used see ?prior_summary.stanreg bind_cols( predict(logreg_cls_fit, data_test), predict(logreg_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class1 0.518 0.482 ## 2 Class1 0.909 0.0909 ## 3 Class1 0.650 0.350 ## 4 Class1 0.609 0.391 ## 5 Class2 0.443 0.557 ## 6 Class2 0.206 0.794 ## 7 Class1 0.708 0.292 ## 8 Class1 0.568 0.432 ## 9 Class1 0.994 0.00580 ## 10 Class2 0.108 0.892"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"mars-models","dir":"Articles","previous_headings":"","what":"mars() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. 
predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] mars_reg_spec <- mars(prod_degree = 1, prune_method = \"backward\") %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"earth\") mars_reg_spec ## MARS Model Specification (regression) ## ## Main Arguments: ## prod_degree = 1 ## prune_method = backward ## ## Computational engine: earth set.seed(1) mars_reg_fit <- mars_reg_spec %>% fit(ridership ~ ., data = Chicago_train) ## ## Attaching package: 'plotrix' ## The following object is masked from 'package:scales': ## ## rescale mars_reg_fit ## parsnip model object ## ## Selected 5 of 6 terms, and 2 of 2 predictors ## Termination condition: RSq changed by less than 0.001 at 6 terms ## Importance: Clark_Lake, Quincy_Wells ## Number of terms at each degree of interaction: 1 4 (additive model) ## GCV 9.085818 RSS 51543.98 GRSq 0.7889881 RSq 0.789581 predict(mars_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.4 ## 2 20.7 ## 3 21.0 ## 4 20.7 ## 5 19.0 ## 6 7.99 ## 7 6.68 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] mars_cls_spec <- mars(prod_degree = 1, prune_method = \"backward\") %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"earth\") mars_cls_spec ## MARS Model Specification (classification) ## ## Main Arguments: ## prod_degree = 1 ## prune_method = backward ## ## Computational engine: earth set.seed(1) mars_cls_fit <- mars_cls_spec %>% fit(Class ~ ., data = data_train) mars_cls_fit 
## parsnip model object ## ## GLM (family binomial, link logit): ## nulldev df dev df devratio AIC iters converged ## 1073.43 780 632.723 775 0.411 644.7 5 1 ## ## Earth selected 6 of 13 terms, and 2 of 2 predictors ## Termination condition: Reached nk 21 ## Importance: B, A ## Number of terms at each degree of interaction: 1 5 (additive model) ## Earth GCV 0.1334948 RSS 101.3432 GRSq 0.461003 RSq 0.4747349 bind_cols( predict(mars_cls_fit, data_test), predict(mars_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.332 0.668 ## 2 Class1 0.845 0.155 ## 3 Class1 0.585 0.415 ## 4 Class1 0.690 0.310 ## 5 Class2 0.483 0.517 ## 6 Class2 0.318 0.682 ## 7 Class1 0.661 0.339 ## 8 Class2 0.398 0.602 ## 9 Class1 0.990 0.00972 ## 10 Class2 0.0625 0.938"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"mlp-models","dir":"Articles","previous_headings":"","what":"mlp() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: ’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. 
can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] mlp_reg_spec <- mlp(penalty = 0, epochs = 100) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"nnet\") mlp_reg_spec ## Single Layer Neural Network Model Specification (regression) ## ## Main Arguments: ## penalty = 0 ## epochs = 100 ## ## Computational engine: nnet set.seed(1) mlp_reg_fit <- mlp_reg_spec %>% fit(ridership ~ ., data = Chicago_train) mlp_reg_fit ## parsnip model object ## ## a 2-5-1 network with 21 weights ## inputs: Clark_Lake Quincy_Wells ## output(s): ridership ## options were - linear output units predict(mlp_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.5 ## 2 20.8 ## 3 21.1 ## 4 20.8 ## 5 18.8 ## 6 8.09 ## 7 6.22 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] mlp_cls_spec <- mlp(penalty = 0, epochs = 100) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"nnet\") mlp_cls_spec ## Single Layer Neural Network Model Specification (classification) ## ## Main Arguments: ## penalty = 0 ## epochs = 100 ## ## Computational engine: nnet set.seed(1) mlp_cls_fit <- mlp_cls_spec %>% fit(Class ~ ., data = data_train) mlp_cls_fit ## parsnip model object ## ## a 2-5-1 network with 21 weights ## inputs: A B ## output(s): Class ## options were - entropy fitting bind_cols( predict(mlp_cls_fit, data_test), predict(mlp_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.364 0.636 
## 2 Class1 0.691 0.309 ## 3 Class1 0.577 0.423 ## 4 Class1 0.686 0.314 ## 5 Class2 0.466 0.534 ## 6 Class2 0.339 0.661 ## 7 Class1 0.670 0.330 ## 8 Class2 0.384 0.616 ## 9 Class1 0.692 0.308 ## 10 Class2 0.330 0.670 library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] mlp_reg_spec <- mlp(penalty = 0, epochs = 20) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"keras\") mlp_reg_spec ## Single Layer Neural Network Model Specification (regression) ## ## Main Arguments: ## penalty = 0 ## epochs = 20 ## ## Computational engine: keras set.seed(1) mlp_reg_fit <- mlp_reg_spec %>% fit(ridership ~ ., data = Chicago_train) ## Epoch 1/20 ## 1/178 [..............................] - ETA: 33s - loss: 195.9154 70/178 [==========>...................] - ETA: 0s - loss: 213.1526 130/178 [====================>.........] - ETA: 0s - loss: 213.3227 178/178 [==============================] - 0s 783us/step - loss: 209.6472 ## Epoch 2/20 ## 1/178 [..............................] - ETA: 0s - loss: 199.8269 68/178 [==========>...................] - ETA: 0s - loss: 194.0793 135/178 [=====================>........] - ETA: 0s - loss: 191.9669 178/178 [==============================] - 0s 761us/step - loss: 190.0307 ## Epoch 3/20 ## 1/178 [..............................] - ETA: 0s - loss: 158.2600 68/178 [==========>...................] - ETA: 0s - loss: 182.6862 135/178 [=====================>........] - ETA: 0s - loss: 183.1960 178/178 [==============================] - 0s 761us/step - loss: 180.1448 ## Epoch 4/20 ## 1/178 [..............................] - ETA: 0s - loss: 215.8296 67/178 [==========>...................] - ETA: 0s - loss: 173.9482 134/178 [=====================>........] 
- ETA: 0s - loss: 172.0008 178/178 [==============================] - 0s 767us/step - loss: 171.5592 ## Epoch 5/20 ## 1/178 [..............................] - ETA: 0s - loss: 215.5383 68/178 [==========>...................] - ETA: 0s - loss: 167.2482 135/178 [=====================>........] - ETA: 0s - loss: 164.8201 178/178 [==============================] - 0s 766us/step - loss: 163.5937 ## Epoch 6/20 ## 1/178 [..............................] - ETA: 0s - loss: 135.6939 68/178 [==========>...................] - ETA: 0s - loss: 155.2889 135/178 [=====================>........] - ETA: 0s - loss: 156.1454 178/178 [==============================] - 0s 762us/step - loss: 156.0912 ## Epoch 7/20 ## 1/178 [..............................] - ETA: 0s - loss: 144.8098 68/178 [==========>...................] - ETA: 0s - loss: 152.5993 135/178 [=====================>........] - ETA: 0s - loss: 151.6861 178/178 [==============================] - 0s 765us/step - loss: 148.9920 ## Epoch 8/20 ## 1/178 [..............................] - ETA: 0s - loss: 107.1714 68/178 [==========>...................] - ETA: 0s - loss: 144.1254 135/178 [=====================>........] - ETA: 0s - loss: 142.7666 178/178 [==============================] - 0s 763us/step - loss: 142.2492 ## Epoch 9/20 ## 1/178 [..............................] - ETA: 0s - loss: 128.3298 68/178 [==========>...................] - ETA: 0s - loss: 139.5679 135/178 [=====================>........] - ETA: 0s - loss: 136.7252 178/178 [==============================] - 0s 762us/step - loss: 135.8328 ## Epoch 10/20 ## 1/178 [..............................] - ETA: 0s - loss: 150.5847 68/178 [==========>...................] - ETA: 0s - loss: 126.4342 135/178 [=====================>........] - ETA: 0s - loss: 129.5998 178/178 [==============================] - 0s 760us/step - loss: 129.7127 ## Epoch 11/20 ## 1/178 [..............................] - ETA: 0s - loss: 114.1486 67/178 [==========>...................] 
- ETA: 0s - loss: 125.5023 134/178 [=====================>........] - ETA: 0s - loss: 125.7385 178/178 [==============================] - 0s 767us/step - loss: 123.8771 ## Epoch 12/20 ## 1/178 [..............................] - ETA: 0s - loss: 90.9601 68/178 [==========>...................] - ETA: 0s - loss: 120.7318 134/178 [=====================>........] - ETA: 0s - loss: 119.0738 178/178 [==============================] - 0s 767us/step - loss: 118.3144 ## Epoch 13/20 ## 1/178 [..............................] - ETA: 0s - loss: 106.5899 68/178 [==========>...................] - ETA: 0s - loss: 114.7508 135/178 [=====================>........] - ETA: 0s - loss: 113.2992 178/178 [==============================] - 0s 763us/step - loss: 113.0014 ## Epoch 14/20 ## 1/178 [..............................] - ETA: 0s - loss: 118.1168 68/178 [==========>...................] - ETA: 0s - loss: 109.2904 134/178 [=====================>........] - ETA: 0s - loss: 107.8045 178/178 [==============================] - 0s 766us/step - loss: 107.9151 ## Epoch 15/20 ## 1/178 [..............................] - ETA: 0s - loss: 95.7403 67/178 [==========>...................] - ETA: 0s - loss: 102.9026 134/178 [=====================>........] - ETA: 0s - loss: 103.2705 178/178 [==============================] - 0s 766us/step - loss: 103.0387 ## Epoch 16/20 ## 1/178 [..............................] - ETA: 0s - loss: 93.6622 65/178 [=========>....................] - ETA: 0s - loss: 100.8347 132/178 [=====================>........] - ETA: 0s - loss: 98.9483 178/178 [==============================] - 0s 777us/step - loss: 98.3335 ## Epoch 17/20 ## 1/178 [..............................] - ETA: 0s - loss: 86.5690 68/178 [==========>...................] - ETA: 0s - loss: 96.3766 134/178 [=====================>........] - ETA: 0s - loss: 94.5167 178/178 [==============================] - 0s 766us/step - loss: 93.7449 ## Epoch 18/20 ## 1/178 [..............................] 
- ETA: 0s - loss: 97.1584 67/178 [==========>...................] - ETA: 0s - loss: 88.8328 134/178 [=====================>........] - ETA: 0s - loss: 90.4486 178/178 [==============================] - 0s 769us/step - loss: 89.2406 ## Epoch 19/20 ## 1/178 [..............................] - ETA: 0s - loss: 78.2105 67/178 [==========>...................] - ETA: 0s - loss: 85.0607 134/178 [=====================>........] - ETA: 0s - loss: 85.4981 178/178 [==============================] - 0s 765us/step - loss: 84.8999 ## Epoch 20/20 ## 1/178 [..............................] - ETA: 0s - loss: 74.8593 68/178 [==========>...................] - ETA: 0s - loss: 80.9710 135/178 [=====================>........] - ETA: 0s - loss: 80.7750 178/178 [==============================] - 0s 763us/step - loss: 80.7053 mlp_reg_fit ## parsnip model object ## ## Model: \"sequential_2\" ## __________________________________________________________________________ ## Layer (type) Output Shape Param # ## ========================================================================== ## dense_4 (Dense) (None, 5) 15 ## dense_5 (Dense) (None, 1) 6 ## ========================================================================== ## Total params: 21 ## Trainable params: 21 ## Non-trainable params: 0 ## __________________________________________________________________________ predict(mlp_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 7.50 ## 2 7.50 ## 3 7.50 ## 4 7.50 ## 5 7.50 ## 6 6.86 ## 7 6.69 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] mlp_cls_spec <- mlp(penalty = 0, epochs = 20) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"keras\") mlp_cls_spec ## Single Layer Neural Network Model Specification (classification) ## ## Main Arguments: ## penalty = 0 ## epochs = 20 ## ## Computational engine: keras set.seed(1) mlp_cls_fit 
<- mlp_cls_spec %>% fit(Class ~ ., data = data_train) ## Epoch 1/20 ## 1/25 [>.............................] - ETA: 5s - loss: 0.7017 25/25 [==============================] - 0s 860us/step - loss: 0.6993 ## Epoch 2/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6943 25/25 [==============================] - 0s 845us/step - loss: 0.6934 ## Epoch 3/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6993 25/25 [==============================] - 0s 845us/step - loss: 0.6876 ## Epoch 4/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6779 25/25 [==============================] - 0s 841us/step - loss: 0.6824 ## Epoch 5/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6796 25/25 [==============================] - 0s 865us/step - loss: 0.6774 ## Epoch 6/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6739 25/25 [==============================] - 0s 891us/step - loss: 0.6728 ## Epoch 7/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6699 25/25 [==============================] - 0s 873us/step - loss: 0.6683 ## Epoch 8/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6708 25/25 [==============================] - 0s 874us/step - loss: 0.6641 ## Epoch 9/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6659 25/25 [==============================] - 0s 873us/step - loss: 0.6598 ## Epoch 10/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6567 25/25 [==============================] - 0s 879us/step - loss: 0.6556 ## Epoch 11/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6423 25/25 [==============================] - 0s 862us/step - loss: 0.6515 ## Epoch 12/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6556 25/25 [==============================] - 0s 872us/step - loss: 0.6473 ## Epoch 13/20 ## 1/25 [>.............................] 
- ETA: 0s - loss: 0.6443 25/25 [==============================] - 0s 880us/step - loss: 0.6431 ## Epoch 14/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6518 25/25 [==============================] - 0s 893us/step - loss: 0.6389 ## Epoch 15/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6341 25/25 [==============================] - 0s 881us/step - loss: 0.6346 ## Epoch 16/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6137 25/25 [==============================] - 0s 868us/step - loss: 0.6301 ## Epoch 17/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6139 25/25 [==============================] - 0s 886us/step - loss: 0.6260 ## Epoch 18/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6361 25/25 [==============================] - 0s 885us/step - loss: 0.6214 ## Epoch 19/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6043 25/25 [==============================] - 0s 879us/step - loss: 0.6176 ## Epoch 20/20 ## 1/25 [>.............................] 
- ETA: 0s - loss: 0.6076 25/25 [==============================] - 0s 864us/step - loss: 0.6127 mlp_cls_fit ## parsnip model object ## ## Model: \"sequential_3\" ## __________________________________________________________________________ ## Layer (type) Output Shape Param # ## ========================================================================== ## dense_6 (Dense) (None, 5) 15 ## dense_7 (Dense) (None, 2) 12 ## ========================================================================== ## Total params: 27 ## Trainable params: 27 ## Non-trainable params: 0 ## __________________________________________________________________________ bind_cols( predict(mlp_cls_fit, data_test), predict(mlp_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class1 0.538 0.462 ## 2 Class1 0.662 0.338 ## 3 Class1 0.595 0.405 ## 4 Class2 0.477 0.523 ## 5 Class2 0.474 0.526 ## 6 Class2 0.410 0.590 ## 7 Class1 0.552 0.448 ## 8 Class1 0.555 0.445 ## 9 Class1 0.745 0.255 ## 10 Class2 0.373 0.627"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"multinom_reg-models","dir":"Articles","previous_headings":"","what":"multinom_reg() models","title":"Fitting and predicting with parsnip","text":"’ll predict island penguins observed two variables unit (mm): bill length bill depth. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: ’ll predict island penguins observed two variables unit (mm): bill length bill depth. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: ’ll predict island penguins observed two variables unit (mm): bill length bill depth. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. 
’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(penguins) penguins <- penguins %>% select(island, starts_with(\"bill_\")) penguins_train <- penguins[-c(21, 153, 31, 277, 1), ] penguins_test <- penguins[ c(21, 153, 31, 277, 1), ] mr_cls_spec <- multinom_reg(penalty = 0.1) %>% set_engine(\"glmnet\") mr_cls_spec ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = 0.1 ## ## Computational engine: glmnet set.seed(1) mr_cls_fit <- mr_cls_spec %>% fit(island ~ ., data = penguins_train) mr_cls_fit ## parsnip model object ## ## ## Call: glmnet::glmnet(x = maybe_matrix(x), y = y, family = \"multinomial\") ## ## Df %Dev Lambda ## 1 0 0.00 0.31730 ## 2 1 3.43 0.28910 ## 3 1 6.30 0.26340 ## 4 1 8.74 0.24000 ## 5 1 10.83 0.21870 ## 6 1 12.62 0.19930 ## 7 1 14.17 0.18160 ## 8 1 15.51 0.16540 ## 9 1 16.67 0.15070 ## 10 1 17.68 0.13740 ## 11 1 18.56 0.12520 ## 12 2 19.93 0.11400 ## 13 2 21.31 0.10390 ## 14 2 22.50 0.09467 ## 15 2 23.52 0.08626 ## 16 2 24.40 0.07860 ## 17 2 25.16 0.07162 ## 18 2 25.81 0.06526 ## 19 2 26.37 0.05946 ## 20 2 26.86 0.05418 ## 21 2 27.27 0.04936 ## 22 2 27.63 0.04498 ## 23 2 27.94 0.04098 ## 24 2 28.21 0.03734 ## 25 2 28.44 0.03402 ## 26 2 28.63 0.03100 ## 27 2 28.80 0.02825 ## 28 2 28.94 0.02574 ## 29 2 29.06 0.02345 ## 30 2 29.17 0.02137 ## 31 2 29.26 0.01947 ## 32 2 29.33 0.01774 ## 33 2 29.39 0.01616 ## 34 2 29.45 0.01473 ## 35 2 29.49 0.01342 ## 36 2 29.53 0.01223 ## 37 2 29.56 0.01114 ## 38 2 29.59 0.01015 ## 39 2 29.61 0.00925 ## 40 2 29.63 0.00843 ## 41 2 29.65 0.00768 ## 42 2 29.67 0.00700 ## 43 2 29.68 0.00638 ## 44 2 29.69 0.00581 ## 45 2 29.70 0.00529 ## 46 2 29.71 0.00482 ## 47 2 29.71 0.00439 ## 48 2 29.72 0.00400 ## 49 2 29.72 0.00365 ## 50 2 29.73 0.00332 ## 51 2 29.73 0.00303 ## 52 2 29.74 0.00276 ## 53 2 29.74 0.00251 ## 54 2 29.74 0.00229 ## 55 2 29.75 0.00209 ## 56 2 29.75 0.00190 ## 57 2 29.75 0.00173 ## 58 2 29.75 0.00158 ## 59 2 29.75 0.00144 ## 60 2 
29.75 0.00131 bind_cols( predict(mr_cls_fit, penguins_test), predict(mr_cls_fit, penguins_test, type = \"prob\") ) ## # A tibble: 5 × 4 ## .pred_class .pred_Biscoe .pred_Dream .pred_Torgersen ## ## 1 Dream 0.339 0.448 0.214 ## 2 Biscoe 0.879 0.0882 0.0331 ## 3 Biscoe 0.539 0.317 0.144 ## 4 Dream 0.403 0.435 0.162 ## 5 Dream 0.297 0.481 0.221 library(tidymodels) tidymodels_prefer() data(penguins) penguins <- penguins %>% select(island, starts_with(\"bill_\")) penguins_train <- penguins[-c(21, 153, 31, 277, 1), ] penguins_test <- penguins[ c(21, 153, 31, 277, 1), ] mr_cls_spec <- multinom_reg(penalty = 0.1) %>% set_engine(\"keras\") mr_cls_spec ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = 0.1 ## ## Computational engine: keras set.seed(1) mr_cls_fit <- mr_cls_spec %>% fit(island ~ ., data = penguins_train) ## Epoch 1/20 ## 1/11 [=>............................] - ETA: 2s - loss: 3.3637 11/11 [==============================] - 0s 964us/step - loss: 4.0321 ## Epoch 2/20 ## 1/11 [=>............................] - ETA: 0s - loss: 3.4470 11/11 [==============================] - 0s 932us/step - loss: 3.7883 ## Epoch 3/20 ## 1/11 [=>............................] - ETA: 0s - loss: 3.6557 11/11 [==============================] - 0s 919us/step - loss: 3.5551 ## Epoch 4/20 ## 1/11 [=>............................] - ETA: 0s - loss: 3.0647 11/11 [==============================] - 0s 902us/step - loss: 3.3361 ## Epoch 5/20 ## 1/11 [=>............................] - ETA: 0s - loss: 2.2162 11/11 [==============================] - 0s 904us/step - loss: 3.1318 ## Epoch 6/20 ## 1/11 [=>............................] - ETA: 0s - loss: 3.7893 11/11 [==============================] - 0s 899us/step - loss: 2.9391 ## Epoch 7/20 ## 1/11 [=>............................] - ETA: 0s - loss: 3.1412 11/11 [==============================] - 0s 905us/step - loss: 2.7622 ## Epoch 8/20 ## 1/11 [=>............................] 
- ETA: 0s - loss: 2.7488 11/11 [==============================] - 0s 919us/step - loss: 2.6005 ## Epoch 9/20 ## 1/11 [=>............................] - ETA: 0s - loss: 2.7304 11/11 [==============================] - 0s 894us/step - loss: 2.4571 ## Epoch 10/20 ## 1/11 [=>............................] - ETA: 0s - loss: 3.2580 11/11 [==============================] - 0s 911us/step - loss: 2.3275 ## Epoch 11/20 ## 1/11 [=>............................] - ETA: 0s - loss: 1.9137 11/11 [==============================] - 0s 929us/step - loss: 2.2151 ## Epoch 12/20 ## 1/11 [=>............................] - ETA: 0s - loss: 2.0605 11/11 [==============================] - 0s 951us/step - loss: 2.1127 ## Epoch 13/20 ## 1/11 [=>............................] - ETA: 0s - loss: 1.9462 11/11 [==============================] - 0s 960us/step - loss: 2.0275 ## Epoch 14/20 ## 1/11 [=>............................] - ETA: 0s - loss: 1.7585 11/11 [==============================] - 0s 918us/step - loss: 1.9550 ## Epoch 15/20 ## 1/11 [=>............................] - ETA: 0s - loss: 1.6617 11/11 [==============================] - 0s 914us/step - loss: 1.8890 ## Epoch 16/20 ## 1/11 [=>............................] - ETA: 0s - loss: 1.6871 11/11 [==============================] - 0s 924us/step - loss: 1.8379 ## Epoch 17/20 ## 1/11 [=>............................] - ETA: 0s - loss: 1.6374 11/11 [==============================] - 0s 925us/step - loss: 1.7913 ## Epoch 18/20 ## 1/11 [=>............................] - ETA: 0s - loss: 1.9343 11/11 [==============================] - 0s 941us/step - loss: 1.7521 ## Epoch 19/20 ## 1/11 [=>............................] - ETA: 0s - loss: 1.7206 11/11 [==============================] - 0s 939us/step - loss: 1.7162 ## Epoch 20/20 ## 1/11 [=>............................] 
- ETA: 0s - loss: 1.9875 11/11 [==============================] - 0s 905us/step - loss: 1.6881 mr_cls_fit ## parsnip model object ## ## Model: \"sequential_4\" ## __________________________________________________________________________ ## Layer (type) Output Shape Param # ## ========================================================================== ## dense_8 (Dense) (None, 1) 3 ## dense_9 (Dense) (None, 3) 6 ## ========================================================================== ## Total params: 9 ## Trainable params: 9 ## Non-trainable params: 0 ## __________________________________________________________________________ bind_cols( predict(mr_cls_fit, penguins_test), predict(mr_cls_fit, penguins_test, type = \"prob\") ) ## # A tibble: 5 × 4 ## .pred_class .pred_Biscoe .pred_Dream .pred_Torgersen ## ## 1 Torgersen 0.286 0.0175 0.697 ## 2 Dream 0.000112 1.00 0.00000200 ## 3 Dream 0.318 0.474 0.207 ## 4 Dream 0.0495 0.939 0.0113 ## 5 Torgersen 0.304 0.0235 0.673 library(tidymodels) tidymodels_prefer() data(penguins) penguins <- penguins %>% select(island, starts_with(\"bill_\")) penguins_train <- penguins[-c(21, 153, 31, 277, 1), ] penguins_test <- penguins[ c(21, 153, 31, 277, 1), ] mr_cls_spec <- multinom_reg(penalty = 0.1) %>% set_engine(\"nnet\") mr_cls_spec ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = 0.1 ## ## Computational engine: nnet set.seed(1) mr_cls_fit <- mr_cls_spec %>% fit(island ~ ., data = penguins_train) mr_cls_fit ## parsnip model object ## ## Call: ## nnet::multinom(formula = island ~ ., data = data, decay = ~0.1, ## trace = FALSE) ## ## Coefficients: ## (Intercept) bill_length_mm bill_depth_mm ## Dream -8.243575 -0.0580960 0.6168318 ## Torgersen -1.610588 -0.2789588 0.6978480 ## ## Residual Deviance: 502.5009 ## AIC: 514.5009 bind_cols( predict(mr_cls_fit, penguins_test), predict(mr_cls_fit, penguins_test, type = \"prob\") ) ## # A tibble: 5 × 4 ## .pred_class .pred_Biscoe .pred_Dream 
.pred_Torgersen ## ## 1 Dream 0.193 0.450 0.357 ## 2 Biscoe 0.937 0.0582 0.00487 ## 3 Biscoe 0.462 0.364 0.174 ## 4 Dream 0.450 0.495 0.0556 ## 5 Dream 0.183 0.506 0.311"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"nearest_neighbor-models","dir":"Articles","previous_headings":"","what":"nearest_neighbor() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. Since two classes, ’ll use odd number neighbors avoid ties: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] knn_reg_spec <- nearest_neighbor(neighbors = 5, weight_func = \"triangular\") %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"kknn\") knn_reg_spec ## K-Nearest Neighbor Model Specification (regression) ## ## Main Arguments: ## neighbors = 5 ## weight_func = triangular ## ## Computational engine: kknn knn_reg_fit <- knn_reg_spec %>% fit(ridership ~ ., data = Chicago_train) knn_reg_fit ## parsnip model object ## ## ## Call: ## kknn::train.kknn(formula = ridership ~ ., data = data, ks = min_rows(5, data, 5), kernel = ~\"triangular\") ## ## Type of response variable: continuous ## minimal mean absolute error: 1.79223 ## Minimal mean squared error: 11.21809 ## Best kernel: triangular ## Best k: 5 predict(knn_reg_fit, Chicago_test) ## # A 
tibble: 7 × 1 ## .pred ## ## 1 20.5 ## 2 21.1 ## 3 21.4 ## 4 21.8 ## 5 19.5 ## 6 7.83 ## 7 5.54 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] knn_cls_spec <- nearest_neighbor(neighbors = 11, weight_func = \"triangular\") %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"kknn\") knn_cls_spec ## K-Nearest Neighbor Model Specification (classification) ## ## Main Arguments: ## neighbors = 11 ## weight_func = triangular ## ## Computational engine: kknn knn_cls_fit <- knn_cls_spec %>% fit(Class ~ ., data = data_train) knn_cls_fit ## parsnip model object ## ## ## Call: ## kknn::train.kknn(formula = Class ~ ., data = data, ks = min_rows(11, data, 5), kernel = ~\"triangular\") ## ## Type of response variable: nominal ## Minimal misclassification: 0.1869398 ## Best kernel: triangular ## Best k: 11 bind_cols( predict(knn_cls_fit, data_test), predict(knn_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.177 0.823 ## 2 Class1 0.995 0.00515 ## 3 Class1 0.590 0.410 ## 4 Class1 0.770 0.230 ## 5 Class2 0.333 0.667 ## 6 Class2 0.182 0.818 ## 7 Class1 0.692 0.308 ## 8 Class2 0.400 0.600 ## 9 Class1 0.814 0.186 ## 10 Class2 0.0273 0.973"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"rand_forest-models","dir":"Articles","previous_headings":"","what":"rand_forest() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. 
can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: ’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] rf_reg_spec <- rand_forest(trees = 200, min_n = 5) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"ranger\") rf_reg_spec ## Random Forest Model Specification (regression) ## ## Main Arguments: ## trees = 200 ## min_n = 5 ## ## Computational engine: ranger set.seed(1) rf_reg_fit <- rf_reg_spec %>% fit(ridership ~ ., data = Chicago_train) rf_reg_fit ## parsnip model object ## ## Ranger result ## ## Call: ## ranger::ranger(x = maybe_data_frame(x), y = y, num.trees = ~200, min.node.size = min_rows(~5, x), num.threads = 1, verbose = FALSE, seed = sample.int(10^5, 1)) ## ## Type: Regression ## Number of trees: 200 ## Sample size: 5691 ## Number of independent variables: 2 ## Mtry: 1 ## Target node size: 5 ## Variable importance mode: none ## Splitrule: variance ## OOB prediction error (MSE): 9.72953 ## R squared (OOB): 0.7739986 predict(rf_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.4 ## 2 21.5 ## 3 20.8 ## 4 21.6 ## 5 19.4 ## 6 7.32 ## 7 6.03 library(tidymodels) 
tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] rf_cls_spec <- rand_forest(trees = 200, min_n = 5) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"ranger\") rf_cls_spec ## Random Forest Model Specification (classification) ## ## Main Arguments: ## trees = 200 ## min_n = 5 ## ## Computational engine: ranger set.seed(1) rf_cls_fit <- rf_cls_spec %>% fit(Class ~ ., data = data_train) rf_cls_fit ## parsnip model object ## ## Ranger result ## ## Call: ## ranger::ranger(x = maybe_data_frame(x), y = y, num.trees = ~200, min.node.size = min_rows(~5, x), num.threads = 1, verbose = FALSE, seed = sample.int(10^5, 1), probability = TRUE) ## ## Type: Probability estimation ## Number of trees: 200 ## Sample size: 781 ## Number of independent variables: 2 ## Mtry: 1 ## Target node size: 5 ## Variable importance mode: none ## Splitrule: gini ## OOB prediction error (Brier s.): 0.1534794 bind_cols( predict(rf_cls_fit, data_test), predict(rf_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.274 0.725 ## 2 Class1 0.928 0.0716 ## 3 Class2 0.497 0.503 ## 4 Class1 0.703 0.297 ## 5 Class2 0.302 0.698 ## 6 Class2 0.151 0.849 ## 7 Class1 0.701 0.299 ## 8 Class1 0.592 0.409 ## 9 Class1 0.752 0.248 ## 10 Class2 0.00225 0.998 library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] rf_reg_spec <- rand_forest(trees = 200, min_n = 5) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"randomForest\") rf_reg_spec ## Random Forest Model Specification (regression) ## ## Main Arguments: ## trees = 200 ## min_n = 5 ## ## Computational engine: randomForest set.seed(1) 
rf_reg_fit <- rf_reg_spec %>% fit(ridership ~ ., data = Chicago_train) rf_reg_fit ## parsnip model object ## ## ## Call: ## randomForest(x = maybe_data_frame(x), y = y, ntree = ~200, nodesize = min_rows(~5, x)) ## Type of random forest: regression ## Number of trees: 200 ## No. of variables tried at each split: 1 ## ## Mean of squared residuals: 9.696736 ## % Var explained: 77.47 predict(rf_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.4 ## 2 21.6 ## 3 20.9 ## 4 21.6 ## 5 19.3 ## 6 7.33 ## 7 6.16 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] rf_cls_spec <- rand_forest(trees = 200, min_n = 5) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"randomForest\") rf_cls_spec ## Random Forest Model Specification (classification) ## ## Main Arguments: ## trees = 200 ## min_n = 5 ## ## Computational engine: randomForest set.seed(1) rf_cls_fit <- rf_cls_spec %>% fit(Class ~ ., data = data_train) rf_cls_fit ## parsnip model object ## ## ## Call: ## randomForest(x = maybe_data_frame(x), y = y, ntree = ~200, nodesize = min_rows(~5, x)) ## Type of random forest: classification ## Number of trees: 200 ## No. 
of variables tried at each split: 1 ## ## OOB estimate of error rate: 19.72% ## Confusion matrix: ## Class1 Class2 class.error ## Class1 363 70 0.1616628 ## Class2 84 264 0.2413793 bind_cols( predict(rf_cls_fit, data_test), predict(rf_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.23 0.77 ## 2 Class1 0.95 0.05 ## 3 Class1 0.59 0.41 ## 4 Class1 0.75 0.25 ## 5 Class2 0.305 0.695 ## 6 Class2 0.105 0.895 ## 7 Class1 0.685 0.315 ## 8 Class1 0.63 0.37 ## 9 Class1 0.79 0.21 ## 10 Class2 0.02 0.98"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"svm_linear-models","dir":"Articles","previous_headings":"","what":"svm_linear() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions. ’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. 
’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] svm_reg_spec <- svm_linear(cost = 1, margin = 0.1) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"LiblineaR\") svm_reg_spec ## Linear Support Vector Machine Model Specification (regression) ## ## Main Arguments: ## cost = 1 ## margin = 0.1 ## ## Computational engine: LiblineaR set.seed(1) svm_reg_fit <- svm_reg_spec %>% fit(ridership ~ ., data = Chicago_train) svm_reg_fit ## parsnip model object ## ## $TypeDetail ## [1] \"L2-regularized L2-loss support vector regression primal (L2R_L2LOSS_SVR)\" ## ## $Type ## [1] 11 ## ## $W ## Clark_Lake Quincy_Wells Bias ## [1,] 0.8277352 0.3430336 0.05042585 ## ## $Bias ## [1] 1 ## ## $NbClass ## [1] 2 ## ## attr(,\"class\") ## [1] \"LiblineaR\" predict(svm_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.6 ## 2 20.8 ## 3 21.1 ## 4 20.8 ## 5 18.9 ## 6 6.40 ## 7 5.90 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] svm_cls_spec <- svm_linear(cost = 1) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"LiblineaR\") svm_cls_spec ## Linear Support Vector Machine Model Specification (classification) ## ## Main Arguments: ## cost = 1 ## ## Computational engine: LiblineaR set.seed(1) svm_cls_fit <- svm_cls_spec %>% fit(Class ~ ., data = data_train) svm_cls_fit ## parsnip model object ## ## $TypeDetail ## [1] \"L2-regularized L2-loss support vector classification dual (L2R_L2LOSS_SVC_DUAL)\" ## ## $Type ## [1] 1 ## ## $W ## A B Bias ## [1,] 0.4067922 -1.314783 1.321851 ## ## $Bias ## [1] 1 ## ## $ClassNames ## [1] Class1 Class2 ## Levels: Class1 
Class2 ## ## $NbClass ## [1] 2 ## ## attr(,\"class\") ## [1] \"LiblineaR\" predict(svm_cls_fit, data_test) ## # A tibble: 10 × 1 ## .pred_class ## ## 1 Class1 ## 2 Class1 ## 3 Class1 ## 4 Class1 ## 5 Class2 ## 6 Class2 ## 7 Class1 ## 8 Class1 ## 9 Class1 ## 10 Class2 library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] svm_reg_spec <- svm_linear(cost = 1, margin = 0.1) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"kernlab\") svm_reg_spec ## Linear Support Vector Machine Model Specification (regression) ## ## Main Arguments: ## cost = 1 ## margin = 0.1 ## ## Computational engine: kernlab set.seed(1) svm_reg_fit <- svm_reg_spec %>% fit(ridership ~ ., data = Chicago_train) ## Setting default kernel parameters svm_reg_fit ## parsnip model object ## ## Support Vector Machine object of class \"ksvm\" ## ## SV type: eps-svr (regression) ## parameter : epsilon = 0.1 cost C = 1 ## ## Linear (vanilla) kernel function. 
## ## Number of Support Vectors : 2283 ## ## Objective Function Value : -825.1632 ## Training error : 0.226456 predict(svm_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 21.0 ## 2 21.2 ## 3 21.5 ## 4 21.2 ## 5 19.4 ## 6 6.87 ## 7 6.41 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] svm_cls_spec <- svm_linear(cost = 1) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"kernlab\") svm_cls_spec ## Linear Support Vector Machine Model Specification (classification) ## ## Main Arguments: ## cost = 1 ## ## Computational engine: kernlab set.seed(1) svm_cls_fit <- svm_cls_spec %>% fit(Class ~ ., data = data_train) ## Setting default kernel parameters svm_cls_fit ## parsnip model object ## ## Support Vector Machine object of class \"ksvm\" ## ## SV type: C-svc (classification) ## parameter : cost C = 1 ## ## Linear (vanilla) kernel function. ## ## Number of Support Vectors : 353 ## ## Objective Function Value : -349.425 ## Training error : 0.174136 ## Probability model included. bind_cols( predict(svm_cls_fit, data_test), predict(svm_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class1 0.517 0.483 ## 2 Class1 0.904 0.0956 ## 3 Class1 0.645 0.355 ## 4 Class1 0.610 0.390 ## 5 Class2 0.445 0.555 ## 6 Class2 0.212 0.788 ## 7 Class1 0.704 0.296 ## 8 Class1 0.565 0.435 ## 9 Class1 0.994 0.00646 ## 10 Class2 0.114 0.886"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"svm_poly-models","dir":"Articles","previous_headings":"","what":"svm_poly() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. 
can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] svm_reg_spec <- svm_poly(cost = 1, margin = 0.1) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"kernlab\") svm_reg_spec ## Polynomial Support Vector Machine Model Specification (regression) ## ## Main Arguments: ## cost = 1 ## margin = 0.1 ## ## Computational engine: kernlab set.seed(1) svm_reg_fit <- svm_reg_spec %>% fit(ridership ~ ., data = Chicago_train) ## Setting default kernel parameters svm_reg_fit ## parsnip model object ## ## Support Vector Machine object of class \"ksvm\" ## ## SV type: eps-svr (regression) ## parameter : epsilon = 0.1 cost C = 1 ## ## Polynomial kernel function. 
## Hyperparameters : degree = 1 scale = 1 offset = 1 ## ## Number of Support Vectors : 2283 ## ## Objective Function Value : -825.1628 ## Training error : 0.226471 predict(svm_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 21.0 ## 2 21.2 ## 3 21.5 ## 4 21.2 ## 5 19.4 ## 6 6.87 ## 7 6.41 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] svm_cls_spec <- svm_poly(cost = 1) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"kernlab\") svm_cls_spec ## Polynomial Support Vector Machine Model Specification (classification) ## ## Main Arguments: ## cost = 1 ## ## Computational engine: kernlab set.seed(1) svm_cls_fit <- svm_cls_spec %>% fit(Class ~ ., data = data_train) ## Setting default kernel parameters svm_cls_fit ## parsnip model object ## ## Support Vector Machine object of class \"ksvm\" ## ## SV type: C-svc (classification) ## parameter : cost C = 1 ## ## Polynomial kernel function. ## Hyperparameters : degree = 1 scale = 1 offset = 1 ## ## Number of Support Vectors : 353 ## ## Objective Function Value : -349.425 ## Training error : 0.174136 ## Probability model included. bind_cols( predict(svm_cls_fit, data_test), predict(svm_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class1 0.517 0.483 ## 2 Class1 0.904 0.0956 ## 3 Class1 0.645 0.355 ## 4 Class1 0.610 0.390 ## 5 Class2 0.445 0.555 ## 6 Class2 0.212 0.788 ## 7 Class1 0.704 0.296 ## 8 Class1 0.565 0.435 ## 9 Class1 0.994 0.00646 ## 10 Class2 0.114 0.886"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"svm_rbf-models","dir":"Articles","previous_headings":"","what":"svm_rbf() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. 
two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] svm_reg_spec <- svm_rbf(cost = 1, margin = 0.1) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"kernlab\") svm_reg_spec ## Radial Basis Function Support Vector Machine Model Specification (regression) ## ## Main Arguments: ## cost = 1 ## margin = 0.1 ## ## Computational engine: kernlab set.seed(1) svm_reg_fit <- svm_reg_spec %>% fit(ridership ~ ., data = Chicago_train) svm_reg_fit ## parsnip model object ## ## Support Vector Machine object of class \"ksvm\" ## ## SV type: eps-svr (regression) ## parameter : epsilon = 0.1 cost C = 1 ## ## Gaussian Radial Basis kernel function. 
## Hyperparameter : sigma = 10.8262370251485 ## ## Number of Support Vectors : 2233 ## ## Objective Function Value : -746.584 ## Training error : 0.205567 predict(svm_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.7 ## 2 21.2 ## 3 21.3 ## 4 21.1 ## 5 19.4 ## 6 6.77 ## 7 6.13 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] svm_cls_spec <- svm_rbf(cost = 1) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"kernlab\") svm_cls_spec ## Radial Basis Function Support Vector Machine Model Specification (classification) ## ## Main Arguments: ## cost = 1 ## ## Computational engine: kernlab set.seed(1) svm_cls_fit <- svm_cls_spec %>% fit(Class ~ ., data = data_train) svm_cls_fit ## parsnip model object ## ## Support Vector Machine object of class \"ksvm\" ## ## SV type: C-svc (classification) ## parameter : cost C = 1 ## ## Gaussian Radial Basis kernel function. ## Hyperparameter : sigma = 1.63216688499952 ## ## Number of Support Vectors : 327 ## ## Objective Function Value : -294.4344 ## Training error : 0.169014 ## Probability model included. bind_cols( predict(svm_cls_fit, data_test), predict(svm_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.238 0.762 ## 2 Class1 0.905 0.0950 ## 3 Class1 0.619 0.381 ## 4 Class1 0.879 0.121 ## 5 Class1 0.641 0.359 ## 6 Class2 0.153 0.847 ## 7 Class1 0.745 0.255 ## 8 Class2 0.313 0.687 ## 9 Class1 0.878 0.122 ## 10 Class2 0.137 0.863"},{"path":"https://parsnip.tidymodels.org/dev/articles/parsnip.html","id":"motivation","dir":"Articles","previous_headings":"","what":"Motivation","title":"Introduction to parsnip","text":"Modeling functions across different R packages can different interfaces. like try different approaches, lot syntactical minutiae remember. problem worsens move -platforms (e.g. 
logistic regression R’s glm versus Spark’s implementation). parsnip tries solve providing similar interfaces models. example, fitting random forest model like adjust number trees forest different argument names remember: randomForest::randomForest uses ntree, ranger::ranger uses num.trees, Spark’s sparklyr::ml_random_forest uses num_trees. Rather remembering values, common interface models can used package makes translation trees real names implementations. terminology: model type differentiates models. Example types : random forests, logistic regression, linear support vector machines, etc. mode model denotes used. Two common modes classification regression. Others include “censored regression” “risk regression” (parametric Cox PH models censored data, respectively), well unsupervised models (e.g. “clustering”). computational engine indicates actual model might fit. often R packages (randomForest ranger) might also methods outside R (e.g. Stan, Spark, others). parsnip, similar ggplot2, dplyr recipes, separates specification want actual . allows us create broader functionality modeling.","code":"library(parsnip) rf_mod <- rand_forest(trees = 2000)"},{"path":"https://parsnip.tidymodels.org/dev/articles/parsnip.html","id":"placeholders-for-parameters","dir":"Articles","previous_headings":"","what":"Placeholders for Parameters","title":"Introduction to parsnip","text":"times like change parameter default sure final value . basis model tuning use tune package. Since model executing created, types parameters can changed using tune() function. provides simple placeholder value. 
come handy later fit model different values mtry.","code":"tune_mtry <- rand_forest(trees = 2000, mtry = tune()) tune_mtry #> Random Forest Model Specification (unknown mode) #> #> Main Arguments: #> mtry = tune() #> trees = 2000 #> #> Computational engine: ranger"},{"path":"https://parsnip.tidymodels.org/dev/articles/parsnip.html","id":"specifying-arguments","dir":"Articles","previous_headings":"","what":"Specifying Arguments","title":"Introduction to parsnip","text":"Commonly used arguments modeling functions parameters exposed function. example, rand_forest arguments : mtry: number predictors randomly sampled split creating tree models. trees: number trees contained ensemble. min_n: minimum number data points node required node split . arguments default function : However, might arguments like change allow vary. accessible using set_engine. example, ranger option set internal random number seed. set specific value:","code":"args(rand_forest) #> function (mode = \"unknown\", engine = \"ranger\", mtry = NULL, trees = NULL, #> min_n = NULL) #> NULL rf_with_seed <- rand_forest(trees = 2000, mtry = tune(), mode = \"regression\") %>% set_engine(\"ranger\", seed = 63233) rf_with_seed #> Random Forest Model Specification (regression) #> #> Main Arguments: #> mtry = tune() #> trees = 2000 #> #> Engine-Specific Arguments: #> seed = 63233 #> #> Computational engine: ranger"},{"path":"https://parsnip.tidymodels.org/dev/articles/parsnip.html","id":"process","dir":"Articles","previous_headings":"","what":"Process","title":"Introduction to parsnip","text":"fit model, must: defined model, including mode, tune() parameters, specify computational engine. example, rf_with_seed ready fitting due tune() parameter. can set parameter’s value create model fit: , using randomForest package: Note call objects show num.trees = ~2000. tilde consequence parsnip using quosures process model specification’s arguments. Normally, function executed, function’s arguments immediately evaluated. 
case parsnip, model specification’s arguments ; expression captured along environment evaluated. quosure . parsnip uses expressions make model fit call evaluated. tilde call reflects argument captured using quosure.","code":"rf_with_seed %>% set_args(mtry = 4) %>% set_engine(\"ranger\") %>% fit(mpg ~ ., data = mtcars) #> parsnip model object #> #> Ranger result #> #> Call: #> ranger::ranger(x = maybe_data_frame(x), y = y, mtry = min_cols(~4, x), num.trees = ~2000, num.threads = 1, verbose = FALSE, seed = sample.int(10^5, 1)) #> #> Type: Regression #> Number of trees: 2000 #> Sample size: 32 #> Number of independent variables: 10 #> Mtry: 4 #> Target node size: 5 #> Variable importance mode: none #> Splitrule: variance #> OOB prediction error (MSE): 5.57 #> R squared (OOB): 0.847 set.seed(56982) rf_with_seed %>% set_args(mtry = 4) %>% set_engine(\"randomForest\") %>% fit(mpg ~ ., data = mtcars) #> parsnip model object #> #> #> Call: #> randomForest(x = maybe_data_frame(x), y = y, ntree = ~2000, mtry = min_cols(~4, x)) #> Type of random forest: regression #> Number of trees: 2000 #> No. of variables tried at each split: 4 #> #> Mean of squared residuals: 5.52 #> % Var explained: 84.3"},{"path":"https://parsnip.tidymodels.org/dev/authors.html","id":null,"dir":"","previous_headings":"","what":"Authors","title":"Authors and Citation","text":"Max Kuhn. Author, maintainer. Davis Vaughan. Author. Emil Hvitfeldt. Contributor. . Copyright holder, funder.","code":""},{"path":"https://parsnip.tidymodels.org/dev/authors.html","id":"citation","dir":"","previous_headings":"","what":"Citation","title":"Authors and Citation","text":"Kuhn M, Vaughan D (2024). parsnip: Common API Modeling Analysis Functions. 
R package version 1.1.1.9007, https://parsnip.tidymodels.org/, https://github.com/tidymodels/parsnip.","code":"@Manual{, title = {parsnip: A Common API to Modeling and Analysis Functions}, author = {Max Kuhn and Davis Vaughan}, year = {2024}, note = {R package version 1.1.1.9007, https://parsnip.tidymodels.org/}, url = {https://github.com/tidymodels/parsnip}, }"},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/index.html","id":"introduction","dir":"","previous_headings":"","what":"Introduction","title":"A Common API to Modeling and Analysis Functions","text":"goal parsnip provide tidy, unified interface models can used try range models without getting bogged syntactical minutiae underlying packages.","code":""},{"path":"https://parsnip.tidymodels.org/dev/index.html","id":"installation","dir":"","previous_headings":"","what":"Installation","title":"A Common API to Modeling and Analysis Functions","text":"","code":"# The easiest way to get parsnip is to install all of tidymodels: install.packages(\"tidymodels\") # Alternatively, install just parsnip: install.packages(\"parsnip\") # Or the development version from GitHub: # install.packages(\"pak\") pak::pak(\"tidymodels/parsnip\")"},{"path":"https://parsnip.tidymodels.org/dev/index.html","id":"getting-started","dir":"","previous_headings":"","what":"Getting started","title":"A Common API to Modeling and Analysis Functions","text":"One challenge different modeling functions available R thing can different interfaces arguments. example, fit random forest regression model, might : Note model syntax can different argument names (formats) also different. pain switch implementations. example: type model “random forest”, mode model “regression” (opposed classification, etc), computational engine name R package. goals parsnip : Separate definition model evaluation. Decouple model specification implementation (whether implementation R, spark, something else). 
example, user call rand_forest instead ranger::ranger specific packages. Harmonize argument names (e.g. n.trees, ntrees, trees) users need remember single name. help across model types trees argument across random forest well boosting bagging. Using example , parsnip approach : engine can easily changed. use Spark, change straightforward: Either one model specifications can fit way: list parsnip models across different CRAN packages can found https://www.tidymodels.org/find/parsnip.","code":"# From randomForest rf_1 <- randomForest( y ~ ., data = dat, mtry = 10, ntree = 2000, importance = TRUE ) # From ranger rf_2 <- ranger( y ~ ., data = dat, mtry = 10, num.trees = 2000, importance = \"impurity\" ) # From sparklyr rf_3 <- ml_random_forest( dat, intercept = FALSE, response = \"y\", features = names(dat)[names(dat) != \"y\"], col.sample.rate = 10, num.trees = 2000 ) library(parsnip) rand_forest(mtry = 10, trees = 2000) %>% set_engine(\"ranger\", importance = \"impurity\") %>% set_mode(\"regression\") #> Random Forest Model Specification (regression) #> #> Main Arguments: #> mtry = 10 #> trees = 2000 #> #> Engine-Specific Arguments: #> importance = impurity #> #> Computational engine: ranger rand_forest(mtry = 10, trees = 2000) %>% set_engine(\"spark\") %>% set_mode(\"regression\") #> Random Forest Model Specification (regression) #> #> Main Arguments: #> mtry = 10 #> trees = 2000 #> #> Computational engine: spark set.seed(192) rand_forest(mtry = 10, trees = 2000) %>% set_engine(\"ranger\", importance = \"impurity\") %>% set_mode(\"regression\") %>% fit(mpg ~ ., data = mtcars) #> parsnip model object #> #> Ranger result #> #> Call: #> ranger::ranger(x = maybe_data_frame(x), y = y, mtry = min_cols(~10, x), num.trees = ~2000, importance = ~\"impurity\", num.threads = 1, verbose = FALSE, seed = sample.int(10^5, 1)) #> #> Type: Regression #> Number of trees: 2000 #> Sample size: 32 #> Number of independent variables: 10 #> Mtry: 10 #> Target node size: 5 #> Variable 
importance mode: impurity #> Splitrule: variance #> OOB prediction error (MSE): 5.976917 #> R squared (OOB): 0.8354559"},{"path":"https://parsnip.tidymodels.org/dev/index.html","id":"contributing","dir":"","previous_headings":"","what":"Contributing","title":"A Common API to Modeling and Analysis Functions","text":"project released Contributor Code Conduct. contributing project, agree abide terms. questions discussions tidymodels packages, modeling, machine learning, please post RStudio Community. think encountered bug, please submit issue. Either way, learn create share reprex (minimal, reproducible example), clearly communicate code. Check details contributing guidelines tidymodels packages get help.","code":""},{"path":"https://parsnip.tidymodels.org/dev/issue_template.html","id":null,"dir":"","previous_headings":"","what":"PLEASE READ: Making a new issue for parsnip","title":"PLEASE READ: Making a new issue for parsnip","text":"Please follow template . question related specific data analysis, please include minimal reprex (reproducible example). ’ve never heard reprex , start reading “reprex”, follow advice page. Tips: good example issue: #139 Issues without reprex lower priority others. don’t want use confidential data; can blind data simulate data demonstrate issue. functions caret::twoClassSim() caret::SLC14_1() might good tools simulate data . Unless problem explicitly parallel processing, please run sequentially. Even parallel processing, please make sure runs sequentially first. Please use set.seed() ensure randomness code reproducible. Please check https://stackoverflow.com/ https://community.rstudio.com/ see someone already asked question (see: Yihui’s Rule). 
might need install : ready file issue, please delete parts line: < – ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ –>","code":"install.packages(c(\"reprex\", \"sessioninfo\"), repos = \"http://cran.r-project.org\")"},{"path":"https://parsnip.tidymodels.org/dev/issue_template.html","id":"the-problem","dir":"","previous_headings":"","what":"The problem","title":"PLEASE READ: Making a new issue for parsnip","text":"’m trouble … considered …","code":""},{"path":"https://parsnip.tidymodels.org/dev/issue_template.html","id":"reproducible-example","dir":"","previous_headings":"","what":"Reproducible example","title":"PLEASE READ: Making a new issue for parsnip","text":"Copy code clipboard run:","code":"reprex::reprex(si = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/C5.0_train.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees via C5.0 — C5.0_train","title":"Boosted trees via C5.0 — C5.0_train","text":"C5.0_train wrapper C5.0() function C50 package fits tree-based models model arguments main function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/C5.0_train.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Boosted trees via C5.0 — C5.0_train","text":"","code":"C5.0_train(x, y, weights = NULL, trials = 15, minCases = 2, sample = 0, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/C5.0_train.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Boosted trees via C5.0 — C5.0_train","text":"x data frame matrix predictors. y factor vector 2 levels weights optional numeric vector case weights. Note data used case weights used splitting variable model (see https://www.rulequest.com/see5-info.html Quinlan's notes case weights). trials integer specifying number boosting iterations. value one indicates single model used. minCases integer smallest number samples must put least two splits. 
sample value (0, .999) specifies random proportion data used train model. default, samples used model training. Samples used training used evaluate accuracy model printed output. value zero means training data used. ... arguments pass.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/C5.0_train.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Boosted trees via C5.0 — C5.0_train","text":"fitted C5.0 model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/C5_rules.html","id":null,"dir":"Reference","previous_headings":"","what":"C5.0 rule-based classification models — C5_rules","title":"C5.0 rule-based classification models — C5_rules","text":"C5_rules() defines model derives feature rules tree prediction. single tree boosted ensemble can used. function can fit classification models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . C5.0¹² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/C5_rules.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"C5.0 rule-based classification models — C5_rules","text":"","code":"C5_rules(mode = \"classification\", trees = NULL, min_n = NULL, engine = \"C5.0\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/C5_rules.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"C5.0 rule-based classification models — C5_rules","text":"mode single character string type model. possible value model \"classification\". trees non-negative integer (greater 100) number members ensemble. min_n integer greater zero nine minimum number data points node required node split . 
engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/C5_rules.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"C5.0 rule-based classification models — C5_rules","text":"C5.0 classification model extension C4.5 model Quinlan (1993). tree- rule-based versions also include boosting capabilities. C5_rules() enables version model uses series rules (see examples ). make set rules, initial C5.0 tree created flattened rules. rules pruned, simplified, ordered. Rule sets created within iteration boosting. function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 C5_rules(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/C5_rules.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"C5.0 rule-based classification models — C5_rules","text":"Quinlan R (1993). C4.5: Programs Machine Learning. Morgan Kaufmann Publishers. https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/C5_rules.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"C5.0 rule-based classification models — C5_rules","text":"","code":"show_engines(\"C5_rules\") #> # A tibble: 0 × 2 #> # ℹ 2 variables: engine , mode C5_rules() #> ! parsnip could not locate an implementation for `C5_rules` model #> specifications. #> ℹ The parsnip extension package rules implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. 
#> C5.0 Model Specification (classification) #> #> Computational engine: C5.0 #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/add_on_exports.html","id":null,"dir":"Reference","previous_headings":"","what":"Functions required for parsnip-adjacent packages — null_value","title":"Functions required for parsnip-adjacent packages — null_value","text":"functions helpful creating new packages register new model specifications.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/add_on_exports.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Functions required for parsnip-adjacent packages — null_value","text":"","code":"null_value(x) show_fit(model, eng) check_args(object) update_dot_check(...) new_model_spec( cls, args, eng_args, mode, user_specified_mode = TRUE, method, engine, user_specified_engine = TRUE ) check_final_param(x) update_main_parameters(args, param) update_engine_parameters(eng_args, fresh, ...) print_model_spec(x, cls = class(x)[1], desc = get_model_desc(cls), ...) update_spec(object, parameters, args_enquo_list, fresh, cls, ...) 
is_varying(x)"},{"path":"https://parsnip.tidymodels.org/dev/reference/add_rowindex.html","id":null,"dir":"Reference","previous_headings":"","what":"Add a column of row numbers to a data frame — add_rowindex","title":"Add a column of row numbers to a data frame — add_rowindex","text":"Add column row numbers data frame","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/add_rowindex.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Add a column of row numbers to a data frame — add_rowindex","text":"","code":"add_rowindex(x)"},{"path":"https://parsnip.tidymodels.org/dev/reference/add_rowindex.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Add a column of row numbers to a data frame — add_rowindex","text":"x data frame","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/add_rowindex.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Add a column of row numbers to a data frame — add_rowindex","text":"data frame column 1-based integers named .row.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/add_rowindex.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Add a column of row numbers to a data frame — add_rowindex","text":"","code":"mtcars %>% add_rowindex() #> mpg cyl disp hp drat wt qsec vs am gear carb #> Mazda RX4 21.0 6 160.0 110 3.90 2.620 16.46 0 1 4 4 #> Mazda RX4 Wag 21.0 6 160.0 110 3.90 2.875 17.02 0 1 4 4 #> Datsun 710 22.8 4 108.0 93 3.85 2.320 18.61 1 1 4 1 #> Hornet 4 Drive 21.4 6 258.0 110 3.08 3.215 19.44 1 0 3 1 #> Hornet Sportabout 18.7 8 360.0 175 3.15 3.440 17.02 0 0 3 2 #> Valiant 18.1 6 225.0 105 2.76 3.460 20.22 1 0 3 1 #> Duster 360 14.3 8 360.0 245 3.21 3.570 15.84 0 0 3 4 #> Merc 240D 24.4 4 146.7 62 3.69 3.190 20.00 1 0 4 2 #> Merc 230 22.8 4 140.8 95 3.92 3.150 22.90 1 0 4 2 #> Merc 280 19.2 6 167.6 123 3.92 3.440 18.30 1 0 4 4 #> Merc 280C 
17.8 6 167.6 123 3.92 3.440 18.90 1 0 4 4 #> Merc 450SE 16.4 8 275.8 180 3.07 4.070 17.40 0 0 3 3 #> Merc 450SL 17.3 8 275.8 180 3.07 3.730 17.60 0 0 3 3 #> Merc 450SLC 15.2 8 275.8 180 3.07 3.780 18.00 0 0 3 3 #> Cadillac Fleetwood 10.4 8 472.0 205 2.93 5.250 17.98 0 0 3 4 #> Lincoln Continental 10.4 8 460.0 215 3.00 5.424 17.82 0 0 3 4 #> Chrysler Imperial 14.7 8 440.0 230 3.23 5.345 17.42 0 0 3 4 #> Fiat 128 32.4 4 78.7 66 4.08 2.200 19.47 1 1 4 1 #> Honda Civic 30.4 4 75.7 52 4.93 1.615 18.52 1 1 4 2 #> Toyota Corolla 33.9 4 71.1 65 4.22 1.835 19.90 1 1 4 1 #> Toyota Corona 21.5 4 120.1 97 3.70 2.465 20.01 1 0 3 1 #> Dodge Challenger 15.5 8 318.0 150 2.76 3.520 16.87 0 0 3 2 #> AMC Javelin 15.2 8 304.0 150 3.15 3.435 17.30 0 0 3 2 #> Camaro Z28 13.3 8 350.0 245 3.73 3.840 15.41 0 0 3 4 #> Pontiac Firebird 19.2 8 400.0 175 3.08 3.845 17.05 0 0 3 2 #> Fiat X1-9 27.3 4 79.0 66 4.08 1.935 18.90 1 1 4 1 #> Porsche 914-2 26.0 4 120.3 91 4.43 2.140 16.70 0 1 5 2 #> Lotus Europa 30.4 4 95.1 113 3.77 1.513 16.90 1 1 5 2 #> Ford Pantera L 15.8 8 351.0 264 4.22 3.170 14.50 0 1 5 4 #> Ferrari Dino 19.7 6 145.0 175 3.62 2.770 15.50 0 1 5 6 #> Maserati Bora 15.0 8 301.0 335 3.54 3.570 14.60 0 1 5 8 #> Volvo 142E 21.4 4 121.0 109 4.11 2.780 18.60 1 1 4 2 #> .row #> Mazda RX4 1 #> Mazda RX4 Wag 2 #> Datsun 710 3 #> Hornet 4 Drive 4 #> Hornet Sportabout 5 #> Valiant 6 #> Duster 360 7 #> Merc 240D 8 #> Merc 230 9 #> Merc 280 10 #> Merc 280C 11 #> Merc 450SE 12 #> Merc 450SL 13 #> Merc 450SLC 14 #> Cadillac Fleetwood 15 #> Lincoln Continental 16 #> Chrysler Imperial 17 #> Fiat 128 18 #> Honda Civic 19 #> Toyota Corolla 20 #> Toyota Corona 21 #> Dodge Challenger 22 #> AMC Javelin 23 #> Camaro Z28 24 #> Pontiac Firebird 25 #> Fiat X1-9 26 #> Porsche 914-2 27 #> Lotus Europa 28 #> Ford Pantera L 29 #> Ferrari Dino 30 #> Maserati Bora 31 #> Volvo 142E 
32"},{"path":"https://parsnip.tidymodels.org/dev/reference/augment.html","id":null,"dir":"Reference","previous_headings":"","what":"Augment data with predictions — augment.model_fit","title":"Augment data with predictions — augment.model_fit","text":"augment() add column(s) predictions given data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/augment.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Augment data with predictions — augment.model_fit","text":"","code":"# S3 method for model_fit augment(x, new_data, eval_time = NULL, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/augment.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Augment data with predictions — augment.model_fit","text":"x model_fit object produced fit.model_spec() fit_xy.model_spec(). new_data data frame matrix. eval_time censored regression models, vector time points survival probability estimated. ... currently used.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/augment.html","id":"regression","dir":"Reference","previous_headings":"","what":"Regression","title":"Augment data with predictions — augment.model_fit","text":"regression models, .pred column added. x created using fit.model_spec() new_data contains regression outcome column, .resid column also added.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/augment.html","id":"classification","dir":"Reference","previous_headings":"","what":"Classification","title":"Augment data with predictions — augment.model_fit","text":"classification models, results can include column called .pred_class well class probability columns named .pred_{level}. 
depends type prediction types available model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/augment.html","id":"censored-regression","dir":"Reference","previous_headings":"","what":"Censored Regression","title":"Augment data with predictions — augment.model_fit","text":"models, predictions expected time survival probability created (model engine supports ). model supports survival prediction, eval_time argument required. survival predictions created new_data contains survival::Surv() object, additional columns added inverse probability censoring weights (IPCW) also created (see tidymodels.org page references ). enables user compute performance metrics yardstick package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/augment.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Augment data with predictions — augment.model_fit","text":"https://www.tidymodels.org/learn/statistics/survival-metrics/","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/augment.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Augment data with predictions — augment.model_fit","text":"","code":"car_trn <- mtcars[11:32,] car_tst <- mtcars[ 1:10,] reg_form <- linear_reg() %>% set_engine(\"lm\") %>% fit(mpg ~ ., data = car_trn) reg_xy <- linear_reg() %>% set_engine(\"lm\") %>% fit_xy(car_trn[, -1], car_trn$mpg) augment(reg_form, car_tst) #> # A tibble: 10 × 13 #> .pred .resid mpg cyl disp hp drat wt qsec vs am #> #> 1 23.4 -2.43 21 6 160 110 3.9 2.62 16.5 0 1 #> 2 23.3 -2.30 21 6 160 110 3.9 2.88 17.0 0 1 #> 3 27.6 -4.83 22.8 4 108 93 3.85 2.32 18.6 1 1 #> 4 21.5 -0.147 21.4 6 258 110 3.08 3.22 19.4 1 0 #> 5 17.6 1.13 18.7 8 360 175 3.15 3.44 17.0 0 0 #> 6 21.6 -3.48 18.1 6 225 105 2.76 3.46 20.2 1 0 #> 7 13.9 0.393 14.3 8 360 245 3.21 3.57 15.8 0 0 #> 8 21.7 2.70 24.4 4 147. 62 3.69 3.19 20 1 0 #> 9 25.6 -2.81 22.8 4 141. 
95 3.92 3.15 22.9 1 0 #> 10 17.1 2.09 19.2 6 168. 123 3.92 3.44 18.3 1 0 #> # ℹ 2 more variables: gear , carb augment(reg_form, car_tst[, -1]) #> # A tibble: 10 × 11 #> .pred cyl disp hp drat wt qsec vs am gear carb #> #> 1 23.4 6 160 110 3.9 2.62 16.5 0 1 4 4 #> 2 23.3 6 160 110 3.9 2.88 17.0 0 1 4 4 #> 3 27.6 4 108 93 3.85 2.32 18.6 1 1 4 1 #> 4 21.5 6 258 110 3.08 3.22 19.4 1 0 3 1 #> 5 17.6 8 360 175 3.15 3.44 17.0 0 0 3 2 #> 6 21.6 6 225 105 2.76 3.46 20.2 1 0 3 1 #> 7 13.9 8 360 245 3.21 3.57 15.8 0 0 3 4 #> 8 21.7 4 147. 62 3.69 3.19 20 1 0 4 2 #> 9 25.6 4 141. 95 3.92 3.15 22.9 1 0 4 2 #> 10 17.1 6 168. 123 3.92 3.44 18.3 1 0 4 4 augment(reg_xy, car_tst) #> # A tibble: 10 × 12 #> .pred mpg cyl disp hp drat wt qsec vs am gear carb #> #> 1 23.4 21 6 160 110 3.9 2.62 16.5 0 1 4 4 #> 2 23.3 21 6 160 110 3.9 2.88 17.0 0 1 4 4 #> 3 27.6 22.8 4 108 93 3.85 2.32 18.6 1 1 4 1 #> 4 21.5 21.4 6 258 110 3.08 3.22 19.4 1 0 3 1 #> 5 17.6 18.7 8 360 175 3.15 3.44 17.0 0 0 3 2 #> 6 21.6 18.1 6 225 105 2.76 3.46 20.2 1 0 3 1 #> 7 13.9 14.3 8 360 245 3.21 3.57 15.8 0 0 3 4 #> 8 21.7 24.4 4 147. 62 3.69 3.19 20 1 0 4 2 #> 9 25.6 22.8 4 141. 95 3.92 3.15 22.9 1 0 4 2 #> 10 17.1 19.2 6 168. 123 3.92 3.44 18.3 1 0 4 4 augment(reg_xy, car_tst[, -1]) #> # A tibble: 10 × 11 #> .pred cyl disp hp drat wt qsec vs am gear carb #> #> 1 23.4 6 160 110 3.9 2.62 16.5 0 1 4 4 #> 2 23.3 6 160 110 3.9 2.88 17.0 0 1 4 4 #> 3 27.6 4 108 93 3.85 2.32 18.6 1 1 4 1 #> 4 21.5 6 258 110 3.08 3.22 19.4 1 0 3 1 #> 5 17.6 8 360 175 3.15 3.44 17.0 0 0 3 2 #> 6 21.6 6 225 105 2.76 3.46 20.2 1 0 3 1 #> 7 13.9 8 360 245 3.21 3.57 15.8 0 0 3 4 #> 8 21.7 4 147. 62 3.69 3.19 20 1 0 4 2 #> 9 25.6 4 141. 95 3.92 3.15 22.9 1 0 4 2 #> 10 17.1 6 168. 
123 3.92 3.44 18.3 1 0 4 4 # ------------------------------------------------------------------------------ data(two_class_dat, package = \"modeldata\") cls_trn <- two_class_dat[-(1:10), ] cls_tst <- two_class_dat[ 1:10 , ] cls_form <- logistic_reg() %>% set_engine(\"glm\") %>% fit(Class ~ ., data = cls_trn) cls_xy <- logistic_reg() %>% set_engine(\"glm\") %>% fit_xy(cls_trn[, -3], cls_trn$Class) augment(cls_form, cls_tst) #> # A tibble: 10 × 6 #> .pred_class .pred_Class1 .pred_Class2 A B Class #> #> 1 Class1 0.518 0.482 2.07 1.63 Class1 #> 2 Class1 0.909 0.0913 2.02 1.04 Class1 #> 3 Class1 0.648 0.352 1.69 1.37 Class2 #> 4 Class1 0.610 0.390 3.43 1.98 Class2 #> 5 Class2 0.443 0.557 2.88 1.98 Class1 #> 6 Class2 0.206 0.794 3.31 2.41 Class2 #> 7 Class1 0.708 0.292 2.50 1.56 Class2 #> 8 Class1 0.567 0.433 1.98 1.55 Class2 #> 9 Class1 0.994 0.00582 2.88 0.580 Class1 #> 10 Class2 0.108 0.892 3.74 2.74 Class2 augment(cls_form, cls_tst[, -3]) #> # A tibble: 10 × 5 #> .pred_class .pred_Class1 .pred_Class2 A B #> #> 1 Class1 0.518 0.482 2.07 1.63 #> 2 Class1 0.909 0.0913 2.02 1.04 #> 3 Class1 0.648 0.352 1.69 1.37 #> 4 Class1 0.610 0.390 3.43 1.98 #> 5 Class2 0.443 0.557 2.88 1.98 #> 6 Class2 0.206 0.794 3.31 2.41 #> 7 Class1 0.708 0.292 2.50 1.56 #> 8 Class1 0.567 0.433 1.98 1.55 #> 9 Class1 0.994 0.00582 2.88 0.580 #> 10 Class2 0.108 0.892 3.74 2.74 augment(cls_xy, cls_tst) #> # A tibble: 10 × 6 #> .pred_class .pred_Class1 .pred_Class2 A B Class #> #> 1 Class1 0.518 0.482 2.07 1.63 Class1 #> 2 Class1 0.909 0.0913 2.02 1.04 Class1 #> 3 Class1 0.648 0.352 1.69 1.37 Class2 #> 4 Class1 0.610 0.390 3.43 1.98 Class2 #> 5 Class2 0.443 0.557 2.88 1.98 Class1 #> 6 Class2 0.206 0.794 3.31 2.41 Class2 #> 7 Class1 0.708 0.292 2.50 1.56 Class2 #> 8 Class1 0.567 0.433 1.98 1.55 Class2 #> 9 Class1 0.994 0.00582 2.88 0.580 Class1 #> 10 Class2 0.108 0.892 3.74 2.74 Class2 augment(cls_xy, cls_tst[, -3]) #> # A tibble: 10 × 5 #> .pred_class .pred_Class1 .pred_Class2 A B #> #> 1 Class1 
0.518 0.482 2.07 1.63 #> 2 Class1 0.909 0.0913 2.02 1.04 #> 3 Class1 0.648 0.352 1.69 1.37 #> 4 Class1 0.610 0.390 3.43 1.98 #> 5 Class2 0.443 0.557 2.88 1.98 #> 6 Class2 0.206 0.794 3.31 2.41 #> 7 Class1 0.708 0.292 2.50 1.56 #> 8 Class1 0.567 0.433 1.98 1.55 #> 9 Class1 0.994 0.00582 2.88 0.580 #> 10 Class2 0.108 0.892 3.74 2.74"},{"path":"https://parsnip.tidymodels.org/dev/reference/auto_ml.html","id":null,"dir":"Reference","previous_headings":"","what":"Automatic Machine Learning — auto_ml","title":"Automatic Machine Learning — auto_ml","text":"auto_ml() defines automated searching tuning process many models different families trained ranked given performance training data. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . h2o¹² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/auto_ml.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Automatic Machine Learning — auto_ml","text":"","code":"auto_ml(mode = \"unknown\", engine = \"h2o\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/auto_ml.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Automatic Machine Learning — auto_ml","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/auto_ml.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Automatic Machine Learning — auto_ml","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 auto_ml(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/auto_ml.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Automatic Machine Learning — auto_ml","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/autoplot.model_fit.html","id":null,"dir":"Reference","previous_headings":"","what":"Create a ggplot for a model object — autoplot.model_fit","title":"Create a ggplot for a model object — autoplot.model_fit","text":"method provides good visualization method model results. Currently, methods glmnet models implemented.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/autoplot.model_fit.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Create a ggplot for a model object — autoplot.model_fit","text":"","code":"# S3 method for model_fit autoplot(object, ...) # S3 method for glmnet autoplot(object, ..., min_penalty = 0, best_penalty = NULL, top_n = 3L)"},{"path":"https://parsnip.tidymodels.org/dev/reference/autoplot.model_fit.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Create a ggplot for a model object — autoplot.model_fit","text":"object model fit object. ... autoplot.glmnet(), options pass ggrepel::geom_label_repel(). Otherwise, argument ignored. min_penalty single, non-negative number smallest penalty value shown plot. left NULL, whole data range used. best_penalty single, non-negative number show vertical line marker. left NULL, line shown. argument used, ggrepl package required. top_n non-negative integer many model predictors label. top predictors ranked absolute coefficient value. 
multinomial multivariate models, top_n terms selected within class response, respectively.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/autoplot.model_fit.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Create a ggplot for a model object — autoplot.model_fit","text":"ggplot object penalty x-axis coefficients y-axis. multinomial multivariate models, plot faceted.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/autoplot.model_fit.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Create a ggplot for a model object — autoplot.model_fit","text":"glmnet package need attached loaded autoplot() method work correctly.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mars.html","id":null,"dir":"Reference","previous_headings":"","what":"Ensembles of MARS models — bag_mars","title":"Ensembles of MARS models — bag_mars","text":"bag_mars() defines ensemble generalized linear models use artificial features predictors. features resemble hinge functions result model segmented regression small dimensions. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . earth¹² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mars.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Ensembles of MARS models — bag_mars","text":"","code":"bag_mars( mode = \"unknown\", num_terms = NULL, prod_degree = NULL, prune_method = NULL, engine = \"earth\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mars.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Ensembles of MARS models — bag_mars","text":"mode single character string prediction outcome mode. 
Possible values model \"unknown\", \"regression\", \"classification\". num_terms number features retained final model, including intercept. prod_degree highest possible interaction degree. prune_method pruning method. engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mars.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Ensembles of MARS models — bag_mars","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 bag_mars(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mars.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Ensembles of MARS models — bag_mars","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mlp.html","id":null,"dir":"Reference","previous_headings":"","what":"Ensembles of neural networks — bag_mlp","title":"Ensembles of neural networks — bag_mlp","text":"bag_mlp() defines ensemble single layer, feed-forward neural networks. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
nnet¹² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mlp.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Ensembles of neural networks — bag_mlp","text":"","code":"bag_mlp( mode = \"unknown\", hidden_units = NULL, penalty = NULL, epochs = NULL, engine = \"nnet\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mlp.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Ensembles of neural networks — bag_mlp","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". hidden_units integer number units hidden model. penalty non-negative numeric value amount weight decay. epochs integer number training iterations. engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mlp.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Ensembles of neural networks — bag_mlp","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 bag_mlp(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mlp.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Ensembles of neural networks — bag_mlp","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_tree.html","id":null,"dir":"Reference","previous_headings":"","what":"Ensembles of decision trees — bag_tree","title":"Ensembles of decision trees — bag_tree","text":"bag_tree() defines ensemble decision trees. function can fit classification, regression, censored regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . rpart¹² C5.0² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_tree.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Ensembles of decision trees — bag_tree","text":"","code":"bag_tree( mode = \"unknown\", cost_complexity = 0, tree_depth = NULL, min_n = 2, class_cost = NULL, engine = \"rpart\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_tree.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Ensembles of decision trees — bag_tree","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\", \"censored regression\". cost_complexity positive number cost/complexity parameter (.k.. Cp) used CART models (specific engines ). tree_depth integer maximum depth tree (.e. number splits) (specific engines ). min_n integer minimum number data points node required node split . class_cost non-negative scalar class cost (cost 1 means extra cost). 
useful first level outcome factor minority class. case, values zero one can used bias second level factor. engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_tree.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Ensembles of decision trees — bag_tree","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 bag_tree(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_tree.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Ensembles of decision trees — bag_tree","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/bart-internal.html","id":null,"dir":"Reference","previous_headings":"","what":"Developer functions for predictions via BART models — bart-internal","title":"Developer functions for predictions via BART models — bart-internal","text":"Developer functions predictions via BART models","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bart-internal.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Developer functions for predictions via BART models — bart-internal","text":"","code":"bartMachine_interval_calc(new_data, obj, ci = TRUE, level = 0.95) dbart_predict_calc(obj, new_data, type, level = 0.95, std_err = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/bart-internal.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Developer functions for predictions via BART models — 
bart-internal","text":"new_data rectangular data object, data frame. obj parsnip object. ci Confidence (TRUE) prediction interval (FALSE) level Confidence level. type single character value NULL. Possible values \"numeric\", \"class\", \"prob\", \"conf_int\", \"pred_int\", \"quantile\", \"time\", \"hazard\", \"survival\", \"raw\". NULL, predict() choose appropriate value based model's mode. std_err Attach column standard error prediction .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bart.html","id":null,"dir":"Reference","previous_headings":"","what":"Bayesian additive regression trees (BART) — bart","title":"Bayesian additive regression trees (BART) — bart","text":"bart() defines tree ensemble model uses Bayesian analysis assemble ensemble. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . dbarts¹ information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bart.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Bayesian additive regression trees (BART) — bart","text":"","code":"bart( mode = \"unknown\", engine = \"dbarts\", trees = NULL, prior_terminal_node_coef = NULL, prior_terminal_node_expo = NULL, prior_outcome_range = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/bart.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Bayesian additive regression trees (BART) — bart","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". engine single character string specifying computational engine use fitting. trees integer number trees contained ensemble. prior_terminal_node_coef coefficient prior probability node terminal node. Values usually 0 one default 0.95. 
affects baseline probability; smaller numbers make probabilities larger overall. See Details . prior_terminal_node_expo exponent prior probability node terminal node. Values usually non-negative default 2 affects rate prior probability decreases depth tree increases. Larger values make deeper trees less likely. prior_outcome_range positive value defines width prior predicted outcome within certain range. regression related observed range data; prior number standard deviations Gaussian distribution defined observed range data. classification, defined range +/-3 (assumed logit scale). default value 2.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bart.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Bayesian additive regression trees (BART) — bart","text":"prior terminal node probability expressed prior = * (1 + d)^(-b) d depth node, prior_terminal_node_coef b prior_terminal_node_expo. See Examples section example graph prior probability terminal node different values parameters. function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 bart(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/bart.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Bayesian additive regression trees (BART) — bart","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/bart.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Bayesian additive regression trees (BART) — bart","text":"","code":"show_engines(\"bart\") #> # A tibble: 2 × 2 #> engine mode #> #> 1 dbarts classification #> 2 dbarts regression bart(mode = \"regression\", trees = 5) #> BART Model Specification (regression) #> #> Main Arguments: #> trees = 5 #> #> Computational engine: dbarts #> # ------------------------------------------------------------------------------ # Examples for terminal node prior library(ggplot2) library(dplyr) #> #> Attaching package: ‘dplyr’ #> The following objects are masked from ‘package:stats’: #> #> filter, lag #> The following objects are masked from ‘package:base’: #> #> intersect, setdiff, setequal, union prior_test <- function(coef = 0.95, expo = 2, depths = 1:10) { tidyr::crossing(coef = coef, expo = expo, depth = depths) %>% mutate( `terminial node prior` = coef * (1 + depth)^(-expo), coef = format(coef), expo = format(expo)) } prior_test(coef = c(0.05, 0.5, .95), expo = c(1/2, 1, 2)) %>% ggplot(aes(depth, `terminial node prior`, col = coef)) + geom_line() + geom_point() + facet_wrap(~ expo)"},{"path":"https://parsnip.tidymodels.org/dev/reference/boost_tree.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees — boost_tree","title":"Boosted trees — boost_tree","text":"boost_tree() defines model creates series decision trees forming ensemble. tree depends results previous trees. 
trees ensemble combined produce final prediction. function can fit classification, regression, censored regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . xgboost¹ C5.0 h2o² lightgbm² mboost² spark information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/boost_tree.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Boosted trees — boost_tree","text":"","code":"boost_tree( mode = \"unknown\", engine = \"xgboost\", mtry = NULL, trees = NULL, min_n = NULL, tree_depth = NULL, learn_rate = NULL, loss_reduction = NULL, sample_size = NULL, stop_iter = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/boost_tree.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Boosted trees — boost_tree","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\", \"censored regression\". engine single character string specifying computational engine use fitting. mtry number number (proportion) predictors randomly sampled split creating tree models (specific engines ). trees integer number trees contained ensemble. min_n integer minimum number data points node required node split . tree_depth integer maximum depth tree (.e. number splits) (specific engines ). learn_rate number rate boosting algorithm adapts iteration--iteration (specific engines ). sometimes referred shrinkage parameter. loss_reduction number reduction loss function required split (specific engines ). sample_size number number (proportion) data exposed fitting routine. xgboost, sampling done iteration C5.0 samples training. 
stop_iter number iterations without improvement stopping (specific engines ).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/boost_tree.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Boosted trees — boost_tree","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 boost_tree(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/boost_tree.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Boosted trees — boost_tree","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/boost_tree.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Boosted trees — boost_tree","text":"","code":"show_engines(\"boost_tree\") #> # A tibble: 5 × 2 #> engine mode #> #> 1 xgboost classification #> 2 xgboost regression #> 3 C5.0 classification #> 4 spark classification #> 5 spark regression boost_tree(mode = \"classification\", trees = 20) #> Boosted Tree Model Specification (classification) #> #> Main Arguments: #> trees = 20 #> #> Computational engine: xgboost #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/case_weights.html","id":null,"dir":"Reference","previous_headings":"","what":"Using case weights with parsnip — case_weights","title":"Using case weights with parsnip — case_weights","text":"Case weights positive numeric values influence much data point model fitting process. 
variety situations case weights can used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/case_weights.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Using case weights with parsnip — case_weights","text":"tidymodels packages differentiate different types case weights used entire data analysis process, including preprocessing data, model fitting, performance calculations, etc. tidymodels packages require users convert numeric vectors vector class reflects used. example, situations weights affect operations centering scaling preprocessing operations. types weights allowed tidymodels : Frequency weights via hardhat::frequency_weights() Importance weights via hardhat::importance_weights() types can added request. parsnip, fit() fit_xy functions contain case_weight argument takes data. Spark models, argument value character value.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/case_weights_allowed.html","id":null,"dir":"Reference","previous_headings":"","what":"Determine if case weights are used — case_weights_allowed","title":"Determine if case weights are used — case_weights_allowed","text":"modeling engines can incorporate case weights calculations. 
function can determine whether can used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/case_weights_allowed.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Determine if case weights are used — case_weights_allowed","text":"","code":"case_weights_allowed(spec)"},{"path":"https://parsnip.tidymodels.org/dev/reference/case_weights_allowed.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Determine if case weights are used — case_weights_allowed","text":"spec parsnip model specification.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/case_weights_allowed.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Determine if case weights are used — case_weights_allowed","text":"single logical.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/case_weights_allowed.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Determine if case weights are used — case_weights_allowed","text":"","code":"case_weights_allowed(linear_reg()) #> [1] TRUE case_weights_allowed(linear_reg(engine = \"keras\")) #> [1] FALSE"},{"path":"https://parsnip.tidymodels.org/dev/reference/censoring_weights.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculations for inverse probability of censoring weights (IPCW) — censoring_weights","title":"Calculations for inverse probability of censoring weights (IPCW) — censoring_weights","text":"method Graf et al (1999) used compute weights specific evaluation times can used help measure model's time-dependent performance (e.g. time-dependent Brier score area ROC curve). 
internal function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/censoring_weights.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculations for inverse probability of censoring weights (IPCW) — censoring_weights","text":"","code":".censoring_weights_graf(object, ...) # S3 method for default .censoring_weights_graf(object, ...) # S3 method for model_fit .censoring_weights_graf( object, predictions, cens_predictors = NULL, trunc = 0.05, eps = 10^-10, ... )"},{"path":"https://parsnip.tidymodels.org/dev/reference/censoring_weights.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculations for inverse probability of censoring weights (IPCW) — censoring_weights","text":"object fitted parsnip model object fitted workflow mode \"censored regression\". predictions data frame column containing survival::Surv() object well list column called .pred contains data structure produced predict.model_fit(). cens_predictors currently used. potential future slot models informative censoring based columns predictions. trunc potential lower bound probability censoring avoid large weight values. eps small value subtracted evaluation time computing censoring probabilities. See Details .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/censoring_weights.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Calculations for inverse probability of censoring weights (IPCW) — censoring_weights","text":"data returned pred tibbles containing several new columns: .weight_time: time inverse censoring probability weights computed. function observed time time analysis (.e., eval_time). See Details information. .pred_censored: probability censored .weight_time. 
.weight_censored: inverse censoring probability.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/censoring_weights.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Calculations for inverse probability of censoring weights (IPCW) — censoring_weights","text":"probability data censored immediately prior specific time computed. , must determine time make prediction. two time values row data set: observed time (either censored ) time model evaluated (e.g. survival function prediction time point), constant across rows. . Graf et al (1999) three cases: observed time censoring time evaluation time, data point make contribution performance metric (\"category 3\"). values missing value probability estimate (also weight column). observed time corresponds actual event, time prior evaluation time (category 1), probability censored predicted observed time (minus epsilon). observed time evaluation time (category 2), regardless status, probability censored predicted evaluation time (minus epsilon). epsilon used since, actual information time t data point predicted time t (data prior time t available). censoring probability computed, trunc option used avoid using numbers pathologically close zero. , weight computed inverting censoring probability. eps argument used avoid information leakage computing censoring probability. Subtracting small number avoids using data known time prediction. example, making survival probability predictions eval_time = 3.0, know probability censored exact time (since occurred yet). creating weights inverting probabilities, risk cases severe outliers due probabilities close zero. mitigate , trunc argument can used put cap weights. smallest probability greater trunc, probabilities values less trunc given value. Otherwise, trunc adjusted half smallest probability value used lower bound.. Note n rows data t time points, resulting data, unnested, n * t rows. 
Computations easily scale well t becomes large.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/censoring_weights.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Calculations for inverse probability of censoring weights (IPCW) — censoring_weights","text":"Graf, E., Schmoor, C., Sauerbrei, W. Schumacher, M. (1999), Assessment comparison prognostic classification schemes survival data. Statist. Med., 18: 2529-2545.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/check_empty_ellipse.html","id":null,"dir":"Reference","previous_headings":"","what":"Check to ensure that ellipses are empty — check_empty_ellipse","title":"Check to ensure that ellipses are empty — check_empty_ellipse","text":"Check ensure ellipses empty","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/check_empty_ellipse.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Check to ensure that ellipses are empty — check_empty_ellipse","text":"","code":"check_empty_ellipse(...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/check_empty_ellipse.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Check to ensure that ellipses are empty — check_empty_ellipse","text":"... 
Extra arguments.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/check_empty_ellipse.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Check to ensure that ellipses are empty — check_empty_ellipse","text":"error thrown (non-empty ellipses), NULL list.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/condense_control.html","id":null,"dir":"Reference","previous_headings":"","what":"Condense control object into strictly smaller control object — condense_control","title":"Condense control object into strictly smaller control object — condense_control","text":"function used help hierarchy control functions used throughout tidymodels packages. now assumed control function either subset superset another control function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/condense_control.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Condense control object into strictly smaller control object — condense_control","text":"","code":"condense_control(x, ref)"},{"path":"https://parsnip.tidymodels.org/dev/reference/condense_control.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Condense control object into strictly smaller control object — condense_control","text":"x control object condensed. 
ref control object used determine element kept.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/condense_control.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Condense control object into strictly smaller control object — condense_control","text":"control object elements classes ref, values x.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/condense_control.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Condense control object into strictly smaller control object — condense_control","text":"","code":"ctrl <- control_parsnip(catch = TRUE) ctrl$allow_par <- TRUE str(ctrl) #> List of 3 #> $ verbosity: int 1 #> $ catch : logi TRUE #> $ allow_par: logi TRUE #> - attr(*, \"class\")= chr \"control_parsnip\" ctrl <- condense_control(ctrl, control_parsnip()) str(ctrl) #> List of 2 #> $ verbosity: int 1 #> $ catch : logi TRUE #> - attr(*, \"class\")= chr \"control_parsnip\""},{"path":"https://parsnip.tidymodels.org/dev/reference/contr_one_hot.html","id":null,"dir":"Reference","previous_headings":"","what":"Contrast function for one-hot encodings — contr_one_hot","title":"Contrast function for one-hot encodings — contr_one_hot","text":"contrast function produces model matrix indicator columns level factor.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/contr_one_hot.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Contrast function for one-hot encodings — contr_one_hot","text":"","code":"contr_one_hot(n, contrasts = TRUE, sparse = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/contr_one_hot.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Contrast function for one-hot encodings — contr_one_hot","text":"n vector character factor levels number unique levels. contrasts argument backwards compatibility default TRUE supported. 
sparse argument backwards compatibility default FALSE supported.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/contr_one_hot.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Contrast function for one-hot encodings — contr_one_hot","text":"diagonal matrix n--n.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/contr_one_hot.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Contrast function for one-hot encodings — contr_one_hot","text":"default, model.matrix() generates binary indicator variables factor predictors. formula remove intercept, incomplete set indicators created; indicator made first level factor. example, species island three levels model.matrix() creates two indicator variables : formula intercept, first factor expanded indicators factor levels factors expanded one (): inference, hybrid encoding can problematic. generate indicators, use contrast: Removing intercept affect factor encodings.","code":"library(dplyr) library(modeldata) data(penguins) levels(penguins$species) ## [1] \"Adelie\" \"Chinstrap\" \"Gentoo\" levels(penguins$island) ## [1] \"Biscoe\" \"Dream\" \"Torgersen\" model.matrix(~ species + island, data = penguins) %>% colnames() ## [1] \"(Intercept)\" \"speciesChinstrap\" \"speciesGentoo\" \"islandDream\" ## [5] \"islandTorgersen\" model.matrix(~ 0 + species + island, data = penguins) %>% colnames() ## [1] \"speciesAdelie\" \"speciesChinstrap\" \"speciesGentoo\" \"islandDream\" ## [5] \"islandTorgersen\" # Switch out the contrast method old_contr <- options(\"contrasts\")$contrasts new_contr <- old_contr new_contr[\"unordered\"] <- \"contr_one_hot\" options(contrasts = new_contr) model.matrix(~ species + island, data = penguins) %>% colnames() ## [1] \"(Intercept)\" \"speciesAdelie\" \"speciesChinstrap\" \"speciesGentoo\" ## [5] \"islandBiscoe\" \"islandDream\" \"islandTorgersen\" options(contrasts = 
old_contr)"},{"path":"https://parsnip.tidymodels.org/dev/reference/control_parsnip.html","id":null,"dir":"Reference","previous_headings":"","what":"Control the fit function — control_parsnip","title":"Control the fit function — control_parsnip","text":"Pass options fit.model_spec() function control output computations","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/control_parsnip.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Control the fit function — control_parsnip","text":"","code":"control_parsnip(verbosity = 1L, catch = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/control_parsnip.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Control the fit function — control_parsnip","text":"verbosity integer control verbose output . value zero, messages output shown packages loaded model fit. value 1, package loading quiet model fits can produce output screen (depending contain verbose-type argument). value 2 , output displayed execution time fit recorded printed. catch logical value TRUE evaluate model inside try(, silent = TRUE). 
model fails, object still returned (without error) inherits class \"try-error\".","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/control_parsnip.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Control the fit function — control_parsnip","text":"S3 object class \"control_parsnip\" named list results function call","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/control_parsnip.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Control the fit function — control_parsnip","text":"","code":"control_parsnip(verbosity = 2L) #> parsnip control object #> - verbose level 2"},{"path":"https://parsnip.tidymodels.org/dev/reference/convert_helpers.html","id":null,"dir":"Reference","previous_headings":"","what":"Helper functions to convert between formula and matrix interface — .convert_form_to_xy_fit","title":"Helper functions to convert between formula and matrix interface — .convert_form_to_xy_fit","text":"Functions take formula interface get resulting objects (y, x, weights, etc) back way around. functions intended developer use. part, emulates internals lm() (also see notes https://developer.r-project.org/model-fitting-functions.html). .convert_form_to_xy_fit() .convert_xy_to_form_fit() data created modeling. .convert_form_to_xy_fit() saves data objects well objects needed new data predicted (e.g. terms, etc.). 
.convert_form_to_xy_new() .convert_xy_to_form_new() used new samples predicted require predictors available.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/convert_helpers.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Helper functions to convert between formula and matrix interface — .convert_form_to_xy_fit","text":"","code":".convert_form_to_xy_fit( formula, data, ..., na.action = na.omit, indicators = \"traditional\", composition = \"data.frame\", remove_intercept = TRUE ) .convert_form_to_xy_new( object, new_data, na.action = na.pass, composition = \"data.frame\" ) .convert_xy_to_form_fit( x, y, weights = NULL, y_name = \"..y\", remove_intercept = TRUE ) .convert_xy_to_form_new(object, new_data)"},{"path":"https://parsnip.tidymodels.org/dev/reference/convert_helpers.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Helper functions to convert between formula and matrix interface — .convert_form_to_xy_fit","text":"formula object class formula (one can coerced class): symbolic description model fitted. data data frame containing relevant variables (e.g. outcome(s), predictors, case weights, etc). ... Additional arguments passed stats::model.frame(). na.action function indicates happen data contain NAs. indicators string describing whether create indicator/dummy variables factor predictors. Possible options \"none\", \"traditional\", \"one_hot\". composition string describing whether resulting x y returned \"matrix\" \"data.frame\". remove_intercept logical indicating whether remove intercept column model.matrix() finished. object object class model_fit. new_data rectangular data object, data frame. x matrix, sparse matrix, data frame predictors. models support sparse matrix input. See parsnip::get_encoding() details. x column names. y vector, matrix data frame outcome data. weights numeric vector containing weights. 
y_name string specifying name outcome.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/convert_stan_interval.html","id":null,"dir":"Reference","previous_headings":"","what":"Convenience function for intervals — convert_stan_interval","title":"Convenience function for intervals — convert_stan_interval","text":"Convenience function intervals","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/convert_stan_interval.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Convenience function for intervals — convert_stan_interval","text":"","code":"convert_stan_interval(x, level = 0.95, lower = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/convert_stan_interval.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Convenience function for intervals — convert_stan_interval","text":"x fitted model object level Level uncertainty intervals lower level lower level?","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/ctree_train.html","id":null,"dir":"Reference","previous_headings":"","what":"A wrapper function for conditional inference tree models — ctree_train","title":"A wrapper function for conditional inference tree models — ctree_train","text":"functions slightly different APIs partykit::ctree() partykit::cforest() several important arguments top-level arguments (opposed specified partykit::ctree_control()).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/ctree_train.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"A wrapper function for conditional inference tree models — ctree_train","text":"","code":"ctree_train( formula, data, weights = NULL, minsplit = 20L, maxdepth = Inf, teststat = \"quadratic\", testtype = \"Bonferroni\", mincriterion = 0.95, ... 
) cforest_train( formula, data, weights = NULL, minsplit = 20L, maxdepth = Inf, teststat = \"quadratic\", testtype = \"Univariate\", mincriterion = 0, mtry = ceiling(sqrt(ncol(data) - 1)), ntree = 500L, ... )"},{"path":"https://parsnip.tidymodels.org/dev/reference/ctree_train.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"A wrapper function for conditional inference tree models — ctree_train","text":"formula symbolic description model fit. data data frame containing variables model. weights vector weights whose length nrow(data). partykit::ctree() models, required non-negative integers partykit::cforest() can non-negative integers doubles. minsplit minimum sum weights node order considered splitting. maxdepth maximum depth tree. default maxdepth = Inf means restrictions applied tree sizes. teststat character specifying type test statistic applied. testtype character specifying compute distribution test statistic. mincriterion value test statistic (testtype == \"Teststatistic\"), 1 - p-value (values testtype) must exceeded order implement split. ... options pass partykit::ctree() partykit::cforest(). mtry Number input variables randomly sampled candidates node random forest like algorithms. default mtry = Inf means random selection takes place. 
ntree Number trees grow forest.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/ctree_train.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"A wrapper function for conditional inference tree models — ctree_train","text":"object class party (ctree) cforest.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/ctree_train.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"A wrapper function for conditional inference tree models — ctree_train","text":"","code":"if (rlang::is_installed(c(\"modeldata\", \"partykit\"))) { data(bivariate, package = \"modeldata\") ctree_train(Class ~ ., data = bivariate_train) ctree_train(Class ~ ., data = bivariate_train, maxdepth = 1) } #> #> Model formula: #> Class ~ A + B #> #> Fitted party: #> [1] root #> | [2] B <= 56.77622: Two (n = 100, err = 34.0%) #> | [3] B > 56.77622: One (n = 909, err = 33.8%) #> #> Number of inner nodes: 1 #> Number of terminal nodes: 2"},{"path":"https://parsnip.tidymodels.org/dev/reference/cubist_rules.html","id":null,"dir":"Reference","previous_headings":"","what":"Cubist rule-based regression models — cubist_rules","title":"Cubist rule-based regression models — cubist_rules","text":"cubist_rules() defines model derives simple feature rules tree ensemble creates regression models within rule. function can fit regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
Cubist¹² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/cubist_rules.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Cubist rule-based regression models — cubist_rules","text":"","code":"cubist_rules( mode = \"regression\", committees = NULL, neighbors = NULL, max_rules = NULL, engine = \"Cubist\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/cubist_rules.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Cubist rule-based regression models — cubist_rules","text":"mode single character string type model. possible value model \"regression\". committees non-negative integer (greater 100) number members ensemble. neighbors integer zero nine number training set instances used adjust model-based prediction. max_rules largest number rules. engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/cubist_rules.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Cubist rule-based regression models — cubist_rules","text":"Cubist rule-based ensemble regression model. basic model tree (Quinlan, 1992) created separate linear regression model corresponding terminal node. paths along model tree flattened rules rules simplified pruned. parameter min_n primary method controlling size tree max_rules controls number rules. Cubist ensembles created using committees, similar boosting. first model committee created, second model uses modified version outcome data based whether previous model - -predicted outcome. iteration m, new outcome y* computed using sample -predicted previous iteration, outcome adjusted next time likely -predicted compensate. adjustment continues ensemble iteration. See Kuhn Johnson (2013) details. 
model created, also option post-hoc adjustment uses training set (Quinlan, 1993). new sample predicted model, can modified nearest neighbors original training set. K neighbors, model-based predicted value adjusted neighbor using: t training set prediction w weight inverse distance neighbor. function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 cubist_rules(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/cubist_rules.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Cubist rule-based regression models — cubist_rules","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models Quinlan R (1992). \"Learning Continuous Classes.\" Proceedings 5th Australian Joint Conference Artificial Intelligence, pp. 343-348. Quinlan R (1993).\"Combining Instance-Based Model-Based Learning.\" Proceedings Tenth International Conference Machine Learning, pp. 236-243. Kuhn M Johnson K (2013). Applied Predictive Modeling. Springer.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/decision_tree.html","id":null,"dir":"Reference","previous_headings":"","what":"Decision trees — decision_tree","title":"Decision trees — decision_tree","text":"decision_tree() defines model set /statements creates tree-based structure. function can fit classification, regression, censored regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
rpart¹² C5.0 partykit² spark information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/decision_tree.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Decision trees — decision_tree","text":"","code":"decision_tree( mode = \"unknown\", engine = \"rpart\", cost_complexity = NULL, tree_depth = NULL, min_n = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/decision_tree.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Decision trees — decision_tree","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\", \"censored regression\". engine single character string specifying computational engine use fitting. cost_complexity positive number cost/complexity parameter (.k.. Cp) used CART models (specific engines ). tree_depth integer maximum depth tree. min_n integer minimum number data points node required node split .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/decision_tree.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Decision trees — decision_tree","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 decision_tree(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/decision_tree.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Decision trees — decision_tree","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/decision_tree.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Decision trees — decision_tree","text":"","code":"show_engines(\"decision_tree\") #> # A tibble: 5 × 2 #> engine mode #> #> 1 rpart classification #> 2 rpart regression #> 3 C5.0 classification #> 4 spark classification #> 5 spark regression decision_tree(mode = \"classification\", tree_depth = 5) #> Decision Tree Model Specification (classification) #> #> Main Arguments: #> tree_depth = 5 #> #> Computational engine: rpart #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/descriptors.html","id":null,"dir":"Reference","previous_headings":"","what":"Data Set Characteristics Available when Fitting Models — descriptors","title":"Data Set Characteristics Available when Fitting Models — descriptors","text":"using fit() functions variables available use arguments. example, user like choose argument value based current number rows data set, .obs() function can used. 
See Details .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/descriptors.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Data Set Characteristics Available when Fitting Models — descriptors","text":"","code":".cols() .preds() .obs() .lvls() .facts() .x() .y() .dat()"},{"path":"https://parsnip.tidymodels.org/dev/reference/descriptors.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Data Set Characteristics Available when Fitting Models — descriptors","text":"Existing functions: .obs(): current number rows data set. .preds(): number columns data set associated predictors prior dummy variable creation. .cols(): number predictor columns available dummy variables created (). .facts(): number factor predictors data set. .lvls(): outcome factor, table counts level (NA otherwise). .x(): predictors returned format given. Either data frame matrix. .y(): known outcomes returned format given. Either vector, matrix, data frame. .dat(): data frame containing predictors outcomes. fit_xy() used, outcomes attached column, ..y. example, use model formula circumference ~ . built-Orange data, values formula Tree ~ . used: use model fit, pass model specification. evaluation delayed time model run via fit() (variables listed available). 
example: descriptors found, computation descriptor values executed.","code":".preds() = 2 (the 2 remaining columns in `Orange`) .cols() = 5 (1 numeric column + 4 from Tree dummy variables) .obs() = 35 .lvls() = NA (no factor outcome) .facts() = 1 (the Tree predictor) .y() = (circumference as a vector) .x() = (The other 2 columns as a data frame) .dat() = (The full data set) .preds() = 2 (the 2 numeric columns in `Orange`) .cols() = 2 (same) .obs() = 35 .lvls() = c(\"1\" = 7, \"2\" = 7, \"3\" = 7, \"4\" = 7, \"5\" = 7) .facts() = 0 .y() = (Tree as a vector) .x() = (The other 2 columns as a data frame) .dat() = (The full data set) library(modeldata) data(\"lending_club\") rand_forest(mode = \"classification\", mtry = .cols() - 2)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_C5_rules_C5.0.html","id":null,"dir":"Reference","previous_headings":"","what":"C5.0 rule-based classification models — details_C5_rules_C5.0","title":"C5.0 rule-based classification models — details_C5_rules_C5.0","text":"C50::C5.0() fits model derives feature rules tree prediction. single tree boosted ensemble can used. rules::c5_fit() wrapper around function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_C5_rules_C5.0.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"C5.0 rule-based classification models — details_C5_rules_C5.0","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_C5_rules_C5.0.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"C5.0 rule-based classification models — details_C5_rules_C5.0","text":"model 2 tuning parameters: trees: # Trees (type: integer, default: 1L) min_n: Minimal Node Size (type: integer, default: 2L) Note C5.0 tool early stopping boosting less iterations boosting performed number requested. 
C5_rules() turns feature (although can re-enabled using C50::C5.0Control()).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_C5_rules_C5.0.html","id":"translation-from-parsnip-to-the-underlying-model-call-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (classification)","title":"C5.0 rule-based classification models — details_C5_rules_C5.0","text":"rules extension package required fit model.","code":"library(rules) C5_rules( trees = integer(1), min_n = integer(1) ) %>% set_engine(\"C5.0\") %>% set_mode(\"classification\") %>% translate() ## C5.0 Model Specification (classification) ## ## Main Arguments: ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: C5.0 ## ## Model fit template: ## rules::c5_fit(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## trials = integer(1), minCases = integer(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_C5_rules_C5.0.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"C5.0 rule-based classification models — details_C5_rules_C5.0","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_C5_rules_C5.0.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"C5.0 rule-based classification models — details_C5_rules_C5.0","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_C5_rules_C5.0.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"C5.0 rule-based classification models — details_C5_rules_C5.0","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_C5_rules_C5.0.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"C5.0 rule-based classification models — details_C5_rules_C5.0","text":"Quinlan R (1992). “Learning Continuous Classes.” Proceedings 5th Australian Joint Conference Artificial Intelligence, pp. 343-348. Quinlan R (1993).”Combining Instance-Based Model-Based Learning.” Proceedings Tenth International Conference Machine Learning, pp. 236-243. Kuhn M Johnson K (2013). Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_auto_ml_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Automatic machine learning via h2o — details_auto_ml_h2o","title":"Automatic machine learning via h2o — details_auto_ml_h2o","text":"h2o::h2o.automl defines automated model training process returns leaderboard models best performances.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_auto_ml_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Automatic machine learning via h2o — details_auto_ml_h2o","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_auto_ml_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Automatic machine learning via h2o — details_auto_ml_h2o","text":"model tuning parameters. Engine arguments interest max_runtime_secs max_models: controls maximum running time number models build automatic process. exclude_algos include_algos: character vector indicating excluded included algorithms model building. see full list supported models, see details section h2o::h2o.automl(). validation: integer 0 1 specifying proportion training data reserved validation set. 
used h2o performance assessment potential early stopping.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_auto_ml_h2o.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Automatic machine learning via h2o — details_auto_ml_h2o","text":"agua::h2o_train_auto() wrapper around h2o::h2o.automl().","code":"auto_ml() %>% set_engine(\"h2o\") %>% set_mode(\"regression\") %>% translate() ## Automatic Machine Learning Model Specification (regression) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_auto(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), verbosity = NULL)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_auto_ml_h2o.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Automatic machine learning via h2o — details_auto_ml_h2o","text":"","code":"auto_ml() %>% set_engine(\"h2o\") %>% set_mode(\"classification\") %>% translate() ## Automatic Machine Learning Model Specification (classification) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_auto(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), verbosity = NULL)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_auto_ml_h2o.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Automatic machine learning via h2o — details_auto_ml_h2o","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. 
using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_auto_ml_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Automatic machine learning via h2o — details_auto_ml_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_auto_ml_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Automatic machine learning via h2o — details_auto_ml_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mars_earth.html","id":null,"dir":"Reference","previous_headings":"","what":"Bagged MARS via earth — details_bag_mars_earth","title":"Bagged MARS via earth — details_bag_mars_earth","text":"baguette::bagger() creates collection MARS models forming ensemble. 
models ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mars_earth.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Bagged MARS via earth — details_bag_mars_earth","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mars_earth.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Bagged MARS via earth — details_bag_mars_earth","text":"model 3 tuning parameters: prod_degree: Degree Interaction (type: integer, default: 1L) prune_method: Pruning Method (type: character, default: ‘backward’) num_terms: # Model Terms (type: integer, default: see ) default value num_terms depends number predictor columns. data frame x, default min(200, max(20, 2 * ncol(x))) + 1 (see earth::earth() reference ).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mars_earth.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Bagged MARS via earth — details_bag_mars_earth","text":"baguette extension package required fit model.","code":"bag_mars(num_terms = integer(1), prod_degree = integer(1), prune_method = character(1)) %>% set_engine(\"earth\") %>% set_mode(\"regression\") %>% translate() ## Bagged MARS Model Specification (regression) ## ## Main Arguments: ## num_terms = integer(1) ## prod_degree = integer(1) ## prune_method = character(1) ## ## Computational engine: earth ## ## Model fit template: ## baguette::bagger(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), nprune = integer(1), degree = integer(1), ## pmethod = character(1), base_model = 
\"MARS\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mars_earth.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Bagged MARS via earth — details_bag_mars_earth","text":"baguette extension package required fit model.","code":"library(baguette) bag_mars( num_terms = integer(1), prod_degree = integer(1), prune_method = character(1) ) %>% set_engine(\"earth\") %>% set_mode(\"classification\") %>% translate() ## Bagged MARS Model Specification (classification) ## ## Main Arguments: ## num_terms = integer(1) ## prod_degree = integer(1) ## prune_method = character(1) ## ## Computational engine: earth ## ## Model fit template: ## baguette::bagger(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), nprune = integer(1), degree = integer(1), ## pmethod = character(1), base_model = \"MARS\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mars_earth.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Bagged MARS via earth — details_bag_mars_earth","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mars_earth.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Bagged MARS via earth — details_bag_mars_earth","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. 
Note earth package documentation : “current implementation, building models weights can slow.”","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mars_earth.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Bagged MARS via earth — details_bag_mars_earth","text":"Breiman, L. 1996. “Bagging predictors”. Machine Learning. 24 (2): 123-140 Friedman, J. 1991. “Multivariate Adaptive Regression Splines.” Annals Statistics, vol. 19, . 1, pp. 1-67. Milborrow, S. “Notes earth package.” Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mlp_nnet.html","id":null,"dir":"Reference","previous_headings":"","what":"Bagged neural networks via nnet — details_bag_mlp_nnet","title":"Bagged neural networks via nnet — details_bag_mlp_nnet","text":"baguette::bagger() creates collection neural networks forming ensemble. trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mlp_nnet.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Bagged neural networks via nnet — details_bag_mlp_nnet","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mlp_nnet.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Bagged neural networks via nnet — details_bag_mlp_nnet","text":"model 3 tuning parameters: hidden_units: # Hidden Units (type: integer, default: 10L) penalty: Amount Regularization (type: double, default: 0.0) epochs: # Epochs (type: integer, default: 1000L) defaults set baguette package different 
nnet::nnet().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mlp_nnet.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Bagged neural networks via nnet — details_bag_mlp_nnet","text":"baguette extension package required fit model.","code":"library(baguette) bag_mlp(penalty = double(1), hidden_units = integer(1)) %>% set_engine(\"nnet\") %>% set_mode(\"classification\") %>% translate() ## Bagged Neural Network Model Specification (classification) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## ## Computational engine: nnet ## ## Model fit template: ## baguette::bagger(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), size = integer(1), decay = double(1), ## base_model = \"nnet\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mlp_nnet.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Bagged neural networks via nnet — details_bag_mlp_nnet","text":"baguette extension package required fit model.","code":"library(baguette) bag_mlp(penalty = double(1), hidden_units = integer(1)) %>% set_engine(\"nnet\") %>% set_mode(\"regression\") %>% translate() ## Bagged Neural Network Model Specification (regression) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## ## Computational engine: nnet ## ## Model fit template: ## baguette::bagger(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), size = integer(1), decay = double(1), ## base_model = \"nnet\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mlp_nnet.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing 
requirements","title":"Bagged neural networks via nnet — details_bag_mlp_nnet","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mlp_nnet.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Bagged neural networks via nnet — details_bag_mlp_nnet","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mlp_nnet.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Bagged neural networks via nnet — details_bag_mlp_nnet","text":"Breiman L. 1996. “Bagging predictors”. Machine Learning. 24 (2): 123-140 Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_C5.0.html","id":null,"dir":"Reference","previous_headings":"","what":"Bagged trees via C5.0 — details_bag_tree_C5.0","title":"Bagged trees via C5.0 — details_bag_tree_C5.0","text":"baguette::bagger() creates collection decision trees forming ensemble. 
trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_C5.0.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Bagged trees via C5.0 — details_bag_tree_C5.0","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_C5.0.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Bagged trees via C5.0 — details_bag_tree_C5.0","text":"model 1 tuning parameters: min_n: Minimal Node Size (type: integer, default: 2L)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_C5.0.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Bagged trees via C5.0 — details_bag_tree_C5.0","text":"baguette extension package required fit model.","code":"library(baguette) bag_tree(min_n = integer()) %>% set_engine(\"C5.0\") %>% set_mode(\"classification\") %>% translate() ## Bagged Decision Tree Model Specification (classification) ## ## Main Arguments: ## cost_complexity = 0 ## min_n = integer() ## ## Computational engine: C5.0 ## ## Model fit template: ## baguette::bagger(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## minCases = integer(), base_model = \"C5.0\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_C5.0.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Bagged trees via C5.0 — details_bag_tree_C5.0","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. 
Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_C5.0.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Bagged trees via C5.0 — details_bag_tree_C5.0","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_C5.0.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Bagged trees via C5.0 — details_bag_tree_C5.0","text":"Breiman, L. 1996. “Bagging predictors”. Machine Learning. 24 (2): 123-140 Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":null,"dir":"Reference","previous_headings":"","what":"Bagged trees via rpart — details_bag_tree_rpart","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"baguette::bagger() ipred::bagging() create collections decision trees forming ensemble. 
trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"engine, multiple modes: classification, regression, censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"model 4 tuning parameters: class_cost: Class Cost (type: double, default: (see )) tree_depth: Tree Depth (type: integer, default: 30L) min_n: Minimal Node Size (type: integer, default: 2L) cost_complexity: Cost-Complexity Parameter (type: double, default: 0.01) class_cost parameter, value can non-negative scalar class cost (cost 1 means extra cost). useful first level outcome factor minority class. 
case, values zero one can used bias second level factor.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"baguette extension package required fit model.","code":"library(baguette) bag_tree(tree_depth = integer(1), min_n = integer(1), cost_complexity = double(1)) %>% set_engine(\"rpart\") %>% set_mode(\"classification\") %>% translate() ## Bagged Decision Tree Model Specification (classification) ## ## Main Arguments: ## cost_complexity = double(1) ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: rpart ## ## Model fit template: ## baguette::bagger(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), cp = double(1), maxdepth = integer(1), ## minsplit = integer(1), base_model = \"CART\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"baguette extension package required fit model.","code":"library(baguette) bag_tree(tree_depth = integer(1), min_n = integer(1), cost_complexity = double(1)) %>% set_engine(\"rpart\") %>% set_mode(\"regression\") %>% translate() ## Bagged Decision Tree Model Specification (regression) ## ## Main Arguments: ## cost_complexity = double(1) ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: rpart ## ## Model fit template: ## baguette::bagger(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), cp = double(1), maxdepth = integer(1), ## minsplit = integer(1), base_model = 
\"CART\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"translation-from-parsnip-to-the-original-package-censored-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (censored regression)","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"censored extension package required fit model.","code":"library(censored) bag_tree(tree_depth = integer(1), min_n = integer(1), cost_complexity = double(1)) %>% set_engine(\"rpart\") %>% set_mode(\"censored regression\") %>% translate() ## Bagged Decision Tree Model Specification (censored regression) ## ## Main Arguments: ## cost_complexity = double(1) ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: rpart ## ## Model fit template: ## ipred::bagging(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), cp = double(1), maxdepth = integer(1), ## minsplit = integer(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"Predictions type \"time\" predictions median survival time.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"Breiman L. 1996. “Bagging predictors”. Machine Learning. 24 (2): 123-140 Hothorn T, Lausen B, Benner , Radespiel-Troeger M. 2004. Bagging Survival Trees. Statistics Medicine, 23(1), 77–91. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bart_dbarts.html","id":null,"dir":"Reference","previous_headings":"","what":"Bayesian additive regression trees via dbarts — details_bart_dbarts","title":"Bayesian additive regression trees via dbarts — details_bart_dbarts","text":"dbarts::bart() creates ensemble tree-based model whose training assembly determined using Bayesian analysis.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bart_dbarts.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Bayesian additive regression trees via dbarts — details_bart_dbarts","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bart_dbarts.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Bayesian additive regression trees via dbarts — details_bart_dbarts","text":"model 4 tuning parameters: trees: # Trees (type: integer, default: 200L) prior_terminal_node_coef: Terminal Node Prior 
Coefficient (type: double, default: 0.95) prior_terminal_node_expo: Terminal Node Prior Exponent (type: double, default: 2.00) prior_outcome_range: Prior Outcome Range (type: double, default: 2.00)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bart_dbarts.html","id":"important-engine-specific-options","dir":"Reference","previous_headings":"","what":"Important engine-specific options","title":"Bayesian additive regression trees via dbarts — details_bart_dbarts","text":"relevant arguments can passed set_engine(): keepevery, n.thin: Every keepevery draw kept returned user. Useful “thinning” samples. ntree, n.trees: number trees sum--trees formulation. ndpost, n.samples: number posterior draws burn , ndpost / keepevery actually returned. nskip, n.burn: Number MCMC iterations treated burn . nchain, n.chains: Integer specifying many independent tree sets fits calculated. nthread, n.threads: Integer specifying many threads use. Depending CPU architecture, using number chains can degrade performance small/medium data sets. calculations may executed single threaded regardless. 
combinechains, combineChains: Logical; TRUE, samples returned arrays dimensions equal nchain times ndpost times number observations.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bart_dbarts.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Bayesian additive regression trees via dbarts — details_bart_dbarts","text":"","code":"bart( trees = integer(1), prior_terminal_node_coef = double(1), prior_terminal_node_expo = double(1), prior_outcome_range = double(1) ) %>% set_engine(\"dbarts\") %>% set_mode(\"classification\") %>% translate() ## BART Model Specification (classification) ## ## Main Arguments: ## trees = integer(1) ## prior_terminal_node_coef = double(1) ## prior_terminal_node_expo = double(1) ## prior_outcome_range = double(1) ## ## Computational engine: dbarts ## ## Model fit template: ## dbarts::bart(x = missing_arg(), y = missing_arg(), ntree = integer(1), ## base = double(1), power = double(1), k = double(1), verbose = FALSE, ## keeptrees = TRUE, keepcall = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bart_dbarts.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Bayesian additive regression trees via dbarts — details_bart_dbarts","text":"","code":"bart( trees = integer(1), prior_terminal_node_coef = double(1), prior_terminal_node_expo = double(1), prior_outcome_range = double(1) ) %>% set_engine(\"dbarts\") %>% set_mode(\"regression\") %>% translate() ## BART Model Specification (regression) ## ## Main Arguments: ## trees = integer(1) ## prior_terminal_node_coef = double(1) ## prior_terminal_node_expo = double(1) ## prior_outcome_range = double(1) ## ## Computational engine: dbarts ## ## Model fit template: 
## dbarts::bart(x = missing_arg(), y = missing_arg(), ntree = integer(1), ## base = double(1), power = double(1), k = double(1), verbose = FALSE, ## keeptrees = TRUE, keepcall = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bart_dbarts.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Bayesian additive regression trees via dbarts — details_bart_dbarts","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. dbarts::bart() also convert factors indicators user create first.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bart_dbarts.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Bayesian additive regression trees via dbarts — details_bart_dbarts","text":"Chipman, George, McCulloch. “BART: Bayesian additive regression trees.” Ann. Appl. Stat. 4 (1) 266 - 298, March 2010.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees via C5.0 — details_boost_tree_C5.0","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"C50::C5.0() creates series classification trees forming ensemble. tree depends results previous trees. 
trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"model 3 tuning parameters: trees: # Trees (type: integer, default: 15L) min_n: Minimal Node Size (type: integer, default: 2L) sample_size: Proportion Observations Sampled (type: double, default: 1.0) implementation C5.0 limits number trees 1 100.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"C5.0_train() wrapper around C50::C5.0() makes easier run model.","code":"boost_tree(trees = integer(), min_n = integer(), sample_size = numeric()) %>% set_engine(\"C5.0\") %>% set_mode(\"classification\") %>% translate() ## Boosted Tree Model Specification (classification) ## ## Main Arguments: ## trees = integer() ## min_n = integer() ## sample_size = numeric() ## ## Computational engine: C5.0 ## ## Model fit template: ## parsnip::C5.0_train(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## trials = integer(), minCases = integer(), sample = numeric())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Boosted trees via C5.0 — 
details_boost_tree_C5.0","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"early-stopping","dir":"Reference","previous_headings":"","what":"Early stopping","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"default, early stopping used. use complete set boosting iterations, pass earlyStopping = FALSE set_engine(). 
Also, unlikely early stopping occur sample_size = 1.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"“Fitting Predicting parsnip” article contains examples boost_tree() \"C5.0\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees via h2o — details_boost_tree_h2o","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"h2o::h2o.xgboost() creates series decision trees forming ensemble. tree depends results previous trees. 
trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"model 8 tuning parameters: trees: # Trees (type: integer, default: 50) tree_depth: Tree Depth (type: integer, default: 6) min_n: Minimal Node Size (type: integer, default: 1) learn_rate: Learning Rate (type: double, default: 0.3) sample_size: # Observations Sampled (type: integer, default: 1) mtry: # Randomly Selected Predictors (type: integer, default: 1) loss_reduction: Minimum Loss Reduction (type: double, default: 0) stop_iter: # Iterations Stopping (type: integer, default: 0) min_n represents fewest allowed observations terminal node, h2o::h2o.xgboost() allows one row leaf default. stop_iter controls early stopping rounds based convergence engine parameter stopping_metric. default, h2o::h2o.xgboost() use early stopping. stop_iter 0, h2o::h2o.xgboost() uses logloss classification, deviance regression anonomaly score Isolation Forest. mostly useful used alongside engine parameter validation, proportion train-validation split, parsnip split pass two data frames h2o. 
h2o::h2o.xgboost() evaluate metric early stopping criteria validation set.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"agua::h2o_train_xgboost() wrapper around h2o::h2o.xgboost(). agua extension package required fit model.","code":"boost_tree( mtry = integer(), trees = integer(), tree_depth = integer(), learn_rate = numeric(), min_n = integer(), loss_reduction = numeric(), stop_iter = integer() ) %>% set_engine(\"h2o\") %>% set_mode(\"regression\") %>% translate() ## Boosted Tree Model Specification (regression) ## ## Main Arguments: ## mtry = integer() ## trees = integer() ## min_n = integer() ## tree_depth = integer() ## learn_rate = numeric() ## loss_reduction = numeric() ## stop_iter = integer() ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_xgboost(x = missing_arg(), y = missing_arg(), ## weights = missing_arg(), validation_frame = missing_arg(), ## col_sample_rate = integer(), ntrees = integer(), min_rows = integer(), ## max_depth = integer(), learn_rate = numeric(), min_split_improvement = numeric(), ## stopping_rounds = integer())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"agua extension package required fit model.","code":"boost_tree( mtry = integer(), trees = integer(), tree_depth = integer(), learn_rate = numeric(), min_n = integer(), loss_reduction = numeric(), stop_iter = integer() ) %>% set_engine(\"h2o\") %>% set_mode(\"classification\") %>% 
translate() ## Boosted Tree Model Specification (classification) ## ## Main Arguments: ## mtry = integer() ## trees = integer() ## min_n = integer() ## tree_depth = integer() ## learn_rate = numeric() ## loss_reduction = numeric() ## stop_iter = integer() ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_xgboost(x = missing_arg(), y = missing_arg(), ## weights = missing_arg(), validation_frame = missing_arg(), ## col_sample_rate = integer(), ntrees = integer(), min_rows = integer(), ## max_depth = integer(), learn_rate = numeric(), min_split_improvement = numeric(), ## stopping_rounds = integer())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":"preprocessing","dir":"Reference","previous_headings":"","what":"Preprocessing","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model. Non-numeric predictors (.e., factors) internally converted numeric. classification context, non-numeric outcomes (.e., factors) also internally converted numeric.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":"interpreting-mtry","dir":"Reference","previous_headings":"","what":"Interpreting mtry","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"mtry argument denotes number predictors randomly sampled split creating tree models. engines, \"xgboost\", \"xrf\", \"lightgbm\", interpret analogue mtry argument proportion predictors randomly sampled split rather count. settings, tuning preprocessors influence number predictors, parameterization quite helpful—interpreting mtry proportion means [0, 1] always valid range parameter, regardless input data. 
parsnip extensions accommodate parameterization using counts argument: logical indicating whether mtry interpreted number predictors randomly sampled split. TRUE indicates mtry interpreted sense count, FALSE indicates argument interpreted sense proportion. mtry main model argument boost_tree() rand_forest(), thus engine-specific interface. , regardless engine, counts defaults TRUE. engines support proportion interpretation (currently \"xgboost\" \"xrf\", via rules package, \"lightgbm\" via bonsai package) user can pass counts = FALSE argument set_engine() supply mtry values within [0, 1].","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. 
learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees via lightgbm — details_boost_tree_lightgbm","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"lightgbm::lgb.train() creates series decision trees forming ensemble. tree depends results previous trees. trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"engine, multiple modes: regression classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"model 6 tuning parameters: tree_depth: Tree Depth (type: integer, default: -1) trees: # Trees (type: integer, default: 100) learn_rate: Learning Rate (type: double, default: 0.1) mtry: # Randomly Selected Predictors (type: integer, default: see ) min_n: Minimal Node Size (type: integer, default: 20) loss_reduction: Minimum Loss Reduction (type: double, default: 0) mtry parameter gives number predictors randomly sampled split. default use predictors. Rather number, lightgbm::lgb.train()’s feature_fraction argument encodes mtry proportion predictors randomly sampled split. parsnip translates mtry, supplied number predictors, proportion hood. , user still supply argument mtry boost_tree(), sense number rather proportion; passing mtry lightgbm::lgb.train(), parsnip convert mtry value proportion. Note parsnip’s translation can overridden via counts argument, supplied set_engine(). 
default, counts set TRUE, supplying argument counts = FALSE allows user supply mtry proportion rather number.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"bonsai extension package required fit model.","code":"boost_tree( mtry = integer(), trees = integer(), tree_depth = integer(), learn_rate = numeric(), min_n = integer(), loss_reduction = numeric() ) %>% set_engine(\"lightgbm\") %>% set_mode(\"regression\") %>% translate() ## Boosted Tree Model Specification (regression) ## ## Main Arguments: ## mtry = integer() ## trees = integer() ## min_n = integer() ## tree_depth = integer() ## learn_rate = numeric() ## loss_reduction = numeric() ## ## Computational engine: lightgbm ## ## Model fit template: ## bonsai::train_lightgbm(x = missing_arg(), y = missing_arg(), ## feature_fraction_bynode = integer(), num_iterations = integer(), ## min_data_in_leaf = integer(), max_depth = integer(), learning_rate = numeric(), ## min_gain_to_split = numeric(), verbose = -1, num_threads = 0, ## seed = sample.int(10^5, 1), deterministic = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"bonsai extension package required fit model. 
bonsai::train_lightgbm() wrapper around lightgbm::lgb.train() (functions) make easier run model.","code":"boost_tree( mtry = integer(), trees = integer(), tree_depth = integer(), learn_rate = numeric(), min_n = integer(), loss_reduction = numeric() ) %>% set_engine(\"lightgbm\") %>% set_mode(\"classification\") %>% translate() ## Boosted Tree Model Specification (classification) ## ## Main Arguments: ## mtry = integer() ## trees = integer() ## min_n = integer() ## tree_depth = integer() ## learn_rate = numeric() ## loss_reduction = numeric() ## ## Computational engine: lightgbm ## ## Model fit template: ## bonsai::train_lightgbm(x = missing_arg(), y = missing_arg(), ## feature_fraction_bynode = integer(), num_iterations = integer(), ## min_data_in_leaf = integer(), max_depth = integer(), learning_rate = numeric(), ## min_gain_to_split = numeric(), verbose = -1, num_threads = 0, ## seed = sample.int(10^5, 1), deterministic = TRUE)"},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"preprocessing","dir":"Reference","previous_headings":"","what":"Preprocessing","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model. Non-numeric predictors (.e., factors) internally converted numeric. classification context, non-numeric outcomes (.e., factors) also internally converted numeric.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"interpreting-mtry","dir":"Reference","previous_headings":"","what":"Interpreting mtry","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"mtry argument denotes number predictors randomly sampled split creating tree models. 
engines, \"xgboost\", \"xrf\", \"lightgbm\", interpret analogue mtry argument proportion predictors randomly sampled split rather count. settings, tuning preprocessors influence number predictors, parameterization quite helpful—interpreting mtry proportion means [0, 1] always valid range parameter, regardless input data. parsnip extensions accommodate parameterization using counts argument: logical indicating whether mtry interpreted number predictors randomly sampled split. TRUE indicates mtry interpreted sense count, FALSE indicates argument interpreted sense proportion. mtry main model argument boost_tree() rand_forest(), thus engine-specific interface. , regardless engine, counts defaults TRUE. engines support proportion interpretation (currently \"xgboost\" \"xrf\", via rules package, \"lightgbm\" via bonsai package) user can pass counts = FALSE argument set_engine() supply mtry values within [0, 1].","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"bagging","dir":"Reference","previous_headings":"","what":"Bagging","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"sample_size argument translated bagging_fraction parameter param argument lgb.train. argument interpreted lightgbm proportion rather count, bonsai internally reparameterizes sample_size argument dials::sample_prop() tuning. effectively enable bagging, user also need set bagging_freq argument lightgbm. 
bagging_freq defaults 0, means bagging disabled, bagging_freq argument k means booster perform bagging every kth boosting iteration. Thus, default, sample_size argument ignored without setting argument manually. boosting libraries, like xgboost, analogous argument bagging_freq use k = 1 analogue bagging_fraction $(0, 1)$. bonsai thus automatically set bagging_freq = 1 set_engine(\"lightgbm\", ...) sample_size (.e. bagging_fraction) equal 1 bagging_freq value supplied. default can overridden setting bagging_freq argument set_engine() manually.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"verbosity","dir":"Reference","previous_headings":"","what":"Verbosity","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"bonsai quiets much logging output lightgbm::lgb.train() default. default settings, logged warnings errors still passed user. print logs training, set quiet = TRUE.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"“Introduction bonsai” article contains examples boost_tree() \"lightgbm\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"LightGBM: Highly Efficient Gradient Boosting Decision Tree Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_mboost.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees — details_boost_tree_mboost","title":"Boosted trees — details_boost_tree_mboost","text":"mboost::blackboost() fits series decision trees forming ensemble. tree depends results previous trees. trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_mboost.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Boosted trees — details_boost_tree_mboost","text":"engine, single mode: censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_mboost.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Boosted trees — details_boost_tree_mboost","text":"model 5 tuning parameters: mtry: # Randomly Selected Predictors (type: integer, default: see ) trees: # Trees (type: integer, default: 100L) tree_depth: Tree Depth (type: integer, default: 2L) min_n: Minimal Node Size (type: integer, default: 10L) loss_reduction: Minimum Loss Reduction (type: double, default: 0) mtry parameter related number predictors. default use predictors.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_mboost.html","id":"translation-from-parsnip-to-the-original-package-censored-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (censored regression)","title":"Boosted trees — details_boost_tree_mboost","text":"censored extension package required fit model. 
censored::blackboost_train() wrapper around mboost::blackboost() (functions) makes easier run model.","code":"library(censored) boost_tree() %>% set_engine(\"mboost\") %>% set_mode(\"censored regression\") %>% translate() ## Boosted Tree Model Specification (censored regression) ## ## Computational engine: mboost ## ## Model fit template: ## censored::blackboost_train(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), family = mboost::CoxPH())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_mboost.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Boosted trees — details_boost_tree_mboost","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_mboost.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Boosted trees — details_boost_tree_mboost","text":"Predictions type \"time\" predictions mean survival time.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_mboost.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Boosted trees — details_boost_tree_mboost","text":"Buehlmann P, Hothorn T. 2007. Boosting algorithms: regularization, prediction model fitting. Statistical Science, 22(4), 477–505. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees via Spark — details_boost_tree_spark","title":"Boosted trees via Spark — details_boost_tree_spark","text":"sparklyr::ml_gradient_boosted_trees() creates series decision trees forming ensemble. tree depends results previous trees. trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Boosted trees via Spark — details_boost_tree_spark","text":"engine, multiple modes: classification regression. However, multiclass classification supported yet.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Boosted trees via Spark — details_boost_tree_spark","text":"model 7 tuning parameters: tree_depth: Tree Depth (type: integer, default: 5L) trees: # Trees (type: integer, default: 20L) learn_rate: Learning Rate (type: double, default: 0.1) mtry: # Randomly Selected Predictors (type: integer, default: see ) min_n: Minimal Node Size (type: integer, default: 1L) loss_reduction: Minimum Loss Reduction (type: double, default: 0.0) sample_size: # Observations Sampled (type: integer, default: 1.0) mtry parameter related number predictors. default depends model mode. 
classification, square root number predictors used regression, one third predictors sampled.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Boosted trees via Spark — details_boost_tree_spark","text":"","code":"boost_tree( mtry = integer(), trees = integer(), min_n = integer(), tree_depth = integer(), learn_rate = numeric(), loss_reduction = numeric(), sample_size = numeric() ) %>% set_engine(\"spark\") %>% set_mode(\"regression\") %>% translate() ## Boosted Tree Model Specification (regression) ## ## Main Arguments: ## mtry = integer() ## trees = integer() ## min_n = integer() ## tree_depth = integer() ## learn_rate = numeric() ## loss_reduction = numeric() ## sample_size = numeric() ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_gradient_boosted_trees(x = missing_arg(), formula = missing_arg(), ## type = \"regression\", feature_subset_strategy = integer(), ## max_iter = integer(), min_instances_per_node = min_rows(integer(0), ## x), max_depth = integer(), step_size = numeric(), min_info_gain = numeric(), ## subsampling_rate = numeric(), seed = sample.int(10^5, 1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Boosted trees via Spark — details_boost_tree_spark","text":"","code":"boost_tree( mtry = integer(), trees = integer(), min_n = integer(), tree_depth = integer(), learn_rate = numeric(), loss_reduction = numeric(), sample_size = numeric() ) %>% set_engine(\"spark\") %>% set_mode(\"classification\") %>% translate() ## Boosted Tree Model Specification 
(classification) ## ## Main Arguments: ## mtry = integer() ## trees = integer() ## min_n = integer() ## tree_depth = integer() ## learn_rate = numeric() ## loss_reduction = numeric() ## sample_size = numeric() ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_gradient_boosted_trees(x = missing_arg(), formula = missing_arg(), ## type = \"classification\", feature_subset_strategy = integer(), ## max_iter = integer(), min_instances_per_node = min_rows(integer(0), ## x), max_depth = integer(), step_size = numeric(), min_info_gain = numeric(), ## subsampling_rate = numeric(), seed = sample.int(10^5, 1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Boosted trees via Spark — details_boost_tree_spark","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Boosted trees via Spark — details_boost_tree_spark","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. Note , spark engines, case_weight argument value character string specify column numeric case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Boosted trees via Spark — details_boost_tree_spark","text":"models created using \"spark\" engine, several things consider. 
formula interface via fit() available; using fit_xy() generate error. predictions always Spark table format. names documented without dots. equivalent factor columns Spark tables class predictions returned character columns. retain model object new R session (via save()), model$fit element parsnip object serialized via ml_save(object$fit) separately saved disk. new session, object can reloaded reattached parsnip object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Boosted trees via Spark — details_boost_tree_spark","text":"Luraschi, J, K Kuo, E Ruiz. 2019. Mastering Spark R. O’Reilly Media Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees via xgboost — details_boost_tree_xgboost","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"xgboost::xgb.train() creates series decision trees forming ensemble. tree depends results previous trees. 
trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"model 8 tuning parameters: tree_depth: Tree Depth (type: integer, default: 6L) trees: # Trees (type: integer, default: 15L) learn_rate: Learning Rate (type: double, default: 0.3) mtry: # Randomly Selected Predictors (type: integer, default: see ) min_n: Minimal Node Size (type: integer, default: 1L) loss_reduction: Minimum Loss Reduction (type: double, default: 0.0) sample_size: Proportion Observations Sampled (type: double, default: 1.0) stop_iter: # Iterations Stopping (type: integer, default: Inf) mtry, default value NULL translates using available columns.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"","code":"boost_tree( mtry = integer(), trees = integer(), min_n = integer(), tree_depth = integer(), learn_rate = numeric(), loss_reduction = numeric(), sample_size = numeric(), stop_iter = integer() ) %>% set_engine(\"xgboost\") %>% set_mode(\"regression\") %>% translate() ## Boosted Tree Model Specification (regression) ## ## Main Arguments: ## mtry = integer() ## trees = integer() ## min_n = integer() ## tree_depth = integer() ## learn_rate = numeric() ## 
loss_reduction = numeric() ## sample_size = numeric() ## stop_iter = integer() ## ## Computational engine: xgboost ## ## Model fit template: ## parsnip::xgb_train(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## colsample_bynode = integer(), nrounds = integer(), min_child_weight = integer(), ## max_depth = integer(), eta = numeric(), gamma = numeric(), ## subsample = numeric(), early_stop = integer(), nthread = 1, ## verbose = 0)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"xgb_train() wrapper around xgboost::xgb.train() (functions) makes easier run model.","code":"boost_tree( mtry = integer(), trees = integer(), min_n = integer(), tree_depth = integer(), learn_rate = numeric(), loss_reduction = numeric(), sample_size = numeric(), stop_iter = integer() ) %>% set_engine(\"xgboost\") %>% set_mode(\"classification\") %>% translate() ## Boosted Tree Model Specification (classification) ## ## Main Arguments: ## mtry = integer() ## trees = integer() ## min_n = integer() ## tree_depth = integer() ## learn_rate = numeric() ## loss_reduction = numeric() ## sample_size = numeric() ## stop_iter = integer() ## ## Computational engine: xgboost ## ## Model fit template: ## parsnip::xgb_train(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## colsample_bynode = integer(), nrounds = integer(), min_child_weight = integer(), ## max_depth = integer(), eta = numeric(), gamma = numeric(), ## subsample = numeric(), early_stop = integer(), nthread = 1, ## verbose = 0)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing 
requirements","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"xgboost means translate factor predictors grouped splits. Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit.model_spec(), parsnip convert factor columns indicators using one-hot encoding. classification, non-numeric outcomes (.e., factors) internally converted numeric. binary classification, event_level argument set_engine() can set either \"first\" \"second\" specify level used event. can helpful watchlist used monitor performance xgboost training process.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"interfacing-with-the-params-argument","dir":"Reference","previous_headings":"","what":"Interfacing with the params argument","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"xgboost function parsnip indirectly wraps, xgboost::xgb.train(), takes arguments via params list argument. supply engine-specific arguments documented xgboost::xgb.train() arguments passed via params, supply list elements directly named arguments set_engine() rather elements params. example, pass non-default evaluation metric like : …rather : parsnip route arguments needed. case arguments passed params via set_engine(), parsnip warn re-route arguments needed. 
Note, though, arguments passed params tuned.","code":"# good boost_tree() %>% set_engine(\"xgboost\", eval_metric = \"mae\") ## Boosted Tree Model Specification (unknown mode) ## ## Engine-Specific Arguments: ## eval_metric = mae ## ## Computational engine: xgboost # bad boost_tree() %>% set_engine(\"xgboost\", params = list(eval_metric = \"mae\")) ## Boosted Tree Model Specification (unknown mode) ## ## Engine-Specific Arguments: ## params = list(eval_metric = \"mae\") ## ## Computational engine: xgboost"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"sparse-matrices","dir":"Reference","previous_headings":"","what":"Sparse matrices","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"xgboost requires data sparse format. predictor data already format, use fit_xy.model_spec() pass model function. Otherwise, parsnip converts data format.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"parallel-processing","dir":"Reference","previous_headings":"","what":"Parallel processing","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"default, model trained without parallel processing. can change passing nthread parameter set_engine(). However, unwise combine external parallel processing using package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"interpreting-mtry","dir":"Reference","previous_headings":"","what":"Interpreting mtry","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"mtry argument denotes number predictors randomly sampled split creating tree models. engines, \"xgboost\", \"xrf\", \"lightgbm\", interpret analogue mtry argument proportion predictors randomly sampled split rather count. 
settings, tuning preprocessors influence number predictors, parameterization quite helpful—interpreting mtry proportion means [0, 1] always valid range parameter, regardless input data. parsnip extensions accommodate parameterization using counts argument: logical indicating whether mtry interpreted number predictors randomly sampled split. TRUE indicates mtry interpreted sense count, FALSE indicates argument interpreted sense proportion. mtry main model argument boost_tree() rand_forest(), thus engine-specific interface. , regardless engine, counts defaults TRUE. engines support proportion interpretation (currently \"xgboost\" \"xrf\", via rules package, \"lightgbm\" via bonsai package) user can pass counts = FALSE argument set_engine() supply mtry values within [0, 1].","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"early-stopping","dir":"Reference","previous_headings":"","what":"Early stopping","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"stop_iter() argument allows model prematurely stop training objective function improve within early_stop iterations. best way use feature conjunction internal validation set. , pass validation parameter xgb_train() via parsnip set_engine() function. proportion training set reserved measuring performance (stopping early). model specification early_stop >= trees, early_stop converted trees - 1 warning issued. Note , since validation argument provides alternative interface watchlist, watchlist argument guarded parsnip ignored (warning) passed.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"objective-function","dir":"Reference","previous_headings":"","what":"Objective function","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"parsnip chooses objective function based characteristics outcome. 
use different loss, pass objective argument set_engine() directly.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package. Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"“Fitting Predicting parsnip” article contains examples boost_tree() \"xgboost\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"XGBoost: Scalable Tree Boosting System Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_cubist_rules_Cubist.html","id":null,"dir":"Reference","previous_headings":"","what":"Cubist rule-based regression models — details_cubist_rules_Cubist","title":"Cubist rule-based regression models — details_cubist_rules_Cubist","text":"Cubist::cubist() fits model derives simple feature rules tree ensemble uses creates regression models within rule. 
rules::cubist_fit() wrapper around function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_cubist_rules_Cubist.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Cubist rule-based regression models — details_cubist_rules_Cubist","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_cubist_rules_Cubist.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Cubist rule-based regression models — details_cubist_rules_Cubist","text":"model 3 tuning parameters: committees: # Committees (type: integer, default: 1L) neighbors: # Nearest Neighbors (type: integer, default: 0L) max_rules: Max. Rules (type: integer, default: NA_integer)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_cubist_rules_Cubist.html","id":"translation-from-parsnip-to-the-underlying-model-call-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (regression)","title":"Cubist rule-based regression models — details_cubist_rules_Cubist","text":"rules extension package required fit model.","code":"library(rules) cubist_rules( committees = integer(1), neighbors = integer(1), max_rules = integer(1) ) %>% set_engine(\"Cubist\") %>% set_mode(\"regression\") %>% translate() ## Cubist Model Specification (regression) ## ## Main Arguments: ## committees = integer(1) ## neighbors = integer(1) ## max_rules = integer(1) ## ## Computational engine: Cubist ## ## Model fit template: ## rules::cubist_fit(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## committees = integer(1), neighbors = integer(1), max_rules = integer(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_cubist_rules_Cubist.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Cubist 
rule-based regression models — details_cubist_rules_Cubist","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_cubist_rules_Cubist.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Cubist rule-based regression models — details_cubist_rules_Cubist","text":"Quinlan R (1992). “Learning Continuous Classes.” Proceedings 5th Australian Joint Conference Artificial Intelligence, pp. 343-348. Quinlan R (1993).”Combining Instance-Based Model-Based Learning.” Proceedings Tenth International Conference Machine Learning, pp. 236-243. Kuhn M Johnson K (2013). Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":null,"dir":"Reference","previous_headings":"","what":"Decision trees via C5.0 — details_decision_tree_C5.0","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"C50::C5.0() fits model set /statements creates tree-based structure.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"model 1 tuning parameters: min_n: Minimal Node Size (type: integer, default: 
2L)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"C5.0_train() wrapper around C50::C5.0() makes easier run model.","code":"decision_tree(min_n = integer()) %>% set_engine(\"C5.0\") %>% set_mode(\"classification\") %>% translate() ## Decision Tree Model Specification (classification) ## ## Main Arguments: ## min_n = integer() ## ## Computational engine: C5.0 ## ## Model fit template: ## parsnip::C5.0_train(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## minCases = integer(), trials = 1)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"“Fitting Predicting parsnip” article contains examples decision_tree() \"C5.0\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":null,"dir":"Reference","previous_headings":"","what":"Decision trees via partykit — details_decision_tree_partykit","title":"Decision trees via partykit — details_decision_tree_partykit","text":"partykit::ctree() fits model set /statements creates tree-based structure using hypothesis testing methods.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Decision trees via partykit — details_decision_tree_partykit","text":"engine, multiple modes: censored regression, regression, classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Decision trees via partykit — details_decision_tree_partykit","text":"model 2 tuning parameters: tree_depth: Tree Depth (type: integer, default: see ) min_n: Minimal Node Size (type: integer, default: 20L) tree_depth parameter defaults 0 means restrictions applied tree depth. engine-specific parameter model : mtry: number predictors, selected random, evaluated splitting. 
default use predictors.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Decision trees via partykit — details_decision_tree_partykit","text":"bonsai extension package required fit model.","code":"library(bonsai) decision_tree(tree_depth = integer(1), min_n = integer(1)) %>% set_engine(\"partykit\") %>% set_mode(\"regression\") %>% translate() ## Decision Tree Model Specification (regression) ## ## Main Arguments: ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: partykit ## ## Model fit template: ## parsnip::ctree_train(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), maxdepth = integer(1), minsplit = min_rows(0L, ## data))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Decision trees via partykit — details_decision_tree_partykit","text":"bonsai extension package required fit model. 
parsnip::ctree_train() wrapper around partykit::ctree() (functions) makes easier run model.","code":"library(bonsai) decision_tree(tree_depth = integer(1), min_n = integer(1)) %>% set_engine(\"partykit\") %>% set_mode(\"classification\") %>% translate() ## Decision Tree Model Specification (classification) ## ## Main Arguments: ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: partykit ## ## Model fit template: ## parsnip::ctree_train(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), maxdepth = integer(1), minsplit = min_rows(0L, ## data))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":"translation-from-parsnip-to-the-original-package-censored-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (censored regression)","title":"Decision trees via partykit — details_decision_tree_partykit","text":"censored extension package required fit model. censored::cond_inference_surv_ctree() wrapper around partykit::ctree() (functions) makes easier run model.","code":"library(censored) decision_tree(tree_depth = integer(1), min_n = integer(1)) %>% set_engine(\"partykit\") %>% set_mode(\"censored regression\") %>% translate() ## Decision Tree Model Specification (censored regression) ## ## Main Arguments: ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: partykit ## ## Model fit template: ## parsnip::ctree_train(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), maxdepth = integer(1), minsplit = min_rows(0L, ## data))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Decision trees via partykit — details_decision_tree_partykit","text":"engine require special encoding predictors. 
Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Decision trees via partykit — details_decision_tree_partykit","text":"Predictions type \"time\" predictions median survival time.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Decision trees via partykit — details_decision_tree_partykit","text":"partykit: Modular Toolkit Recursive Partytioning R Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":null,"dir":"Reference","previous_headings":"","what":"Decision trees via CART — details_decision_tree_rpart","title":"Decision trees via CART — details_decision_tree_rpart","text":"rpart::rpart() fits model set /statements creates tree-based structure.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Decision trees via CART — details_decision_tree_rpart","text":"engine, multiple modes: classification, regression, censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Decision trees via CART — details_decision_tree_rpart","text":"model 3 tuning parameters: tree_depth: Tree Depth (type: integer, default: 30L) min_n: Minimal Node Size (type: integer, default: 2L) cost_complexity: Cost-Complexity Parameter (type: double, default: 
0.01)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Decision trees via CART — details_decision_tree_rpart","text":"","code":"decision_tree(tree_depth = integer(1), min_n = integer(1), cost_complexity = double(1)) %>% set_engine(\"rpart\") %>% set_mode(\"classification\") %>% translate() ## Decision Tree Model Specification (classification) ## ## Main Arguments: ## cost_complexity = double(1) ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: rpart ## ## Model fit template: ## rpart::rpart(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## cp = double(1), maxdepth = integer(1), minsplit = min_rows(0L, ## data))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Decision trees via CART — details_decision_tree_rpart","text":"","code":"decision_tree(tree_depth = integer(1), min_n = integer(1), cost_complexity = double(1)) %>% set_engine(\"rpart\") %>% set_mode(\"regression\") %>% translate() ## Decision Tree Model Specification (regression) ## ## Main Arguments: ## cost_complexity = double(1) ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: rpart ## ## Model fit template: ## rpart::rpart(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## cp = double(1), maxdepth = integer(1), minsplit = min_rows(0L, ## 
data))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"translation-from-parsnip-to-the-original-package-censored-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (censored regression)","title":"Decision trees via CART — details_decision_tree_rpart","text":"censored extension package required fit model.","code":"library(censored) decision_tree( tree_depth = integer(1), min_n = integer(1), cost_complexity = double(1) ) %>% set_engine(\"rpart\") %>% set_mode(\"censored regression\") %>% translate() ## Decision Tree Model Specification (censored regression) ## ## Main Arguments: ## cost_complexity = double(1) ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: rpart ## ## Model fit template: ## pec::pecRpart(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), cp = double(1), maxdepth = integer(1), ## minsplit = min_rows(0L, data))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Decision trees via CART — details_decision_tree_rpart","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Decision trees via CART — details_decision_tree_rpart","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Decision trees via CART — details_decision_tree_rpart","text":"Predictions type \"time\" predictions mean survival time.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Decision trees via CART — details_decision_tree_rpart","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Decision trees via CART — details_decision_tree_rpart","text":"“Fitting Predicting parsnip” article contains examples decision_tree() \"rpart\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Decision trees via CART — details_decision_tree_rpart","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":null,"dir":"Reference","previous_headings":"","what":"Decision trees via Spark — details_decision_tree_spark","title":"Decision trees via Spark — details_decision_tree_spark","text":"sparklyr::ml_decision_tree() fits model set /statements creates tree-based structure.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Decision trees via Spark — details_decision_tree_spark","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Decision trees via Spark — details_decision_tree_spark","text":"model 2 tuning parameters: tree_depth: Tree Depth (type: integer, default: 5L) min_n: Minimal Node Size (type: integer, default: 1L)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Decision trees via Spark — details_decision_tree_spark","text":"","code":"decision_tree(tree_depth = integer(1), min_n = integer(1)) %>% set_engine(\"spark\") %>% set_mode(\"classification\") %>% translate() ## Decision Tree Model Specification (classification) ## ## Main Arguments: ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_decision_tree_classifier(x = missing_arg(), formula = missing_arg(), ## max_depth = integer(1), min_instances_per_node = min_rows(0L, ## x), seed = sample.int(10^5, 
1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Decision trees via Spark — details_decision_tree_spark","text":"","code":"decision_tree(tree_depth = integer(1), min_n = integer(1)) %>% set_engine(\"spark\") %>% set_mode(\"regression\") %>% translate() ## Decision Tree Model Specification (regression) ## ## Main Arguments: ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_decision_tree_regressor(x = missing_arg(), formula = missing_arg(), ## max_depth = integer(1), min_instances_per_node = min_rows(0L, ## x), seed = sample.int(10^5, 1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Decision trees via Spark — details_decision_tree_spark","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Decision trees via Spark — details_decision_tree_spark","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. 
Note , spark engines, case_weight argument value character string specify column numeric case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Decision trees via Spark — details_decision_tree_spark","text":"models created using \"spark\" engine, several things consider. formula interface via fit() available; using fit_xy() generate error. predictions always Spark table format. names documented without dots. equivalent factor columns Spark tables class predictions returned character columns. retain model object new R session (via save()), model$fit element parsnip object serialized via ml_save(object$fit) separately saved disk. new session, object can reloaded reattached parsnip object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Decision trees via Spark — details_decision_tree_spark","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_flexible_earth.html","id":null,"dir":"Reference","previous_headings":"","what":"Flexible discriminant analysis via earth — details_discrim_flexible_earth","title":"Flexible discriminant analysis via earth — details_discrim_flexible_earth","text":"mda::fda() (conjunction earth::earth() can fit nonlinear discriminant analysis model uses nonlinear features created using multivariate adaptive regression splines (MARS). 
function can fit classification models.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_flexible_earth.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Flexible discriminant analysis via earth — details_discrim_flexible_earth","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_flexible_earth.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Flexible discriminant analysis via earth — details_discrim_flexible_earth","text":"model 3 tuning parameter: num_terms: # Model Terms (type: integer, default: (see )) prod_degree: Degree Interaction (type: integer, default: 1L) prune_method: Pruning Method (type: character, default: ‘backward’) default value num_terms depends number columns (p): min(200, max(20, 2 * p)) + 1. Note num_terms = 1 intercept-model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_flexible_earth.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Flexible discriminant analysis via earth — details_discrim_flexible_earth","text":"discrim extension package required fit model.","code":"library(discrim) discrim_flexible( num_terms = integer(0), prod_degree = integer(0), prune_method = character(0) ) %>% translate() ## Flexible Discriminant Model Specification (classification) ## ## Main Arguments: ## num_terms = integer(0) ## prod_degree = integer(0) ## prune_method = character(0) ## ## Computational engine: earth ## ## Model fit template: ## mda::fda(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## nprune = integer(0), degree = integer(0), pmethod = character(0), ## method = 
earth::earth)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_flexible_earth.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Flexible discriminant analysis via earth — details_discrim_flexible_earth","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_flexible_earth.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Flexible discriminant analysis via earth — details_discrim_flexible_earth","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_flexible_earth.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Flexible discriminant analysis via earth — details_discrim_flexible_earth","text":"Hastie, Tibshirani & Buja (1994) Flexible Discriminant Analysis Optimal Scoring, Journal American Statistical Association, 89:428, 1255-1270 Friedman (1991). Multivariate Adaptive Regression Splines. Annals Statistics, 19(1), 1-67.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_MASS.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear discriminant analysis via MASS — details_discrim_linear_MASS","title":"Linear discriminant analysis via MASS — details_discrim_linear_MASS","text":"MASS::lda() fits model estimates multivariate distribution predictors separately data class (Gaussian common covariance matrix). 
Bayes' theorem used compute probability class, given predictor values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_MASS.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear discriminant analysis via MASS — details_discrim_linear_MASS","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_MASS.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear discriminant analysis via MASS — details_discrim_linear_MASS","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_MASS.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear discriminant analysis via MASS — details_discrim_linear_MASS","text":"discrim extension package required fit model.","code":"library(discrim) discrim_linear() %>% set_engine(\"MASS\") %>% translate() ## Linear Discriminant Model Specification (classification) ## ## Computational engine: MASS ## ## Model fit template: ## MASS::lda(formula = missing_arg(), data = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_MASS.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear discriminant analysis via MASS — details_discrim_linear_MASS","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. 
Variance calculations used computations zero-variance predictors (.e., single unique value) eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_MASS.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear discriminant analysis via MASS — details_discrim_linear_MASS","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_MASS.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear discriminant analysis via MASS — details_discrim_linear_MASS","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_mda.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear discriminant analysis via flexible discriminant analysis — details_discrim_linear_mda","title":"Linear discriminant analysis via flexible discriminant analysis — details_discrim_linear_mda","text":"mda::fda() (conjunction mda::gen.ridge() can fit linear discriminant analysis model penalizes predictor coefficients quadratic penalty (.e., ridge weight decay approach).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_mda.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear discriminant analysis via flexible discriminant analysis — details_discrim_linear_mda","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_mda.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear discriminant analysis via flexible discriminant analysis — details_discrim_linear_mda","text":"model 1 tuning parameter: penalty: Amount Regularization (type: double, 
default: 1.0)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_mda.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear discriminant analysis via flexible discriminant analysis — details_discrim_linear_mda","text":"discrim extension package required fit model.","code":"library(discrim) discrim_linear(penalty = numeric(0)) %>% set_engine(\"mda\") %>% translate() ## Linear Discriminant Model Specification (classification) ## ## Main Arguments: ## penalty = numeric(0) ## ## Computational engine: mda ## ## Model fit template: ## mda::fda(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## lambda = numeric(0), method = mda::gen.ridge, keep.fitted = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_mda.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear discriminant analysis via flexible discriminant analysis — details_discrim_linear_mda","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Variance calculations used computations zero-variance predictors (.e., single unique value) eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_mda.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear discriminant analysis via flexible discriminant analysis — details_discrim_linear_mda","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_mda.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear discriminant analysis via flexible discriminant analysis — details_discrim_linear_mda","text":"Hastie, Tibshirani & Buja (1994) Flexible Discriminant Analysis Optimal Scoring, Journal American Statistical Association, 89:428, 1255-1270","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sda.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear discriminant analysis via James-Stein-type shrinkage estimation — details_discrim_linear_sda","title":"Linear discriminant analysis via James-Stein-type shrinkage estimation — details_discrim_linear_sda","text":"sda::sda() can fit linear discriminant analysis model can fit models classical discriminant analysis diagonal discriminant analysis.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sda.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear discriminant analysis via James-Stein-type shrinkage estimation — details_discrim_linear_sda","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sda.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear discriminant analysis via James-Stein-type shrinkage estimation — details_discrim_linear_sda","text":"engine tuning parameter arguments discrim_linear(). However, engine-specific parameters can set optimized calling set_engine(): lambda: shrinkage parameters correlation matrix. maps parameter dials::shrinkage_correlation(). lambda.var: shrinkage parameters predictor variances. maps dials::shrinkage_variance(). 
lambda.freqs: shrinkage parameters class frequencies. maps dials::shrinkage_frequencies(). diagonal: logical make model covariance diagonal . maps dials::diagonal_covariance().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sda.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear discriminant analysis via James-Stein-type shrinkage estimation — details_discrim_linear_sda","text":"discrim extension package required fit model.","code":"library(discrim) discrim_linear() %>% set_engine(\"sda\") %>% translate() ## Linear Discriminant Model Specification (classification) ## ## Computational engine: sda ## ## Model fit template: ## sda::sda(Xtrain = missing_arg(), L = missing_arg(), verbose = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sda.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear discriminant analysis via James-Stein-type shrinkage estimation — details_discrim_linear_sda","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. 
Variance calculations used computations zero-variance predictors (.e., single unique value) eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sda.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear discriminant analysis via James-Stein-type shrinkage estimation — details_discrim_linear_sda","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sda.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear discriminant analysis via James-Stein-type shrinkage estimation — details_discrim_linear_sda","text":"Ahdesmaki, ., K. Strimmer. 2010. Feature selection omics prediction problems using cat scores false non-discovery rate control. Ann. Appl. Stat. 4: 503-519. Preprint.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sparsediscrim.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear discriminant analysis via regularization — details_discrim_linear_sparsediscrim","title":"Linear discriminant analysis via regularization — details_discrim_linear_sparsediscrim","text":"Functions sparsediscrim package fit different types linear discriminant analysis model regularize estimates (like mean covariance).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sparsediscrim.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear discriminant analysis via regularization — details_discrim_linear_sparsediscrim","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sparsediscrim.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear discriminant analysis via 
regularization — details_discrim_linear_sparsediscrim","text":"model 1 tuning parameter: regularization_method: Regularization Method (type: character, default: ‘diagonal’) possible values parameter, functions execute, : \"diagonal\": sparsediscrim::lda_diag() \"min_distance\": sparsediscrim::lda_emp_bayes_eigen() \"shrink_mean\": sparsediscrim::lda_shrink_mean() \"shrink_cov\": sparsediscrim::lda_shrink_cov()","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sparsediscrim.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear discriminant analysis via regularization — details_discrim_linear_sparsediscrim","text":"discrim extension package required fit model.","code":"library(discrim) discrim_linear(regularization_method = character(0)) %>% set_engine(\"sparsediscrim\") %>% translate() ## Linear Discriminant Model Specification (classification) ## ## Main Arguments: ## regularization_method = character(0) ## ## Computational engine: sparsediscrim ## ## Model fit template: ## discrim::fit_regularized_linear(x = missing_arg(), y = missing_arg(), ## method = character(0))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sparsediscrim.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear discriminant analysis via regularization — details_discrim_linear_sparsediscrim","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. 
Variance calculations used computations zero-variance predictors (.e., single unique value) eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sparsediscrim.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear discriminant analysis via regularization — details_discrim_linear_sparsediscrim","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sparsediscrim.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear discriminant analysis via regularization — details_discrim_linear_sparsediscrim","text":"lda_diag(): Dudoit, Fridlyand Speed (2002) Comparison Discrimination Methods Classification Tumors Using Gene Expression Data, Journal American Statistical Association, 97:457, 77-87. lda_shrink_mean(): Tong, Chen, Zhao, Improved mean estimation application diagonal discriminant analysis, Bioinformatics, Volume 28, Issue 4, 15 February 2012, Pages 531-537. lda_shrink_cov(): Pang, Tong Zhao (2009), Shrinkage-based Diagonal Discriminant Analysis Applications High-Dimensional Data. Biometrics, 65, 1021-1029. lda_emp_bayes_eigen(): Srivistava Kubokawa (2007), Comparison Discrimination Methods High Dimensional Data, Journal Japan Statistical Society, 37:1, 123-134.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_MASS.html","id":null,"dir":"Reference","previous_headings":"","what":"Quadratic discriminant analysis via MASS — details_discrim_quad_MASS","title":"Quadratic discriminant analysis via MASS — details_discrim_quad_MASS","text":"MASS::qda() fits model estimates multivariate distribution predictors separately data class (Gaussian separate covariance matrices). 
Bayes' theorem used compute probability class, given predictor values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_MASS.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Quadratic discriminant analysis via MASS — details_discrim_quad_MASS","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_MASS.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Quadratic discriminant analysis via MASS — details_discrim_quad_MASS","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_MASS.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Quadratic discriminant analysis via MASS — details_discrim_quad_MASS","text":"discrim extension package required fit model.","code":"library(discrim) discrim_quad() %>% set_engine(\"MASS\") %>% translate() ## Quadratic Discriminant Model Specification (classification) ## ## Computational engine: MASS ## ## Model fit template: ## MASS::qda(formula = missing_arg(), data = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_MASS.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Quadratic discriminant analysis via MASS — details_discrim_quad_MASS","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Variance calculations used computations within outcome class. 
reason, zero-variance predictors (.e., single unique value) within class eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_MASS.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Quadratic discriminant analysis via MASS — details_discrim_quad_MASS","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_MASS.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Quadratic discriminant analysis via MASS — details_discrim_quad_MASS","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_sparsediscrim.html","id":null,"dir":"Reference","previous_headings":"","what":"Quadratic discriminant analysis via regularization — details_discrim_quad_sparsediscrim","title":"Quadratic discriminant analysis via regularization — details_discrim_quad_sparsediscrim","text":"Functions sparsediscrim package fit different types quadratic discriminant analysis model regularize estimates (like mean covariance).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_sparsediscrim.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Quadratic discriminant analysis via regularization — details_discrim_quad_sparsediscrim","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_sparsediscrim.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Quadratic discriminant analysis via regularization — details_discrim_quad_sparsediscrim","text":"model 1 tuning parameter: regularization_method: Regularization Method (type: character, default: ‘diagonal’) possible values 
parameter, functions execute, : \"diagonal\": sparsediscrim::qda_diag() \"shrink_mean\": sparsediscrim::qda_shrink_mean() \"shrink_cov\": sparsediscrim::qda_shrink_cov()","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_sparsediscrim.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Quadratic discriminant analysis via regularization — details_discrim_quad_sparsediscrim","text":"discrim extension package required fit model.","code":"library(discrim) discrim_quad(regularization_method = character(0)) %>% set_engine(\"sparsediscrim\") %>% translate() ## Quadratic Discriminant Model Specification (classification) ## ## Main Arguments: ## regularization_method = character(0) ## ## Computational engine: sparsediscrim ## ## Model fit template: ## discrim::fit_regularized_quad(x = missing_arg(), y = missing_arg(), ## method = character(0))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_sparsediscrim.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Quadratic discriminant analysis via regularization — details_discrim_quad_sparsediscrim","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Variance calculations used computations within outcome class. 
reason, zero-variance predictors (.e., single unique value) within class eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_sparsediscrim.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Quadratic discriminant analysis via regularization — details_discrim_quad_sparsediscrim","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_sparsediscrim.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Quadratic discriminant analysis via regularization — details_discrim_quad_sparsediscrim","text":"qda_diag(): Dudoit, Fridlyand Speed (2002) Comparison Discrimination Methods Classification Tumors Using Gene Expression Data, Journal American Statistical Association, 97:457, 77-87. qda_shrink_mean(): Tong, Chen, Zhao, Improved mean estimation application diagonal discriminant analysis, Bioinformatics, Volume 28, Issue 4, 15 February 2012, Pages 531-537. qda_shrink_cov(): Pang, Tong Zhao (2009), Shrinkage-based Diagonal Discriminant Analysis Applications High-Dimensional Data. Biometrics, 65, 1021-1029.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_regularized_klaR.html","id":null,"dir":"Reference","previous_headings":"","what":"Regularized discriminant analysis via klaR — details_discrim_regularized_klaR","title":"Regularized discriminant analysis via klaR — details_discrim_regularized_klaR","text":"klaR::rda() fits model estimates multivariate distribution predictors separately data class. structure model can LDA, QDA, amalgam two. 
Bayes' theorem used compute probability class, given predictor values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_regularized_klaR.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Regularized discriminant analysis via klaR — details_discrim_regularized_klaR","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_regularized_klaR.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Regularized discriminant analysis via klaR — details_discrim_regularized_klaR","text":"model 2 tuning parameter: frac_common_cov: Fraction Common Covariance Matrix (type: double, default: (see )) frac_identity: Fraction Identity Matrix (type: double, default: (see )) special cases RDA model: frac_identity = 0 frac_common_cov = 1 linear discriminant analysis (LDA) model. frac_identity = 0 frac_common_cov = 0 quadratic discriminant analysis (QDA) model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_regularized_klaR.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Regularized discriminant analysis via klaR — details_discrim_regularized_klaR","text":"discrim extension package required fit model.","code":"library(discrim) discrim_regularized(frac_identity = numeric(0), frac_common_cov = numeric(0)) %>% set_engine(\"klaR\") %>% translate() ## Regularized Discriminant Model Specification (classification) ## ## Main Arguments: ## frac_common_cov = numeric(0) ## frac_identity = numeric(0) ## ## Computational engine: klaR ## ## Model fit template: ## klaR::rda(formula = missing_arg(), data = missing_arg(), lambda = numeric(0), ## gamma = 
numeric(0))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_regularized_klaR.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Regularized discriminant analysis via klaR — details_discrim_regularized_klaR","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Variance calculations used computations within outcome class. reason, zero-variance predictors (.e., single unique value) within class eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_regularized_klaR.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Regularized discriminant analysis via klaR — details_discrim_regularized_klaR","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_regularized_klaR.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Regularized discriminant analysis via klaR — details_discrim_regularized_klaR","text":"Friedman, J (1989). Regularized Discriminant Analysis. Journal American Statistical Association, 84, 165-175. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":null,"dir":"Reference","previous_headings":"","what":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"mgcv::gam() fits generalized linear model additive smoother terms continuous predictors.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"engine, multiple modes: regression classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"model 2 tuning parameters: select_features: Select Features? 
(type: logical, default: FALSE) adjust_deg_free: Smoothness Adjustment (type: double, default: 1.0)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"","code":"gen_additive_mod(adjust_deg_free = numeric(1), select_features = logical(1)) %>% set_engine(\"mgcv\") %>% set_mode(\"regression\") %>% translate() ## GAM Model Specification (regression) ## ## Main Arguments: ## select_features = logical(1) ## adjust_deg_free = numeric(1) ## ## Computational engine: mgcv ## ## Model fit template: ## mgcv::gam(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## select = logical(1), gamma = numeric(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"","code":"gen_additive_mod(adjust_deg_free = numeric(1), select_features = logical(1)) %>% set_engine(\"mgcv\") %>% set_mode(\"classification\") %>% translate() ## GAM Model Specification (classification) ## ## Main Arguments: ## select_features = logical(1) ## adjust_deg_free = numeric(1) ## ## Computational engine: mgcv ## ## Model fit template: ## mgcv::gam(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## select = logical(1), gamma = numeric(1), family = stats::binomial(link = \"logit\"))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"model-fitting","dir":"Reference","previous_headings":"","what":"Model 
fitting","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"model used model formula smooth terms can specified. example: smoothness terms need manually specified (e.g., using s(x, df = 10)) formula. Tuning can accomplished using adjust_deg_free parameter. using workflow, pass model formula add_model()’s formula argument, simplified preprocessing formula elsewhere. learn differences formulas, see ?model_formula.","code":"library(mgcv) gen_additive_mod() %>% set_engine(\"mgcv\") %>% set_mode(\"regression\") %>% fit(mpg ~ wt + gear + cyl + s(disp, k = 10), data = mtcars) ## parsnip model object ## ## ## Family: gaussian ## Link function: identity ## ## Formula: ## mpg ~ wt + gear + cyl + s(disp, k = 10) ## ## Estimated degrees of freedom: ## 7.52 total = 11.52 ## ## GCV score: 4.225228 spec <- gen_additive_mod() %>% set_engine(\"mgcv\") %>% set_mode(\"regression\") workflow() %>% add_model(spec, formula = mpg ~ wt + gear + cyl + s(disp, k = 10)) %>% add_formula(mpg ~ wt + gear + cyl + disp) %>% fit(data = mtcars) %>% extract_fit_engine() ## ## Family: gaussian ## Link function: identity ## ## Formula: ## mpg ~ wt + gear + cyl + s(disp, k = 10) ## ## Estimated degrees of freedom: ## 7.52 total = 11.52 ## ## GCV score: 4.225228"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. 
using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"Ross, W. 2021. Generalized Additive Models R: Free, Interactive Course using mgcv Wood, S. 2017. Generalized Additive Models: Introduction R. 
Chapman Hall/CRC.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_brulee.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via brulee — details_linear_reg_brulee","title":"Linear regression via brulee — details_linear_reg_brulee","text":"brulee::brulee_linear_reg() uses ordinary least squares fit models numeric outcomes.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_brulee.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via brulee — details_linear_reg_brulee","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_brulee.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via brulee — details_linear_reg_brulee","text":"model 2 tuning parameter: penalty: Amount Regularization (type: double, default: 0.001) mixture: Proportion Lasso Penalty (type: double, default: 0.0) use L1 penalty (.k.. lasso penalty) force parameters strictly zero (packages glmnet). zeroing parameters specific feature optimization method used packages. engine arguments interest: optimizer(): optimization method. See brulee::brulee_linear_reg(). epochs(): integer number passes training set. lean_rate(): number used accelerate gradient decsent process. momentum(): number used use historical gradient infomration optimization (optimizer = \"SGD\" ). batch_size(): integer number training set points batch. stop_iter(): non-negative integer many iterations improvement stopping. 
(default: 5L).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_brulee.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Linear regression via brulee — details_linear_reg_brulee","text":"","code":"linear_reg(penalty = double(1)) %>% set_engine(\"brulee\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Main Arguments: ## penalty = double(1) ## ## Computational engine: brulee ## ## Model fit template: ## brulee::brulee_linear_reg(x = missing_arg(), y = missing_arg(), ## penalty = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_brulee.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via brulee — details_linear_reg_brulee","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_brulee.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via brulee — details_linear_reg_brulee","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_brulee.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via brulee — details_linear_reg_brulee","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gee.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","title":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","text":"gee::gee() uses generalized least squares fit different types models errors independent.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gee.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gee.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","text":"model formal tuning parameters. may beneficial determine appropriate correlation structure use, typically affect predicted value model. effect inferential results parameter covariance values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gee.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","text":"multilevelmod extension package required fit model. 
multilevelmod::gee_fit() wrapper model around gee::gee().","code":"library(multilevelmod) linear_reg() %>% set_engine(\"gee\") %>% set_mode(\"regression\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: gee ## ## Model fit template: ## multilevelmod::gee_fit(formula = missing_arg(), data = missing_arg(), ## family = gaussian)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gee.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gee.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","text":"model accept case weights. gee:gee() gee:geepack() specify id/cluster variable using argument id requires vector. parsnip doesn’t work way enable model fit using artificial function id_var() used formula. , original package, call look like: parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply GEE formula adding model: gee::gee() function always prints warnings output even silent = TRUE. parsnip \"gee\" engine, contrast, silences console output coming gee::gee(), even silent = FALSE. 
Also, issues gee() function, supplementary call glm() needed get rank QR decomposition objects predict() can used.","code":"gee(breaks ~ tension, id = wool, data = warpbreaks, corstr = \"exchangeable\") library(tidymodels) linear_reg() %>% set_engine(\"gee\", corstr = \"exchangeable\") %>% fit(breaks ~ tension + id_var(wool), data = warpbreaks) library(tidymodels) gee_spec <- linear_reg() %>% set_engine(\"gee\", corstr = \"exchangeable\") gee_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = breaks, predictors = c(tension, wool)) %>% add_model(gee_spec, formula = breaks ~ tension + id_var(wool)) fit(gee_wflow, data = warpbreaks)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gee.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gee.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","text":"Liang, K.Y. Zeger, S.L. (1986) Longitudinal data analysis using generalized linear models. Biometrika, 73 13–22. Zeger, S.L. Liang, K.Y. (1986) Longitudinal data analysis discrete continuous outcomes. Biometrics, 42 121–130.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via glm — details_linear_reg_glm","title":"Linear regression via glm — details_linear_reg_glm","text":"stats::glm() fits generalized linear model numeric outcomes. 
linear combination predictors used model numeric outcome via link function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via glm — details_linear_reg_glm","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via glm — details_linear_reg_glm","text":"engine tuning parameters can set family parameter (/link) engine argument (see ).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via glm — details_linear_reg_glm","text":"use non-default family /link, pass argument set_engine():","code":"linear_reg() %>% set_engine(\"glm\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: glm ## ## Model fit template: ## stats::glm(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## family = stats::gaussian) linear_reg() %>% set_engine(\"glm\", family = stats::poisson(link = \"sqrt\")) %>% translate() ## Linear Regression Model Specification (regression) ## ## Engine-Specific Arguments: ## family = stats::poisson(link = \"sqrt\") ## ## Computational engine: glm ## ## Model fit template: ## stats::glm(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## family = stats::poisson(link = \"sqrt\"))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via glm — 
details_linear_reg_glm","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via glm — details_linear_reg_glm","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. However, documentation stats::glm() assumes specific type case weights used:“Non-NULL weights can used indicate different observations different dispersions (values weights inversely proportional dispersions); equivalently, elements weights positive integers w_i, response y_i mean w_i unit-weight observations. binomial GLM prior weights used give number trials response proportion successes: rarely used Poisson GLM.”","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Linear regression via glm — details_linear_reg_glm","text":"model object contains data required make predictions. 
saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear regression via glm — details_linear_reg_glm","text":"“Fitting Predicting parsnip” article contains examples linear_reg() \"glm\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via glm — details_linear_reg_glm","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via generalized mixed models — details_linear_reg_glmer","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"\"glmer\" engine estimates fixed random effect regression parameters using maximum likelihood (restricted maximum likelihood) estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"model tuning 
parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"multilevelmod extension package required fit model. Note using engine linear link function result warning:","code":"library(multilevelmod) linear_reg() %>% set_engine(\"glmer\") %>% set_mode(\"regression\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: glmer ## ## Model fit template: ## lme4::glmer(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## family = stats::gaussian) calling glmer() with family=gaussian (identity link) as a shortcut to lmer() is deprecated; please call lmer() directly"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":"predicting-new-samples","dir":"Reference","previous_headings":"","what":"Predicting new samples","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"model can use subject-specific coefficient estimates make predictions (.e. partial pooling). example, equation shows linear predictor (\\eta) random intercept: $$ denotes ith independent experimental unit (e.g. subject). model seen subject , can use subject’s data adjust population intercept specific subjects results. happens data predicted subject used model fit? case, package uses population parameter estimates prediction: Depending covariates model, might effect making prediction new samples. population parameters “best estimate” subject included model fit. 
tidymodels framework deliberately constrains predictions new data use training set data (prevent information leakage).","code":"\\eta_{i} = (\\beta_0 + b_{0i}) + \\beta_1x_{i1} \\hat{\\eta}_{i'} = \\hat{\\beta}_0+ \\hat{\\beta}x_{i'1}"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"model can accept case weights. parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply typical formula adding model:","code":"library(tidymodels) data(\"riesby\") linear_reg() %>% set_engine(\"glmer\") %>% fit(depr_score ~ week + (1|subject), data = riesby) library(tidymodels) glmer_spec <- linear_reg() %>% set_engine(\"glmer\") glmer_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = depr_score, predictors = c(week, subject)) %>% add_model(glmer_spec, formula = depr_score ~ week + (1|subject)) fit(glmer_wflow, data = riesby)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"model can utilize case weights model fitting. 
use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"J Pinheiro, D Bates. 2000. Mixed-effects models S S-PLUS. Springer, New York, NY West, K, Band Welch, Galecki. 2014. Linear Mixed Models: Practical Guide Using Statistical Software. CRC Press. Thorson, J, Minto, C. 2015, Mixed effects: unifying framework statistical modelling fisheries biology. ICES Journal Marine Science, Volume 72, Issue 5, Pages 1245–1256. Harrison, XA, Donaldson, L, Correa-Cano, , Evans, J, Fisher, DN, Goodwin, CED, Robinson, BS, Hodgson, DJ, Inger, R. 2018. brief introduction mixed effects modelling multi-model inference ecology. PeerJ 6:e4794. DeBruine LM, Barr DJ. Understanding Mixed-Effects Models Data Simulation. 2021. 
Advances Methods Practices Psychological Science.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via glmnet — details_linear_reg_glmnet","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"glmnet::glmnet() uses regularized least squares fit models numeric outcomes.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: see ) mixture: Proportion Lasso Penalty (type: double, default: 1.0) value mixture = 1 corresponds pure lasso model, mixture = 0 indicates ridge regression. penalty parameter default requires single numeric value. 
details , glmnet model general, see glmnet-details.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"","code":"linear_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"glmnet\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Main Arguments: ## penalty = 0 ## mixture = double(1) ## ## Computational engine: glmnet ## ## Model fit template: ## glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## alpha = double(1), family = \"gaussian\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, glmnet::glmnet() uses argument standardize = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"“Fitting Predicting parsnip” article contains examples linear_reg() \"glmnet\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"Hastie, T, R Tibshirani, M Wainwright. 2015. Statistical Learning Sparsity. CRC Press. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gls.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via generalized least squares — details_linear_reg_gls","title":"Linear regression via generalized least squares — details_linear_reg_gls","text":"\"gls\" engine estimates linear regression models rows data independent.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gls.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via generalized least squares — details_linear_reg_gls","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gls.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via generalized least squares — details_linear_reg_gls","text":"model tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gls.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via generalized least squares — details_linear_reg_gls","text":"multilevelmod extension package required fit model.","code":"library(multilevelmod) linear_reg() %>% set_engine(\"gls\") %>% set_mode(\"regression\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: gls ## ## Model fit template: ## nlme::gls(formula = missing_arg(), data = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gls.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via generalized least squares — details_linear_reg_gls","text":"specific preprocessing needs. 
However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gls.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Linear regression via generalized least squares — details_linear_reg_gls","text":"model can accept case weights. parsnip, suggest using fixed effects formula method fitting, details correlation structure passed set_engine() since irregular (required) argument: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply typical formula adding model:","code":"library(tidymodels) # load nlme to be able to use the `cor*()` functions library(nlme) data(\"riesby\") linear_reg() %>% set_engine(\"gls\", correlation = corCompSymm(form = ~ 1 | subject)) %>% fit(depr_score ~ week, data = riesby) ## parsnip model object ## ## Generalized least squares fit by REML ## Model: depr_score ~ week ## Data: data ## Log-restricted-likelihood: -765.0148 ## ## Coefficients: ## (Intercept) week ## -4.953439 -2.119678 ## ## Correlation Structure: Compound symmetry ## Formula: ~1 | subject ## Parameter estimate(s): ## Rho ## 0.6820145 ## Degrees of freedom: 250 total; 248 residual ## Residual standard error: 6.868785 library(tidymodels) gls_spec <- linear_reg() %>% set_engine(\"gls\", correlation = corCompSymm(form = ~ 1 | subject)) gls_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = depr_score, predictors = c(week, subject)) %>% add_model(gls_spec, formula = depr_score ~ week) fit(gls_wflow, data = riesby)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gls.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via generalized least squares — details_linear_reg_gls","text":"underlying 
model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gls.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via generalized least squares — details_linear_reg_gls","text":"J Pinheiro, D Bates. 2000. Mixed-effects models S S-PLUS. Springer, New York, NY","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via h2o — details_linear_reg_h2o","title":"Linear regression via h2o — details_linear_reg_h2o","text":"model uses regularized least squares fit models numeric outcomes.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via h2o — details_linear_reg_h2o","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via h2o — details_linear_reg_h2o","text":"model 2 tuning parameters: mixture: Proportion Lasso Penalty (type: double, default: see ) penalty: Amount Regularization (type: double, default: see ) default, given fixed penalty, h2o::h2o.glm() uses heuristic approach select optimal value penalty based training data. Setting engine parameter lambda_search TRUE enables efficient version grid search, see details https://docs.h2o.ai/h2o/latest-stable/h2o-docs/data-science/algo-params/lambda_search.html. choice mixture depends engine parameter solver, automatically chosen given training data specification model parameters. 
solver set 'L-BFGS', mixture defaults 0 (ridge regression) 0.5 otherwise.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_h2o.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via h2o — details_linear_reg_h2o","text":"agua::h2o_train_glm() linear_reg() wrapper around h2o::h2o.glm() family = \"gaussian\".","code":"linear_reg(penalty = 1, mixture = 0.5) %>% set_engine(\"h2o\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Main Arguments: ## penalty = 1 ## mixture = 0.5 ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), lambda = 1, alpha = 0.5, ## family = \"gaussian\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_h2o.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via h2o — details_linear_reg_h2o","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, h2o::h2o.glm() uses argument standardize = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Linear regression via h2o — details_linear_reg_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). 
can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Linear regression via h2o — details_linear_reg_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_keras.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via keras/tensorflow — details_linear_reg_keras","title":"Linear regression via keras/tensorflow — details_linear_reg_keras","text":"model uses regularized least squares fit models numeric outcomes.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_keras.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via keras/tensorflow — details_linear_reg_keras","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_keras.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via keras/tensorflow — details_linear_reg_keras","text":"model one tuning parameter: penalty: Amount Regularization (type: double, default: 0.0) penalty, amount regularization L2 penalty (.e., ridge weight 
decay).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_keras.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via keras/tensorflow — details_linear_reg_keras","text":"keras_mlp() parsnip wrapper around keras code neural networks. model fits linear regression network single hidden unit.","code":"linear_reg(penalty = double(1)) %>% set_engine(\"keras\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Main Arguments: ## penalty = double(1) ## ## Computational engine: keras ## ## Model fit template: ## parsnip::keras_mlp(x = missing_arg(), y = missing_arg(), penalty = double(1), ## hidden_units = 1, act = \"linear\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_keras.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via keras/tensorflow — details_linear_reg_keras","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. 
One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_keras.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via keras/tensorflow — details_linear_reg_keras","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_keras.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear regression via keras/tensorflow — details_linear_reg_keras","text":"“Fitting Predicting parsnip” article contains examples linear_reg() \"keras\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_keras.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via keras/tensorflow — details_linear_reg_keras","text":"Hoerl, ., & Kennard, R. (2000). Ridge Regression: Biased Estimation Nonorthogonal Problems. 
Technometrics, 42(1), 80-86.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via lm — details_linear_reg_lm","title":"Linear regression via lm — details_linear_reg_lm","text":"stats::lm() uses ordinary least squares fit models numeric outcomes.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via lm — details_linear_reg_lm","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via lm — details_linear_reg_lm","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via lm — details_linear_reg_lm","text":"","code":"linear_reg() %>% set_engine(\"lm\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: lm ## ## Model fit template: ## stats::lm(formula = missing_arg(), data = missing_arg(), weights = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via lm — details_linear_reg_lm","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. 
using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via lm — details_linear_reg_lm","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. However, documentation stats::lm() assumes specific type case weights used: “Non-NULL weights can used indicate different observations different variances (values weights inversely proportional variances); equivalently, elements weights positive integers w_i, response y_i mean w_i unit-weight observations (including case w_i observations equal y_i data summarized). However, latter case, notice within-group variation used. Therefore, sigma estimate residual degrees freedom may suboptimal; case replication weights, even wrong. Hence, standard errors analysis variance tables treated care” (emphasis added) Depending application, degrees freedom model (statistics) might incorrect.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Linear regression via lm — details_linear_reg_lm","text":"model object contains data required make predictions. 
saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear regression via lm — details_linear_reg_lm","text":"“Fitting Predicting parsnip” article contains examples linear_reg() \"lm\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via lm — details_linear_reg_lm","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via mixed models — details_linear_reg_lme","title":"Linear regression via mixed models — details_linear_reg_lme","text":"\"lme\" engine estimates fixed random effect regression parameters using maximum likelihood (restricted maximum likelihood) estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via mixed models — details_linear_reg_lme","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via mixed models — details_linear_reg_lme","text":"model tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear 
regression via mixed models — details_linear_reg_lme","text":"multilevelmod extension package required fit model.","code":"library(multilevelmod) linear_reg() %>% set_engine(\"lme\") %>% set_mode(\"regression\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: lme ## ## Model fit template: ## nlme::lme(fixed = missing_arg(), data = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":"predicting-new-samples","dir":"Reference","previous_headings":"","what":"Predicting new samples","title":"Linear regression via mixed models — details_linear_reg_lme","text":"model can use subject-specific coefficient estimates make predictions (.e. partial pooling). example, equation shows linear predictor (\\eta) random intercept: $$ denotes ith independent experimental unit (e.g. subject). model seen subject , can use subject’s data adjust population intercept specific subjects results. happens data predicted subject used model fit? case, package uses population parameter estimates prediction: Depending covariates model, might effect making prediction new samples. population parameters “best estimate” subject included model fit. tidymodels framework deliberately constrains predictions new data use training set data (prevent information leakage).","code":"\\eta_{i} = (\\beta_0 + b_{0i}) + \\beta_1x_{i1} \\hat{\\eta}_{i'} = \\hat{\\beta}_0+ \\hat{\\beta}x_{i'1}"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via mixed models — details_linear_reg_lme","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). 
See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Linear regression via mixed models — details_linear_reg_lme","text":"model can accept case weights. parsnip, suggest using fixed effects formula method fitting, random effects formula passed set_engine() since irregular (required) argument: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply typical formula adding model:","code":"library(tidymodels) data(\"riesby\") linear_reg() %>% set_engine(\"lme\", random = ~ 1|subject) %>% fit(depr_score ~ week, data = riesby) library(tidymodels) lme_spec <- linear_reg() %>% set_engine(\"lme\", random = ~ 1|subject) lme_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = depr_score, predictors = c(week, subject)) %>% add_model(lme_spec, formula = depr_score ~ week) fit(lme_wflow, data = riesby)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via mixed models — details_linear_reg_lme","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via mixed models — details_linear_reg_lme","text":"J Pinheiro, D Bates. 2000. Mixed-effects models S S-PLUS. Springer, New York, NY West, K, B Welch, Galecki. 2014. Linear Mixed Models: Practical Guide Using Statistical Software. CRC Press. Thorson, J, Minto, C. 2015, Mixed effects: unifying framework statistical modelling fisheries biology. ICES Journal Marine Science, Volume 72, Issue 5, Pages 1245–1256. 
Harrison, XA, Donaldson, L, Correa-Cano, , Evans, J, Fisher, DN, Goodwin, CED, Robinson, BS, Hodgson, DJ, Inger, R. 2018. brief introduction mixed effects modelling multi-model inference ecology. PeerJ 6:e4794. DeBruine LM, Barr DJ. Understanding Mixed-Effects Models Data Simulation. 2021. Advances Methods Practices Psychological Science.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via mixed models — details_linear_reg_lmer","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"\"lmer\" engine estimates fixed random effect regression parameters using maximum likelihood (restricted maximum likelihood) estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"model tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"multilevelmod extension package required fit model.","code":"library(multilevelmod) linear_reg() %>% set_engine(\"lmer\") %>% set_mode(\"regression\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: lmer ## ## Model fit template: ## lme4::lmer(formula = missing_arg(), 
data = missing_arg(), weights = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":"predicting-new-samples","dir":"Reference","previous_headings":"","what":"Predicting new samples","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"model can use subject-specific coefficient estimates make predictions (.e. partial pooling). example, equation shows linear predictor (\\eta) random intercept: $$ denotes ith independent experimental unit (e.g. subject). model seen subject , can use subject’s data adjust population intercept specific subjects results. happens data predicted subject used model fit? case, package uses population parameter estimates prediction: Depending covariates model, might effect making prediction new samples. population parameters “best estimate” subject included model fit. tidymodels framework deliberately constrains predictions new data use training set data (prevent information leakage).","code":"\\eta_{i} = (\\beta_0 + b_{0i}) + \\beta_1x_{i1} \\hat{\\eta}_{i'} = \\hat{\\beta}_0+ \\hat{\\beta}x_{i'1}"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"model can accept case weights. parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. 
case, can add appropriate columns using add_variables() supply typical formula adding model:","code":"library(tidymodels) data(\"riesby\") linear_reg() %>% set_engine(\"lmer\") %>% fit(depr_score ~ week + (1|subject), data = riesby) library(tidymodels) lmer_spec <- linear_reg() %>% set_engine(\"lmer\") lmer_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = depr_score, predictors = c(week, subject)) %>% add_model(lmer_spec, formula = depr_score ~ week + (1|subject)) fit(lmer_wflow, data = riesby)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"J Pinheiro, D Bates. 2000. Mixed-effects models S S-PLUS. Springer, New York, NY West, K, B Welch, Galecki. 2014. Linear Mixed Models: Practical Guide Using Statistical Software. CRC Press. Thorson, J, Minto, C. 2015, Mixed effects: unifying framework statistical modelling fisheries biology. ICES Journal Marine Science, Volume 72, Issue 5, Pages 1245–1256. Harrison, XA, Donaldson, L, Correa-Cano, , Evans, J, Fisher, DN, Goodwin, CED, Robinson, BS, Hodgson, DJ, Inger, R. 2018. brief introduction mixed effects modelling multi-model inference ecology. PeerJ 6:e4794. DeBruine LM, Barr DJ. Understanding Mixed-Effects Models Data Simulation. 2021. 
Advances Methods Practices Psychological Science.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_spark.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via spark — details_linear_reg_spark","title":"Linear regression via spark — details_linear_reg_spark","text":"sparklyr::ml_linear_regression() uses regularized least squares fit models numeric outcomes.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_spark.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via spark — details_linear_reg_spark","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_spark.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via spark — details_linear_reg_spark","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: 0.0) mixture: Proportion Lasso Penalty (type: double, default: 0.0) penalty, amount regularization includes L1 penalty (.e., lasso) L2 penalty (.e., ridge weight decay). 
mixture: mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_spark.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via spark — details_linear_reg_spark","text":"","code":"linear_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"spark\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Main Arguments: ## penalty = double(1) ## mixture = double(1) ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_linear_regression(x = missing_arg(), formula = missing_arg(), ## weights = missing_arg(), reg_param = double(1), elastic_net_param = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_spark.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via spark — details_linear_reg_spark","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, ml_linear_regression() uses argument standardization = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_spark.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via spark — details_linear_reg_spark","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights. Note , spark engines, case_weight argument value character string specify column numeric case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_spark.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Linear regression via spark — details_linear_reg_spark","text":"models created using \"spark\" engine, several things consider. formula interface via fit() available; using fit_xy() generate error. predictions always Spark table format. names documented without dots. equivalent factor columns Spark tables class predictions returned character columns. retain model object new R session (via save()), model$fit element parsnip object serialized via ml_save(object$fit) separately saved disk. new session, object can reloaded reattached parsnip object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_spark.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via spark — details_linear_reg_spark","text":"Luraschi, J, K Kuo, E Ruiz. 2019. Mastering Spark R. O’Reilly Media Hastie, T, R Tibshirani, M Wainwright. 2015. Statistical Learning Sparsity. CRC Press. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via Bayesian Methods — details_linear_reg_stan","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"\"stan\" engine estimates regression parameters using Bayesian estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"important-engine-specific-options","dir":"Reference","previous_headings":"","what":"Important engine-specific options","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"relevant arguments can passed set_engine(): chains: positive integer specifying number Markov chains. default 4. iter: positive integer specifying number iterations chain (including warmup). default 2000. seed: seed random number generation. cores: Number cores use executing chains parallel. prior: prior distribution (non-hierarchical) regression coefficients. \"stan\" engine fit hierarchical terms. See \"stan_glmer\" engine multilevelmod package type model. prior_intercept: prior distribution intercept (centering predictors). 
See rstan::sampling() rstanarm::priors() information options.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"Note refresh default prevents logging estimation process. Change value set_engine() show MCMC logs.","code":"linear_reg() %>% set_engine(\"stan\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: stan ## ## Model fit template: ## rstanarm::stan_glm(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), family = stats::gaussian, refresh = 0)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"prediction, \"stan\" engine can compute posterior intervals analogous confidence prediction intervals. 
instances, units original outcome std_error = TRUE, standard deviation posterior distribution (posterior predictive distribution appropriate) returned.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"“Fitting Predicting parsnip” article contains examples linear_reg() \"stan\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"McElreath, R. 2020 Statistical Rethinking. 
CRC Press.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"\"stan_glmer\" engine estimates hierarchical regression parameters using Bayesian estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"model tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"important-engine-specific-options","dir":"Reference","previous_headings":"","what":"Important engine-specific options","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"relevant arguments can passed set_engine(): chains: positive integer specifying number Markov chains. default 4. iter: positive integer specifying number iterations chain (including warmup). default 2000. seed: seed random number generation. cores: Number cores use executing chains parallel. prior: prior distribution (non-hierarchical) regression coefficients. prior_intercept: prior distribution intercept (centering predictors). 
See ?rstanarm::stan_glmer ?rstan::sampling information.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"multilevelmod extension package required fit model.","code":"library(multilevelmod) linear_reg() %>% set_engine(\"stan_glmer\") %>% set_mode(\"regression\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: stan_glmer ## ## Model fit template: ## rstanarm::stan_glmer(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), family = stats::gaussian, refresh = 0)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"predicting-new-samples","dir":"Reference","previous_headings":"","what":"Predicting new samples","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"model can use subject-specific coefficient estimates make predictions (.e. partial pooling). example, equation shows linear predictor (\\eta) random intercept: $$ denotes ith independent experimental unit (e.g. subject). model seen subject , can use subject’s data adjust population intercept specific subjects results. happens data predicted subject used model fit? case, package uses population parameter estimates prediction: Depending covariates model, might effect making prediction new samples. population parameters “best estimate” subject included model fit. 
tidymodels framework deliberately constrains predictions new data use training set data (prevent information leakage).","code":"\\eta_{i} = (\\beta_0 + b_{0i}) + \\beta_1x_{i1} \\hat{\\eta}_{i'} = \\hat{\\beta}_0+ \\hat{\\beta}x_{i'1}"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"model can accept case weights. parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply typical formula adding model: prediction, \"stan_glmer\" engine can compute posterior intervals analogous confidence prediction intervals. instances, units original outcome. 
std_error = TRUE, standard deviation posterior distribution (posterior predictive distribution appropriate) returned.","code":"library(tidymodels) data(\"riesby\") linear_reg() %>% set_engine(\"stan_glmer\") %>% fit(depr_score ~ week + (1|subject), data = riesby) library(tidymodels) glmer_spec <- linear_reg() %>% set_engine(\"stan_glmer\") glmer_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = depr_score, predictors = c(week, subject)) %>% add_model(glmer_spec, formula = depr_score ~ week + (1|subject)) fit(glmer_wflow, data = riesby)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"McElreath, R. 2020 Statistical Rethinking. CRC Press. Sorensen, T, Vasishth, S. 2016. Bayesian linear mixed models using Stan: tutorial psychologists, linguists, cognitive scientists, arXiv:1506.06201.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_LiblineaR.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via LiblineaR — details_logistic_reg_LiblineaR","title":"Logistic regression via LiblineaR — details_logistic_reg_LiblineaR","text":"LiblineaR::LiblineaR() fits generalized linear model binary outcomes. 
linear combination predictors used model log odds event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_LiblineaR.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via LiblineaR — details_logistic_reg_LiblineaR","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_LiblineaR.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via LiblineaR — details_logistic_reg_LiblineaR","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: see ) mixture: Proportion Lasso Penalty (type: double, default: 0) LiblineaR models, value mixture can either 0 (ridge) 1 (lasso) intermediate values. LiblineaR::LiblineaR() documentation, correspond types 0 (L2-regularized) 6 (L1-regularized). aware LiblineaR engine regularizes intercept. 
regularized regression models , result different parameter estimates.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_LiblineaR.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via LiblineaR — details_logistic_reg_LiblineaR","text":"","code":"logistic_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"LiblineaR\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## mixture = double(1) ## ## Computational engine: LiblineaR ## ## Model fit template: ## LiblineaR::LiblineaR(x = missing_arg(), y = missing_arg(), cost = Inf, ## type = double(1), verbose = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_LiblineaR.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via LiblineaR — details_logistic_reg_LiblineaR","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. 
One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_LiblineaR.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Logistic regression via LiblineaR — details_logistic_reg_LiblineaR","text":"“Fitting Predicting parsnip” article contains examples logistic_reg() \"LiblineaR\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_LiblineaR.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via LiblineaR — details_logistic_reg_LiblineaR","text":"Hastie, T, R Tibshirani, M Wainwright. 2015. Statistical Learning Sparsity. CRC Press. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_brulee.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via brulee — details_logistic_reg_brulee","title":"Logistic regression via brulee — details_logistic_reg_brulee","text":"brulee::brulee_logistic_reg() fits generalized linear model binary outcomes. 
linear combination predictors used model log odds event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_brulee.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via brulee — details_logistic_reg_brulee","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_brulee.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via brulee — details_logistic_reg_brulee","text":"model 2 tuning parameter: penalty: Amount Regularization (type: double, default: 0.001) mixture: Proportion Lasso Penalty (type: double, default: 0.0) use L1 penalty (.k.. lasso penalty) force parameters strictly zero (packages glmnet). zeroing parameters specific feature optimization method used packages. engine arguments interest: optimizer(): optimization method. See brulee::brulee_linear_reg(). epochs(): integer number passes training set. lean_rate(): number used accelerate gradient decsent process. momentum(): number used use historical gradient information optimization (optimizer = \"SGD\" ). batch_size(): integer number training set points batch. stop_iter(): non-negative integer many iterations improvement stopping. (default: 5L). class_weights(): Numeric class weights. See brulee::brulee_logistic_reg().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_brulee.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Logistic regression via brulee — details_logistic_reg_brulee","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. 
Predictors scale. One way achieve center scale predictor mean zero variance one.","code":"logistic_reg(penalty = double(1)) %>% set_engine(\"brulee\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## ## Computational engine: brulee ## ## Model fit template: ## brulee::brulee_logistic_reg(x = missing_arg(), y = missing_arg(), ## penalty = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_brulee.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via brulee — details_logistic_reg_brulee","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_brulee.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via brulee — details_logistic_reg_brulee","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_gee.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","title":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","text":"gee::gee() uses generalized least squares fit different types models errors independent.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_gee.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_gee.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","text":"model formal tuning parameters. may beneficial determine appropriate correlation structure use, typically affect predicted value model. effect inferential results parameter covariance values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_gee.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","text":"multilevelmod extension package required fit model. 
multilevelmod::gee_fit() wrapper model around gee::gee().","code":"library(multilevelmod) logistic_reg() %>% set_engine(\"gee\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Computational engine: gee ## ## Model fit template: ## multilevelmod::gee_fit(formula = missing_arg(), data = missing_arg(), ## family = binomial)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_gee.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_gee.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","text":"model accept case weights. gee:gee() gee:geepack() specify id/cluster variable using argument id requires vector. parsnip doesn’t work way enable model fit using artificial function id_var() used formula. , original package, call look like: parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply GEE formula adding model: gee::gee() function always prints warnings output even silent = TRUE. parsnip \"gee\" engine, contrast, silences console output coming gee::gee(), even silent = FALSE. 
Also, issues gee() function, supplementary call glm() needed get rank QR decomposition objects predict() can used.","code":"gee(breaks ~ tension, id = wool, data = warpbreaks, corstr = \"exchangeable\") library(tidymodels) data(\"toenail\", package = \"HSAUR3\") logistic_reg() %>% set_engine(\"gee\", corstr = \"exchangeable\") %>% fit(outcome ~ treatment * visit + id_var(patientID), data = toenail) library(tidymodels) gee_spec <- logistic_reg() %>% set_engine(\"gee\", corstr = \"exchangeable\") gee_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = outcome, predictors = c(treatment, visit, patientID)) %>% add_model(gee_spec, formula = outcome ~ treatment * visit + id_var(patientID)) fit(gee_wflow, data = toenail)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_gee.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_gee.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","text":"Liang, K.Y. Zeger, S.L. (1986) Longitudinal data analysis using generalized linear models. Biometrika, 73 13–22. Zeger, S.L. Liang, K.Y. (1986) Longitudinal data analysis discrete continuous outcomes. Biometrics, 42 121–130.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via glm — details_logistic_reg_glm","title":"Logistic regression via glm — details_logistic_reg_glm","text":"stats::glm() fits generalized linear model binary outcomes. 
linear combination predictors used model log odds event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via glm — details_logistic_reg_glm","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via glm — details_logistic_reg_glm","text":"engine tuning parameters can set family parameter (/link) engine argument (see ).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via glm — details_logistic_reg_glm","text":"use non-default family /link, pass argument set_engine():","code":"logistic_reg() %>% set_engine(\"glm\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Computational engine: glm ## ## Model fit template: ## stats::glm(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## family = stats::binomial) linear_reg() %>% set_engine(\"glm\", family = stats::binomial(link = \"probit\")) %>% translate() ## Linear Regression Model Specification (regression) ## ## Engine-Specific Arguments: ## family = stats::binomial(link = \"probit\") ## ## Computational engine: glm ## ## Model fit template: ## stats::glm(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## family = stats::binomial(link = \"probit\"))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing 
requirements","title":"Logistic regression via glm — details_logistic_reg_glm","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via glm — details_logistic_reg_glm","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. However, documentation stats::glm() assumes specific type case weights used:“Non-NULL weights can used indicate different observations different dispersions (values weights inversely proportional dispersions); equivalently, elements weights positive integers w_i, response y_i mean w_i unit-weight observations. binomial GLM prior weights used give number trials response proportion successes: rarely used Poisson GLM.”","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Logistic regression via glm — details_logistic_reg_glm","text":"model object contains data required make predictions. 
saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Logistic regression via glm — details_logistic_reg_glm","text":"“Fitting Predicting parsnip” article contains examples logistic_reg() \"glm\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via glm — details_logistic_reg_glm","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via mixed models — details_logistic_reg_glmer","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"\"glmer\" engine estimates fixed random effect regression parameters using maximum likelihood (restricted maximum likelihood) estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"model tuning 
parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"multilevelmod extension package required fit model.","code":"library(multilevelmod) logistic_reg() %>% set_engine(\"glmer\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Computational engine: glmer ## ## Model fit template: ## lme4::glmer(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## family = binomial)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":"predicting-new-samples","dir":"Reference","previous_headings":"","what":"Predicting new samples","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"model can use subject-specific coefficient estimates make predictions (.e. partial pooling). example, equation shows linear predictor (\\eta) random intercept: $$ denotes ith independent experimental unit (e.g. subject). model seen subject , can use subject’s data adjust population intercept specific subjects results. happens data predicted subject used model fit? case, package uses population parameter estimates prediction: Depending covariates model, might effect making prediction new samples. population parameters “best estimate” subject included model fit. 
tidymodels framework deliberately constrains predictions new data use training set data (prevent information leakage).","code":"\\eta_{i} = (\\beta_0 + b_{0i}) + \\beta_1x_{i1} \\hat{\\eta}_{i'} = \\hat{\\beta}_0+ \\hat{\\beta}x_{i'1}"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"model can accept case weights. parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. 
case, can add appropriate columns using add_variables() supply typical formula adding model:","code":"library(tidymodels) data(\"toenail\", package = \"HSAUR3\") logistic_reg() %>% set_engine(\"glmer\") %>% fit(outcome ~ treatment * visit + (1 | patientID), data = toenail) library(tidymodels) glmer_spec <- logistic_reg() %>% set_engine(\"glmer\") glmer_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = outcome, predictors = c(treatment, visit, patientID)) %>% add_model(glmer_spec, formula = outcome ~ treatment * visit + (1 | patientID)) fit(glmer_wflow, data = toenail)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"J Pinheiro, D Bates. 2000. Mixed-effects models S S-PLUS. Springer, New York, NY West, K, Band Welch, Galecki. 2014. Linear Mixed Models: Practical Guide Using Statistical Software. CRC Press. Thorson, J, Minto, C. 2015, Mixed effects: unifying framework statistical modelling fisheries biology. ICES Journal Marine Science, Volume 72, Issue 5, Pages 1245–1256. Harrison, XA, Donaldson, L, Correa-Cano, , Evans, J, Fisher, DN, Goodwin, CED, Robinson, BS, Hodgson, DJ, Inger, R. 2018. brief introduction mixed effects modelling multi-model inference ecology. PeerJ 6:e4794. DeBruine LM, Barr DJ. Understanding Mixed-Effects Models Data Simulation. 2021. 
Advances Methods Practices Psychological Science.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via glmnet — details_logistic_reg_glmnet","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"glmnet::glmnet() fits generalized linear model binary outcomes. linear combination predictors used model log odds event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: see ) mixture: Proportion Lasso Penalty (type: double, default: 1.0) penalty parameter default requires single numeric value. details , glmnet model general, see glmnet-details. 
mixture: mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"","code":"logistic_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"glmnet\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Main Arguments: ## penalty = 0 ## mixture = double(1) ## ## Computational engine: glmnet ## ## Model fit template: ## glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## alpha = double(1), family = \"binomial\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, glmnet::glmnet() uses argument standardize = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"“Fitting Predicting parsnip” article contains examples logistic_reg() \"glmnet\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"Hastie, T, R Tibshirani, M Wainwright. 2015. Statistical Learning Sparsity. CRC Press. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via h2o — details_logistic_reg_h2o","title":"Logistic regression via h2o — details_logistic_reg_h2o","text":"h2o::h2o.glm() fits generalized linear model binary outcomes. 
linear combination predictors used model log odds event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via h2o — details_logistic_reg_h2o","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via h2o — details_logistic_reg_h2o","text":"model 2 tuning parameters: mixture: Proportion Lasso Penalty (type: double, default: see ) penalty: Amount Regularization (type: double, default: see ) default, given fixed penalty, h2o::h2o.glm() uses heuristic approach select optimal value penalty based training data. Setting engine parameter lambda_search TRUE enables efficient version grid search, see details https://docs.h2o.ai/h2o/latest-stable/h2o-docs/data-science/algo-params/lambda_search.html. choice mixture depends engine parameter solver, automatically chosen given training data specification model parameters. solver set 'L-BFGS', mixture defaults 0 (ridge regression) 0.5 otherwise.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_h2o.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via h2o — details_logistic_reg_h2o","text":"agua::h2o_train_glm() logistic_reg() wrapper around h2o::h2o.glm(). h2o automatically picks link function distribution family binomial responses. 
use non-default argument h2o::h2o.glm(), pass engine argument set_engine():","code":"logistic_reg() %>% set_engine(\"h2o\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), family = \"binomial\") logistic_reg() %>% set_engine(\"h2o\", compute_p_values = TRUE) %>% translate() ## Logistic Regression Model Specification (classification) ## ## Engine-Specific Arguments: ## compute_p_values = TRUE ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), compute_p_values = TRUE, ## family = \"binomial\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_h2o.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via h2o — details_logistic_reg_h2o","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, h2o::h2o.glm() uses argument standardize = TRUE center scale numeric columns.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Logistic regression via h2o — details_logistic_reg_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. 
default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Logistic regression via h2o — details_logistic_reg_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via keras — details_logistic_reg_keras","title":"Logistic regression via keras — details_logistic_reg_keras","text":"keras_mlp() fits generalized linear model binary outcomes. 
linear combination predictors used model log odds event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via keras — details_logistic_reg_keras","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via keras — details_logistic_reg_keras","text":"model one tuning parameter: penalty: Amount Regularization (type: double, default: 0.0) penalty, amount regularization L2 penalty (.e., ridge weight decay).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via keras — details_logistic_reg_keras","text":"keras_mlp() parsnip wrapper around keras code neural networks. model fits linear regression network single hidden unit.","code":"logistic_reg(penalty = double(1)) %>% set_engine(\"keras\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## ## Computational engine: keras ## ## Model fit template: ## parsnip::keras_mlp(x = missing_arg(), y = missing_arg(), penalty = double(1), ## hidden_units = 1, act = \"linear\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via keras — details_logistic_reg_keras","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. 
using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via keras — details_logistic_reg_keras","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Logistic regression via keras — details_logistic_reg_keras","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Logistic regression via keras — details_logistic_reg_keras","text":"“Fitting Predicting parsnip” article contains examples logistic_reg() \"keras\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via keras — details_logistic_reg_keras","text":"Hoerl, ., & Kennard, R. (2000). Ridge Regression: Biased Estimation Nonorthogonal Problems. 
Technometrics, 42(1), 80-86.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_spark.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via spark — details_logistic_reg_spark","title":"Logistic regression via spark — details_logistic_reg_spark","text":"sparklyr::ml_logistic_regression() fits generalized linear model binary outcomes. linear combination predictors used model log odds event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_spark.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via spark — details_logistic_reg_spark","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_spark.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via spark — details_logistic_reg_spark","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: 0.0) mixture: Proportion Lasso Penalty (type: double, default: 0.0) penalty, amount regularization includes L1 penalty (.e., lasso) L2 penalty (.e., ridge weight decay). 
mixture: mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_spark.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via spark — details_logistic_reg_spark","text":"","code":"logistic_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"spark\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## mixture = double(1) ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_logistic_regression(x = missing_arg(), formula = missing_arg(), ## weights = missing_arg(), reg_param = double(1), elastic_net_param = double(1), ## family = \"binomial\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_spark.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via spark — details_logistic_reg_spark","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, ml_logistic_regression() uses argument standardization = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_spark.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via spark — details_logistic_reg_spark","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights. Note , spark engines, case_weight argument value character string specify column numeric case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_spark.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Logistic regression via spark — details_logistic_reg_spark","text":"models created using \"spark\" engine, several things consider. formula interface via fit() available; using fit_xy() generate error. predictions always Spark table format. names documented without dots. equivalent factor columns Spark tables class predictions returned character columns. retain model object new R session (via save()), model$fit element parsnip object serialized via ml_save(object$fit) separately saved disk. new session, object can reloaded reattached parsnip object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_spark.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via spark — details_logistic_reg_spark","text":"Luraschi, J, K Kuo, E Ruiz. 2019. Mastering Spark R. O’Reilly Media Hastie, T, R Tibshirani, M Wainwright. 2015. Statistical Learning Sparsity. CRC Press. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via stan — details_logistic_reg_stan","title":"Logistic regression via stan — details_logistic_reg_stan","text":"rstanarm::stan_glm() fits generalized linear model binary outcomes. 
linear combination predictors used model log odds event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via stan — details_logistic_reg_stan","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via stan — details_logistic_reg_stan","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"important-engine-specific-options","dir":"Reference","previous_headings":"","what":"Important engine-specific options","title":"Logistic regression via stan — details_logistic_reg_stan","text":"relevant arguments can passed set_engine(): chains: positive integer specifying number Markov chains. default 4. iter: positive integer specifying number iterations chain (including warmup). default 2000. seed: seed random number generation. cores: Number cores use executing chains parallel. prior: prior distribution (non-hierarchical) regression coefficients. \"stan\" engine fit hierarchical terms. prior_intercept: prior distribution intercept (centering predictors). See rstan::sampling() rstanarm::priors() information options.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via stan — details_logistic_reg_stan","text":"Note refresh default prevents logging estimation process. 
Change value set_engine() show MCMC logs.","code":"logistic_reg() %>% set_engine(\"stan\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Computational engine: stan ## ## Model fit template: ## rstanarm::stan_glm(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), family = stats::binomial, refresh = 0)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via stan — details_logistic_reg_stan","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Logistic regression via stan — details_logistic_reg_stan","text":"prediction, \"stan\" engine can compute posterior intervals analogous confidence prediction intervals. instances, units original outcome std_error = TRUE, standard deviation posterior distribution (posterior predictive distribution appropriate) returned.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via stan — details_logistic_reg_stan","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Logistic regression via stan — details_logistic_reg_stan","text":"“Fitting Predicting parsnip” article contains examples logistic_reg() \"stan\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via stan — details_logistic_reg_stan","text":"McElreath, R. 2020 Statistical Rethinking. CRC Press.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"\"stan_glmer\" engine estimates hierarchical regression parameters using Bayesian estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"model tuning 
parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"important-engine-specific-options","dir":"Reference","previous_headings":"","what":"Important engine-specific options","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"relevant arguments can passed set_engine(): chains: positive integer specifying number Markov chains. default 4. iter: positive integer specifying number iterations chain (including warmup). default 2000. seed: seed random number generation. cores: Number cores use executing chains parallel. prior: prior distribution (non-hierarchical) regression coefficients. prior_intercept: prior distribution intercept (centering predictors). See ?rstanarm::stan_glmer ?rstan::sampling information.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"multilevelmod extension package required fit model.","code":"library(multilevelmod) logistic_reg() %>% set_engine(\"stan_glmer\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Computational engine: stan_glmer ## ## Model fit template: ## rstanarm::stan_glmer(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), family = stats::binomial, refresh = 0)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"predicting-new-samples","dir":"Reference","previous_headings":"","what":"Predicting new samples","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"model can use subject-specific coefficient estimates make predictions (.e. 
partial pooling). example, equation shows linear predictor (\\eta) random intercept: $$ denotes ith independent experimental unit (e.g. subject). model seen subject , can use subject’s data adjust population intercept specific subjects results. happens data predicted subject used model fit? case, package uses population parameter estimates prediction: Depending covariates model, might effect making prediction new samples. population parameters “best estimate” subject included model fit. tidymodels framework deliberately constrains predictions new data use training set data (prevent information leakage).","code":"\\eta_{i} = (\\beta_0 + b_{0i}) + \\beta_1x_{i1} \\hat{\\eta}_{i'} = \\hat{\\beta}_0+ \\hat{\\beta}x_{i'1}"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"model can accept case weights. parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply typical formula adding model: prediction, \"stan_glmer\" engine can compute posterior intervals analogous confidence prediction intervals. instances, units original outcome. 
std_error = TRUE, standard deviation posterior distribution (posterior predictive distribution appropriate) returned.","code":"library(tidymodels) data(\"toenail\", package = \"HSAUR3\") logistic_reg() %>% set_engine(\"stan_glmer\") %>% fit(outcome ~ treatment * visit + (1 | patientID), data = toenail) library(tidymodels) glmer_spec <- logistic_reg() %>% set_engine(\"stan_glmer\") glmer_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = outcome, predictors = c(treatment, visit, patientID)) %>% add_model(glmer_spec, formula = outcome ~ treatment * visit + (1 | patientID)) fit(glmer_wflow, data = toenail)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"McElreath, R. 2020 Statistical Rethinking. CRC Press. Sorensen, T, Vasishth, S. 2016. 
Bayesian linear mixed models using Stan: tutorial psychologists, linguists, cognitive scientists, arXiv:1506.06201.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":null,"dir":"Reference","previous_headings":"","what":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"earth::earth() fits generalized linear model uses artificial features predictors. features resemble hinge functions result model segmented regression small dimensions.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"model 3 tuning parameters: num_terms: # Model Terms (type: integer, default: see ) prod_degree: Degree Interaction (type: integer, default: 1L) prune_method: Pruning Method (type: character, default: ‘backward’) default value num_terms depends number predictor columns. 
data frame x, default min(200, max(20, 2 * ncol(x))) + 1 (see earth::earth() reference ).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"","code":"mars(num_terms = integer(1), prod_degree = integer(1), prune_method = character(1)) %>% set_engine(\"earth\") %>% set_mode(\"regression\") %>% translate() ## MARS Model Specification (regression) ## ## Main Arguments: ## num_terms = integer(1) ## prod_degree = integer(1) ## prune_method = character(1) ## ## Computational engine: earth ## ## Model fit template: ## earth::earth(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## nprune = integer(1), degree = integer(1), pmethod = character(1), ## keepxy = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"alternate method using MARs categorical outcomes can found discrim_flexible().","code":"mars(num_terms = integer(1), prod_degree = integer(1), prune_method = character(1)) %>% set_engine(\"earth\") %>% set_mode(\"classification\") %>% translate() ## MARS Model Specification (classification) ## ## Main Arguments: ## num_terms = integer(1) ## prod_degree = integer(1) ## prune_method = character(1) ## ## Engine-Specific Arguments: ## glm = list(family = stats::binomial) ## ## Computational engine: earth ## ## Model fit template: ## earth::earth(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## 
nprune = integer(1), degree = integer(1), pmethod = character(1), ## glm = list(family = stats::binomial), keepxy = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. Note earth package documentation : “current implementation, building models weights can slow.”","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"model object contains data required make predictions. 
saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"“Fitting Predicting parsnip” article contains examples mars() \"earth\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"Friedman, J. 1991. “Multivariate Adaptive Regression Splines.” Annals Statistics, vol. 19, . 1, pp. 1-67. Milborrow, S. “Notes earth package.” Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_brulee.html","id":null,"dir":"Reference","previous_headings":"","what":"Multilayer perceptron via brulee — details_mlp_brulee","title":"Multilayer perceptron via brulee — details_mlp_brulee","text":"brulee::brulee_mlp() fits neural network.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_brulee.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multilayer perceptron via brulee — details_mlp_brulee","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_brulee.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multilayer perceptron via brulee — details_mlp_brulee","text":"model 7 tuning parameters: hidden_units: # Hidden Units (type: integer, default: 3L) penalty: Amount Regularization (type: double, default: 0.0) mixture: Proportion Lasso Penalty (type: double, default: 0.0) 
epochs: # Epochs (type: integer, default: 100L) dropout: Dropout Rate (type: double, default: 0.0) learn_rate: Learning Rate (type: double, default: 0.01) activation: Activation Function (type: character, default: ‘relu’) use L1 penalty (.k.. lasso penalty) force parameters strictly zero (packages glmnet). zeroing parameters specific feature optimization method used packages. penalty dropout used model. engine arguments interest: momentum(): number used use historical gradient infomration optimization. batch_size(): integer number training set points batch. class_weights(): Numeric class weights. See brulee::brulee_mlp(). stop_iter(): non-negative integer many iterations improvement stopping. (default: 5L).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_brulee.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Multilayer perceptron via brulee — details_mlp_brulee","text":"Note parsnip automatically sets linear activation last layer.","code":"mlp( hidden_units = integer(1), penalty = double(1), dropout = double(1), epochs = integer(1), learn_rate = double(1), activation = character(1) ) %>% set_engine(\"brulee\") %>% set_mode(\"regression\") %>% translate() ## Single Layer Neural Network Model Specification (regression) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## dropout = double(1) ## epochs = integer(1) ## activation = character(1) ## learn_rate = double(1) ## ## Computational engine: brulee ## ## Model fit template: ## brulee::brulee_mlp(x = missing_arg(), y = missing_arg(), hidden_units = integer(1), ## penalty = double(1), dropout = double(1), epochs = integer(1), ## activation = character(1), learn_rate = 
double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_brulee.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Multilayer perceptron via brulee — details_mlp_brulee","text":"","code":"mlp( hidden_units = integer(1), penalty = double(1), dropout = double(1), epochs = integer(1), learn_rate = double(1), activation = character(1) ) %>% set_engine(\"brulee\") %>% set_mode(\"classification\") %>% translate() ## Single Layer Neural Network Model Specification (classification) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## dropout = double(1) ## epochs = integer(1) ## activation = character(1) ## learn_rate = double(1) ## ## Computational engine: brulee ## ## Model fit template: ## brulee::brulee_mlp(x = missing_arg(), y = missing_arg(), hidden_units = integer(1), ## penalty = double(1), dropout = double(1), epochs = integer(1), ## activation = character(1), learn_rate = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_brulee.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multilayer perceptron via brulee — details_mlp_brulee","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. 
One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_brulee.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multilayer perceptron via brulee — details_mlp_brulee","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_brulee.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multilayer perceptron via brulee — details_mlp_brulee","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Multilayer perceptron via h2o — details_mlp_h2o","title":"Multilayer perceptron via h2o — details_mlp_h2o","text":"h2o::h2o.deeplearning() fits feed-forward neural network.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multilayer perceptron via h2o — details_mlp_h2o","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multilayer perceptron via h2o — details_mlp_h2o","text":"model 6 tuning parameters: hidden_units: # Hidden Units (type: integer, default: 200L) penalty: Amount Regularization (type: double, default: 0.0) dropout: Dropout Rate (type: double, default: 0.5) epochs: # Epochs (type: integer, default: 10) activation: Activation function (type: character, default: ‘see ’) learn_rate: Learning Rate (type: double, default: 0.005) naming activation functions h2o::h2o.deeplearning() differs parsnip’s conventions. 
Currently, “relu” “tanh” supported converted internally “Rectifier” “Tanh” passed fitting function. penalty corresponds l2 penalty. h2o::h2o.deeplearning() also supports specifying l1 penalty directly engine argument l1. engine arguments interest: stopping_rounds controls early stopping rounds based convergence another engine parameter stopping_metric. default, h2o::h2o.deeplearning stops training simple moving average length 5 stopping_metric improve 5 scoring events. mostly useful used alongside engine parameter validation, proportion train-validation split, parsnip split pass two data frames h2o. h2o::h2o.deeplearning evaluate metric early stopping criteria validation set. h2o uses 50% dropout ratio controlled dropout hidden layers default. h2o::h2o.deeplearning() provides engine argument input_dropout_ratio dropout ratios input layer, defaults 0.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_h2o.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Multilayer perceptron via h2o — details_mlp_h2o","text":"agua::h2o_train_mlp wrapper around h2o::h2o.deeplearning().","code":"mlp( hidden_units = integer(1), penalty = double(1), dropout = double(1), epochs = integer(1), learn_rate = double(1), activation = character(1) ) %>% set_engine(\"h2o\") %>% set_mode(\"regression\") %>% translate() ## Single Layer Neural Network Model Specification (regression) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## dropout = double(1) ## epochs = integer(1) ## activation = character(1) ## learn_rate = double(1) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_mlp(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), hidden = integer(1), l2 = double(1), ## hidden_dropout_ratios = double(1), epochs = integer(1), 
activation = character(1), ## rate = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_h2o.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Multilayer perceptron via h2o — details_mlp_h2o","text":"","code":"mlp( hidden_units = integer(1), penalty = double(1), dropout = double(1), epochs = integer(1), learn_rate = double(1), activation = character(1) ) %>% set_engine(\"h2o\") %>% set_mode(\"classification\") %>% translate() ## Single Layer Neural Network Model Specification (classification) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## dropout = double(1) ## epochs = integer(1) ## activation = character(1) ## learn_rate = double(1) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_mlp(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), hidden = integer(1), l2 = double(1), ## hidden_dropout_ratios = double(1), epochs = integer(1), activation = character(1), ## rate = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_h2o.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multilayer perceptron via h2o — details_mlp_h2o","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. 
default, h2o::h2o.deeplearning() uses argument standardize = TRUE center scale numeric columns.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Multilayer perceptron via h2o — details_mlp_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Multilayer perceptron via h2o — details_mlp_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. 
learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":null,"dir":"Reference","previous_headings":"","what":"Multilayer perceptron via keras — details_mlp_keras","title":"Multilayer perceptron via keras — details_mlp_keras","text":"keras_mlp() fits single layer, feed-forward neural network.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multilayer perceptron via keras — details_mlp_keras","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multilayer perceptron via keras — details_mlp_keras","text":"model 5 tuning parameters: hidden_units: # Hidden Units (type: integer, default: 5L) penalty: Amount Regularization (type: double, default: 0.0) dropout: Dropout Rate (type: double, default: 0.0) epochs: # Epochs (type: integer, default: 20L) activation: Activation Function (type: character, default: ‘softmax’)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Multilayer perceptron via keras — details_mlp_keras","text":"","code":"mlp( hidden_units = integer(1), penalty = double(1), dropout = double(1), epochs = integer(1), activation = character(1) ) %>% set_engine(\"keras\") %>% set_mode(\"regression\") %>% translate() ## Single Layer Neural Network Model Specification (regression) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## dropout = double(1) ## epochs = integer(1) ## activation = 
character(1) ## ## Computational engine: keras ## ## Model fit template: ## parsnip::keras_mlp(x = missing_arg(), y = missing_arg(), hidden_units = integer(1), ## penalty = double(1), dropout = double(1), epochs = integer(1), ## activation = character(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Multilayer perceptron via keras — details_mlp_keras","text":"","code":"mlp( hidden_units = integer(1), penalty = double(1), dropout = double(1), epochs = integer(1), activation = character(1) ) %>% set_engine(\"keras\") %>% set_mode(\"classification\") %>% translate() ## Single Layer Neural Network Model Specification (classification) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## dropout = double(1) ## epochs = integer(1) ## activation = character(1) ## ## Computational engine: keras ## ## Model fit template: ## parsnip::keras_mlp(x = missing_arg(), y = missing_arg(), hidden_units = integer(1), ## penalty = double(1), dropout = double(1), epochs = integer(1), ## activation = character(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multilayer perceptron via keras — details_mlp_keras","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. 
One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multilayer perceptron via keras — details_mlp_keras","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Multilayer perceptron via keras — details_mlp_keras","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Multilayer perceptron via keras — details_mlp_keras","text":"“Fitting Predicting parsnip” article contains examples mlp() \"keras\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multilayer perceptron via keras — details_mlp_keras","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":null,"dir":"Reference","previous_headings":"","what":"Multilayer perceptron via nnet — details_mlp_nnet","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"nnet::nnet() fits single layer, feed-forward neural network.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"model 3 tuning parameters: hidden_units: # Hidden Units (type: integer, default: none) penalty: Amount Regularization (type: double, default: 0.0) epochs: # Epochs (type: integer, default: 100L) Note , nnet::nnet(), maximum number parameters argument fairly low value maxit = 1000. 
models, may need pass value via set_engine() model fail.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"Note parsnip automatically sets linear activation last layer.","code":"mlp( hidden_units = integer(1), penalty = double(1), epochs = integer(1) ) %>% set_engine(\"nnet\") %>% set_mode(\"regression\") %>% translate() ## Single Layer Neural Network Model Specification (regression) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## epochs = integer(1) ## ## Computational engine: nnet ## ## Model fit template: ## nnet::nnet(formula = missing_arg(), data = missing_arg(), size = integer(1), ## decay = double(1), maxit = integer(1), trace = FALSE, linout = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"","code":"mlp( hidden_units = integer(1), penalty = double(1), epochs = integer(1) ) %>% set_engine(\"nnet\") %>% set_mode(\"classification\") %>% translate() ## Single Layer Neural Network Model Specification (classification) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## epochs = integer(1) ## ## Computational engine: nnet ## ## Model fit template: ## nnet::nnet(formula = missing_arg(), data = missing_arg(), size = integer(1), ## decay = double(1), maxit = integer(1), trace = FALSE, linout = 
FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"“Fitting Predicting parsnip” article contains examples mlp() \"nnet\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_brulee.html","id":null,"dir":"Reference","previous_headings":"","what":"Multinomial regression via brulee — details_multinom_reg_brulee","title":"Multinomial regression via brulee — details_multinom_reg_brulee","text":"brulee::brulee_multinomial_reg() fits model uses linear predictors predict multiclass data using multinomial distribution.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_brulee.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multinomial regression via brulee — details_multinom_reg_brulee","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_brulee.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multinomial regression via brulee — details_multinom_reg_brulee","text":"model 2 tuning parameter: penalty: Amount Regularization (type: double, default: 0.001) mixture: Proportion Lasso Penalty (type: double, default: 0.0) use L1 penalty (.k.. lasso penalty) force parameters strictly zero (packages glmnet). zeroing parameters specific feature optimization method used packages. engine arguments interest: optimizer(): optimization method. See brulee::brulee_linear_reg(). epochs(): integer number passes training set. lean_rate(): number used accelerate gradient decsent process. momentum(): number used use historical gradient information optimization (optimizer = \"SGD\" ). batch_size(): integer number training set points batch. stop_iter(): non-negative integer many iterations improvement stopping. (default: 5L). class_weights(): Numeric class weights. 
See brulee::brulee_multinomial_reg().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_brulee.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Multinomial regression via brulee — details_multinom_reg_brulee","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":"multinom_reg(penalty = double(1)) %>% set_engine(\"brulee\") %>% translate() ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## ## Computational engine: brulee ## ## Model fit template: ## brulee::brulee_multinomial_reg(x = missing_arg(), y = missing_arg(), ## penalty = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_brulee.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multinomial regression via brulee — details_multinom_reg_brulee","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_brulee.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multinomial regression via brulee — details_multinom_reg_brulee","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":null,"dir":"Reference","previous_headings":"","what":"Multinomial regression via glmnet — details_multinom_reg_glmnet","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"glmnet::glmnet() fits model uses linear predictors predict multiclass data using multinomial distribution.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: see ) mixture: Proportion Lasso Penalty (type: double, default: 1.0) penalty parameter default requires single numeric value. details , glmnet model general, see glmnet-details. 
mixture: mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"","code":"multinom_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"glmnet\") %>% translate() ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = 0 ## mixture = double(1) ## ## Computational engine: glmnet ## ## Model fit template: ## glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## alpha = double(1), family = \"multinomial\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. 
default, glmnet::glmnet() uses argument standardize = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"“Fitting Predicting parsnip” article contains examples multinom_reg() \"glmnet\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"Hastie, T, R Tibshirani, M Wainwright. 2015. Statistical Learning Sparsity. CRC Press. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Multinomial regression via h2o — details_multinom_reg_h2o","title":"Multinomial regression via h2o — details_multinom_reg_h2o","text":"h2o::h2o.glm() fits model uses linear predictors predict multiclass data multinomial responses.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multinomial regression via h2o — details_multinom_reg_h2o","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multinomial regression via h2o — details_multinom_reg_h2o","text":"model 2 tuning parameters: mixture: Proportion Lasso Penalty (type: double, default: see ) penalty: Amount Regularization (type: double, default: see ) default, given fixed penalty, h2o::h2o.glm() uses heuristic approach select optimal value penalty based training data. Setting engine parameter lambda_search TRUE enables efficient version grid search, see details https://docs.h2o.ai/h2o/latest-stable/h2o-docs/data-science/algo-params/lambda_search.html. choice mixture depends engine parameter solver, automatically chosen given training data specification model parameters. 
solver set 'L-BFGS', mixture defaults 0 (ridge regression) 0.5 otherwise.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_h2o.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Multinomial regression via h2o — details_multinom_reg_h2o","text":"agua::h2o_train_glm() multinom_reg() wrapper around h2o::h2o.glm() family = 'multinomial'.","code":"multinom_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"h2o\") %>% translate() ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## mixture = double(1) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), lambda = double(1), alpha = double(1), ## family = \"multinomial\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_h2o.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multinomial regression via h2o — details_multinom_reg_h2o","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, h2o::h2o.glm() uses argument standardize = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Multinomial regression via h2o — details_multinom_reg_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. 
can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":null,"dir":"Reference","previous_headings":"","what":"Multinomial regression via keras — details_multinom_reg_keras","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"keras_mlp() fits model uses linear predictors predict multiclass data using multinomial distribution.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"model one tuning parameter: penalty: Amount Regularization (type: double, default: 0.0) penalty, amount regularization L2 penalty (.e., ridge weight decay).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"keras_mlp() parsnip wrapper around keras code neural networks. 
model fits linear regression network single hidden unit.","code":"multinom_reg(penalty = double(1)) %>% set_engine(\"keras\") %>% translate() ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## ## Computational engine: keras ## ## Model fit template: ## parsnip::keras_mlp(x = missing_arg(), y = missing_arg(), penalty = double(1), ## hidden_units = 1, act = \"linear\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. 
learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"“Fitting Predicting parsnip” article contains examples multinom_reg() \"keras\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"Hoerl, ., & Kennard, R. (2000). Ridge Regression: Biased Estimation Nonorthogonal Problems. Technometrics, 42(1), 80-86.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":null,"dir":"Reference","previous_headings":"","what":"Multinomial regression via nnet — details_multinom_reg_nnet","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"nnet::multinom() fits model uses linear predictors predict multiclass data using multinomial distribution.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"model 1 tuning parameters: penalty: Amount Regularization (type: double, default: 0.0) penalty, amount regularization includes L2 penalty (.e., ridge weight 
decay).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"","code":"multinom_reg(penalty = double(1)) %>% set_engine(\"nnet\") %>% translate() ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## ## Computational engine: nnet ## ## Model fit template: ## nnet::multinom(formula = missing_arg(), data = missing_arg(), ## decay = double(1), trace = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. 
One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"“Fitting Predicting parsnip” article contains examples multinom_reg() \"nnet\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"Luraschi, J, K Kuo, E Ruiz. 2019. Mastering nnet R. O’Reilly Media Hastie, T, R Tibshirani, M Wainwright. 2015. Statistical Learning Sparsity. CRC Press. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_spark.html","id":null,"dir":"Reference","previous_headings":"","what":"Multinomial regression via spark — details_multinom_reg_spark","title":"Multinomial regression via spark — details_multinom_reg_spark","text":"sparklyr::ml_logistic_regression() fits model uses linear predictors predict multiclass data using multinomial distribution.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_spark.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multinomial regression via spark — details_multinom_reg_spark","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_spark.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multinomial regression via spark — details_multinom_reg_spark","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: 0.0) mixture: Proportion Lasso Penalty (type: double, default: 0.0) penalty, amount regularization includes L1 penalty (.e., lasso) L2 penalty (.e., ridge weight decay). 
mixture: mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_spark.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Multinomial regression via spark — details_multinom_reg_spark","text":"","code":"multinom_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"spark\") %>% translate() ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## mixture = double(1) ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_logistic_regression(x = missing_arg(), formula = missing_arg(), ## weights = missing_arg(), reg_param = double(1), elastic_net_param = double(1), ## family = \"multinomial\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_spark.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multinomial regression via spark — details_multinom_reg_spark","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, ml_multinom_regression() uses argument standardization = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_spark.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multinomial regression via spark — details_multinom_reg_spark","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights. Note , spark engines, case_weight argument value character string specify column numeric case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_spark.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Multinomial regression via spark — details_multinom_reg_spark","text":"models created using \"spark\" engine, several things consider. formula interface via fit() available; using fit_xy() generate error. predictions always Spark table format. names documented without dots. equivalent factor columns Spark tables class predictions returned character columns. retain model object new R session (via save()), model$fit element parsnip object serialized via ml_save(object$fit) separately saved disk. new session, object can reloaded reattached parsnip object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_spark.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multinomial regression via spark — details_multinom_reg_spark","text":"Luraschi, J, K Kuo, E Ruiz. 2019. Mastering Spark R. O’Reilly Media Hastie, T, R Tibshirani, M Wainwright. 2015. Statistical Learning Sparsity. CRC Press. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Naive Bayes models via naivebayes — details_naive_Bayes_h2o","title":"Naive Bayes models via naivebayes — details_naive_Bayes_h2o","text":"h2o::h2o.naiveBayes() fits model uses Bayes' theorem compute probability class, given predictor values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Naive Bayes models via naivebayes — details_naive_Bayes_h2o","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Naive Bayes models via naivebayes — details_naive_Bayes_h2o","text":"model 1 tuning parameter: Laplace: Laplace Correction (type: double, default: 0.0) h2o::h2o.naiveBayes() provides several engine arguments deal imbalances rare classes: balance_classes logical value controlling /-sampling (imbalanced data). Defaults FALSE. class_sampling_factors /-sampling ratios per class (lexicographic order). specified, sampling factors automatically computed obtain class balance training. Require balance_classes TRUE. min_sdev: minimum standard deviation use observations without enough data, must greater 1e-10. min_prob: minimum probability use observations enough data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_h2o.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Naive Bayes models via naivebayes — details_naive_Bayes_h2o","text":"agua extension package required fit model. 
agua::h2o_train_nb() wrapper around h2o::h2o.naiveBayes().","code":"naive_Bayes(Laplace = numeric(0)) %>% set_engine(\"h2o\") %>% translate() ## Naive Bayes Model Specification (classification) ## ## Main Arguments: ## Laplace = numeric(0) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_nb(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), laplace = numeric(0))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Naive Bayes models via naivebayes — details_naive_Bayes_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Naive Bayes models via naivebayes — details_naive_Bayes_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. 
learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_klaR.html","id":null,"dir":"Reference","previous_headings":"","what":"Naive Bayes models via klaR — details_naive_Bayes_klaR","title":"Naive Bayes models via klaR — details_naive_Bayes_klaR","text":"klaR::NaiveBayes() fits model uses Bayes' theorem compute probability class, given predictor values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_klaR.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Naive Bayes models via klaR — details_naive_Bayes_klaR","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_klaR.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Naive Bayes models via klaR — details_naive_Bayes_klaR","text":"model 2 tuning parameter: smoothness: Kernel Smoothness (type: double, default: 1.0) Laplace: Laplace Correction (type: double, default: 0.0) Note engine argument usekernel set TRUE default using klaR engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_klaR.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Naive Bayes models via klaR — details_naive_Bayes_klaR","text":"discrim extension package required fit model.","code":"library(discrim) naive_Bayes(smoothness = numeric(0), Laplace = numeric(0)) %>% set_engine(\"klaR\") %>% translate() ## Naive Bayes Model Specification (classification) ## ## Main Arguments: ## smoothness = numeric(0) ## Laplace = numeric(0) ## ## Computational engine: klaR ## ## Model fit template: ## discrim::klar_bayes_wrapper(x = missing_arg(), y = missing_arg(), ## adjust = numeric(0), fL = numeric(0), 
usekernel = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_klaR.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Naive Bayes models via klaR — details_naive_Bayes_klaR","text":"columns qualitative predictors always represented factors (opposed dummy/indicator variables). predictors factors, underlying code treats multinomial data appropriately computes conditional distributions. Variance calculations used computations zero-variance predictors (.e., single unique value) eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_klaR.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Naive Bayes models via klaR — details_naive_Bayes_klaR","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_klaR.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Naive Bayes models via klaR — details_naive_Bayes_klaR","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_naivebayes.html","id":null,"dir":"Reference","previous_headings":"","what":"Naive Bayes models via naivebayes — details_naive_Bayes_naivebayes","title":"Naive Bayes models via naivebayes — details_naive_Bayes_naivebayes","text":"naivebayes::naive_bayes() fits model uses Bayes' theorem compute probability class, given predictor values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_naivebayes.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Naive Bayes models via naivebayes — details_naive_Bayes_naivebayes","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_naivebayes.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Naive Bayes models via naivebayes — details_naive_Bayes_naivebayes","text":"model 2 tuning parameter: smoothness: Kernel Smoothness (type: double, default: 1.0) Laplace: Laplace Correction (type: double, default: 0.0) Note engine argument usekernel set TRUE default using naivebayes engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_naivebayes.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Naive Bayes models via naivebayes — details_naive_Bayes_naivebayes","text":"discrim extension package required fit model.","code":"library(discrim) naive_Bayes(smoothness = numeric(0), Laplace = numeric(0)) %>% set_engine(\"naivebayes\") %>% translate() ## Naive Bayes Model Specification (classification) ## ## Main Arguments: ## smoothness = numeric(0) ## Laplace = numeric(0) ## ## Computational engine: naivebayes ## ## Model fit template: ## naivebayes::naive_bayes(x = missing_arg(), y = 
missing_arg(), ## adjust = numeric(0), laplace = numeric(0), usekernel = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_naivebayes.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Naive Bayes models via naivebayes — details_naive_Bayes_naivebayes","text":"columns qualitative predictors always represented factors (opposed dummy/indicator variables). predictors factors, underlying code treats multinomial data appropriately computes conditional distributions. count data, integers can estimated using Poisson distribution argument usepoisson = TRUE passed engine argument. Variance calculations used computations zero-variance predictors (.e., single unique value) eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_naivebayes.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Naive Bayes models via naivebayes — details_naive_Bayes_naivebayes","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_naivebayes.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Naive Bayes models via naivebayes — details_naive_Bayes_naivebayes","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":null,"dir":"Reference","previous_headings":"","what":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"kknn::train.kknn() fits model uses K similar data points training set predict new samples.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"model 3 tuning parameters: neighbors: # Nearest Neighbors (type: integer, default: 5L) weight_func: Distance Weighting Function (type: character, default: ‘optimal’) dist_power: Minkowski Distance Order (type: double, default: 2.0)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"min_rows() adjust number neighbors chosen value consistent actual data dimensions.","code":"nearest_neighbor( neighbors = integer(1), weight_func = character(1), dist_power = double(1) ) %>% set_engine(\"kknn\") %>% set_mode(\"regression\") %>% translate() ## K-Nearest Neighbor Model Specification (regression) ## ## Main Arguments: ## neighbors = integer(1) ## weight_func = character(1) ## dist_power = double(1) ## ## 
Computational engine: kknn ## ## Model fit template: ## kknn::train.kknn(formula = missing_arg(), data = missing_arg(), ## ks = min_rows(0L, data, 5), kernel = character(1), distance = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"","code":"nearest_neighbor( neighbors = integer(1), weight_func = character(1), dist_power = double(1) ) %>% set_engine(\"kknn\") %>% set_mode(\"classification\") %>% translate() ## K-Nearest Neighbor Model Specification (classification) ## ## Main Arguments: ## neighbors = integer(1) ## weight_func = character(1) ## dist_power = double(1) ## ## Computational engine: kknn ## ## Model fit template: ## kknn::train.kknn(formula = missing_arg(), data = missing_arg(), ## ks = min_rows(0L, data, 5), kernel = character(1), distance = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. 
One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"“Fitting Predicting parsnip” article contains examples nearest_neighbor() \"kknn\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"Hechenbichler K. Schliep K.P. (2004) Weighted k-Nearest-Neighbor Techniques Ordinal Classification, Discussion Paper 399, SFB 386, Ludwig-Maximilians University Munich Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":null,"dir":"Reference","previous_headings":"","what":"Partial least squares via mixOmics — details_pls_mixOmics","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"mixOmics package can fit several different types PLS models.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"model 2 tuning parameters: predictor_prop: Proportion Predictors (type: double, default: see ) num_comp: # Components (type: integer, default: 2L)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":"translation-from-parsnip-to-the-underlying-model-call-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (regression)","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"plsmod extension package required fit model. plsmod::pls_fit() function : Determines number predictors data. Adjusts num_comp value larger number factors. Determines whether sparsity required based value predictor_prop. 
Sets keepX argument mixOmics::spls() sparse models.","code":"library(plsmod) pls(num_comp = integer(1), predictor_prop = double(1)) %>% set_engine(\"mixOmics\") %>% set_mode(\"regression\") %>% translate() ## PLS Model Specification (regression) ## ## Main Arguments: ## predictor_prop = double(1) ## num_comp = integer(1) ## ## Computational engine: mixOmics ## ## Model fit template: ## plsmod::pls_fit(x = missing_arg(), y = missing_arg(), predictor_prop = double(1), ## ncomp = integer(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":"translation-from-parsnip-to-the-underlying-model-call-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (classification)","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"plsmod extension package required fit model. case, plsmod::pls_fit() role eventually targets mixOmics::plsda() mixOmics::splsda().","code":"library(plsmod) pls(num_comp = integer(1), predictor_prop = double(1)) %>% set_engine(\"mixOmics\") %>% set_mode(\"classification\") %>% translate() ## PLS Model Specification (classification) ## ## Main Arguments: ## predictor_prop = double(1) ## num_comp = integer(1) ## ## Computational engine: mixOmics ## ## Model fit template: ## plsmod::pls_fit(x = missing_arg(), y = missing_arg(), predictor_prop = double(1), ## ncomp = integer(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":"installing-mixomics","dir":"Reference","previous_headings":"","what":"Installing mixOmics","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"package available via Bioconductor repository accessible via CRAN. 
can install using:","code":"if (!require(\"remotes\", quietly = TRUE)) { install.packages(\"remotes\") } remotes::install_bioc(\"mixOmics\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Variance calculations used computations zero-variance predictors (.e., single unique value) eliminated fitting model. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"Rohart F Gautier B Singh Le Cao K-(2017). 
“mixOmics: R package ’omics feature selection multiple data integration.” PLoS computational biology, 13(11), e1005752.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_gee.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","title":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","text":"gee::gee() uses generalized least squares fit different types models errors independent.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_gee.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_gee.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","text":"model formal tuning parameters. may beneficial determine appropriate correlation structure use, typically affect predicted value model. effect inferential results parameter covariance values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_gee.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","text":"multilevelmod extension package required fit model. 
multilevelmod::gee_fit() wrapper model around gee().","code":"library(multilevelmod) poisson_reg(engine = \"gee\") %>% set_engine(\"gee\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Computational engine: gee ## ## Model fit template: ## multilevelmod::gee_fit(formula = missing_arg(), data = missing_arg(), ## family = stats::poisson)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_gee.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_gee.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_gee.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","text":"gee:gee() gee:geepack() specify id/cluster variable using argument id requires vector. parsnip doesn’t work way enable model fit using artificial function id_var() used formula. , original package, call look like: parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply GEE formula adding model: gee::gee() function always prints warnings output even silent = TRUE. 
parsnip \"gee\" engine, contrast, silences console output coming gee::gee(), even silent = FALSE. Also, issues gee() function, supplementary call glm() needed get rank QR decomposition objects predict() can used.","code":"gee(breaks ~ tension, id = wool, data = warpbreaks, corstr = \"exchangeable\") library(tidymodels) poisson_reg() %>% set_engine(\"gee\", corstr = \"exchangeable\") %>% fit(y ~ time + x + id_var(subject), data = longitudinal_counts) library(tidymodels) gee_spec <- poisson_reg() %>% set_engine(\"gee\", corstr = \"exchangeable\") gee_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = y, predictors = c(time, x, subject)) %>% add_model(gee_spec, formula = y ~ time + x + id_var(subject)) fit(gee_wflow, data = longitudinal_counts)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_gee.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","text":"Liang, K.Y. Zeger, S.L. (1986) Longitudinal data analysis using generalized linear models. Biometrika, 73 13–22. Zeger, S.L. Liang, K.Y. (1986) Longitudinal data analysis discrete continuous outcomes. 
Biometrics, 42 121–130.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glm.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via glm — details_poisson_reg_glm","title":"Poisson regression via glm — details_poisson_reg_glm","text":"stats::glm() uses maximum likelihood fit model count data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glm.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via glm — details_poisson_reg_glm","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glm.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via glm — details_poisson_reg_glm","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glm.html","id":"translation-from-parsnip-to-the-underlying-model-call-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (regression)","title":"Poisson regression via glm — details_poisson_reg_glm","text":"poissonreg extension package required fit model.","code":"library(poissonreg) poisson_reg() %>% set_engine(\"glm\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Computational engine: glm ## ## Model fit template: ## stats::glm(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## family = stats::poisson)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glm.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Poisson regression via glm — details_poisson_reg_glm","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) 
engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glm.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via glm — details_poisson_reg_glm","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glm.html","id":"case-weights-1","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via glm — details_poisson_reg_glm","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. However, documentation stats::glm() assumes specific type case weights used:“Non-NULL weights can used indicate different observations different dispersions (values weights inversely proportional dispersions); equivalently, elements weights positive integers w_i, response y_i mean w_i unit-weight observations. binomial GLM prior weights used give number trials response proportion successes: rarely used Poisson GLM.” frequency weights used application, glm_grouped() model (corresponding engine) may appropriate.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glm.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Poisson regression via glm — details_poisson_reg_glm","text":"model object contains data required make predictions. 
saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via mixed models — details_poisson_reg_glmer","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"\"glmer\" engine estimates fixed random effect regression parameters using maximum likelihood (restricted maximum likelihood) estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"model tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"multilevelmod extension package required fit model.","code":"library(multilevelmod) poisson_reg(engine = \"glmer\") %>% set_engine(\"glmer\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Computational engine: glmer ## ## Model fit template: ## lme4::glmer(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## family = 
stats::poisson)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":"predicting-new-samples","dir":"Reference","previous_headings":"","what":"Predicting new samples","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"model can use subject-specific coefficient estimates make predictions (.e. partial pooling). example, equation shows linear predictor (\\eta) random intercept: $$ denotes ith independent experimental unit (e.g. subject). model seen subject , can use subject’s data adjust population intercept specific subjects results. happens data predicted subject used model fit? case, package uses population parameter estimates prediction: Depending covariates model, might effect making prediction new samples. population parameters “best estimate” subject included model fit. tidymodels framework deliberately constrains predictions new data use training set data (prevent information leakage).","code":"\\eta_{i} = (\\beta_0 + b_{0i}) + \\beta_1x_{i1} \\hat{\\eta}_{i'} = \\hat{\\beta}_0+ \\hat{\\beta}x_{i'1}"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"model can accept case weights. parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. 
case, can add appropriate columns using add_variables() supply typical formula adding model:","code":"library(tidymodels) poisson_reg() %>% set_engine(\"glmer\") %>% fit(y ~ time + x + (1 | subject), data = longitudinal_counts) library(tidymodels) glmer_spec <- poisson_reg() %>% set_engine(\"glmer\") glmer_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = y, predictors = c(time, x, subject)) %>% add_model(glmer_spec, formula = y ~ time + x + (1 | subject)) fit(glmer_wflow, data = longitudinal_counts)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"J Pinheiro, D Bates. 2000. Mixed-effects models S S-PLUS. Springer, New York, NY West, K, Band Welch, Galecki. 2014. Linear Mixed Models: Practical Guide Using Statistical Software. CRC Press. Thorson, J, Minto, C. 2015, Mixed effects: unifying framework statistical modelling fisheries biology. ICES Journal Marine Science, Volume 72, Issue 5, Pages 1245–1256. Harrison, XA, Donaldson, L, Correa-Cano, , Evans, J, Fisher, DN, Goodwin, CED, Robinson, BS, Hodgson, DJ, Inger, R. 2018. brief introduction mixed effects modelling multi-model inference ecology. PeerJ 6:e4794. DeBruine LM, Barr DJ. Understanding Mixed-Effects Models Data Simulation. 2021. 
Advances Methods Practices Psychological Science.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmnet.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via glmnet — details_poisson_reg_glmnet","title":"Poisson regression via glmnet — details_poisson_reg_glmnet","text":"glmnet::glmnet() uses penalized maximum likelihood fit model count data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmnet.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via glmnet — details_poisson_reg_glmnet","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmnet.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via glmnet — details_poisson_reg_glmnet","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: see ) mixture: Proportion Lasso Penalty (type: double, default: 1.0) penalty parameter default requires single numeric value. details , glmnet model general, see glmnet-details. 
mixture: mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmnet.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Poisson regression via glmnet — details_poisson_reg_glmnet","text":"poissonreg extension package required fit model.","code":"library(poissonreg) poisson_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"glmnet\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Main Arguments: ## penalty = 0 ## mixture = double(1) ## ## Computational engine: glmnet ## ## Model fit template: ## glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## alpha = double(1), family = \"poisson\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmnet.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Poisson regression via glmnet — details_poisson_reg_glmnet","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, glmnet::glmnet() uses argument standardize = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmnet.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via glmnet — details_poisson_reg_glmnet","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmnet.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Poisson regression via glmnet — details_poisson_reg_glmnet","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via h2o — details_poisson_reg_h2o","title":"Poisson regression via h2o — details_poisson_reg_h2o","text":"h2o::h2o.glm() uses penalized maximum likelihood fit model count data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via h2o — details_poisson_reg_h2o","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via h2o — details_poisson_reg_h2o","text":"model 2 tuning parameters: mixture: Proportion Lasso Penalty (type: double, default: see ) penalty: Amount Regularization (type: double, default: see ) default, given fixed penalty, h2o::h2o.glm() uses heuristic approach select optimal value penalty based training data. Setting engine parameter lambda_search TRUE enables efficient version grid search, see details https://docs.h2o.ai/h2o/latest-stable/h2o-docs/data-science/algo-params/lambda_search.html. 
choice mixture depends engine parameter solver, automatically chosen given training data specification model parameters. solver set 'L-BFGS', mixture defaults 0 (ridge regression) 0.5 otherwise.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_h2o.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Poisson regression via h2o — details_poisson_reg_h2o","text":"agua::h2o_train_glm() poisson_reg() wrapper around h2o::h2o.glm() family = 'poisson'. agua extension package required fit model.","code":"library(poissonreg) poisson_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"h2o\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Main Arguments: ## penalty = double(1) ## mixture = double(1) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), lambda = double(1), alpha = double(1), ## family = \"poisson\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_h2o.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Poisson regression via h2o — details_poisson_reg_h2o","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. 
default, h2o::h2o.glm() uses argument standardize = TRUE center scale numerical columns.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Poisson regression via h2o — details_poisson_reg_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Poisson regression via h2o — details_poisson_reg_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. 
learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_hurdle.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via pscl — details_poisson_reg_hurdle","title":"Poisson regression via pscl — details_poisson_reg_hurdle","text":"pscl::hurdle() uses maximum likelihood estimation fit model count data separate model terms predicting counts predicting probability zero count.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_hurdle.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via pscl — details_poisson_reg_hurdle","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_hurdle.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via pscl — details_poisson_reg_hurdle","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_hurdle.html","id":"translation-from-parsnip-to-the-underlying-model-call-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (regression)","title":"Poisson regression via pscl — details_poisson_reg_hurdle","text":"poissonreg extension package required fit model.","code":"library(poissonreg) poisson_reg() %>% set_engine(\"hurdle\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Computational engine: hurdle ## ## Model fit template: ## pscl::hurdle(formula = missing_arg(), data = missing_arg(), weights = 
missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_hurdle.html","id":"preprocessing-and-special-formulas-for-zero-inflated-poisson-models","dir":"Reference","previous_headings":"","what":"Preprocessing and special formulas for zero-inflated Poisson models","title":"Poisson regression via pscl — details_poisson_reg_hurdle","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_hurdle.html","id":"specifying-the-statistical-model-details","dir":"Reference","previous_headings":"","what":"Specifying the statistical model details","title":"Poisson regression via pscl — details_poisson_reg_hurdle","text":"particular model, special formula used specify columns affect counts affect model probability zero counts. sets terms separated bar. example, y ~ x | z. type formula used base R infrastructure (e.g. model.matrix()) fitting parsnip model engine directly, formula method required formula just passed . 
example: However, using workflow, best approach avoid using workflows::add_formula() use workflows::add_variables() conjunction model formula: reason workflows::add_formula() try create model matrix either fail create dummy variables prematurely.","code":"library(tidymodels) tidymodels_prefer() data(\"bioChemists\", package = \"pscl\") poisson_reg() %>% set_engine(\"hurdle\") %>% fit(art ~ fem + mar | ment, data = bioChemists) ## parsnip model object ## ## ## Call: ## pscl::hurdle(formula = art ~ fem + mar | ment, data = data) ## ## Count model coefficients (truncated poisson with log link): ## (Intercept) femWomen marMarried ## 0.847598 -0.237351 0.008846 ## ## Zero hurdle model coefficients (binomial with logit link): ## (Intercept) ment ## 0.24871 0.08092 data(\"bioChemists\", package = \"pscl\") spec <- poisson_reg() %>% set_engine(\"hurdle\") workflow() %>% add_variables(outcomes = c(art), predictors = c(fem, mar, ment)) %>% add_model(spec, formula = art ~ fem + mar | ment) %>% fit(data = bioChemists) %>% extract_fit_engine() ## ## Call: ## pscl::hurdle(formula = art ~ fem + mar | ment, data = data) ## ## Count model coefficients (truncated poisson with log link): ## (Intercept) femWomen marMarried ## 0.847598 -0.237351 0.008846 ## ## Zero hurdle model coefficients (binomial with logit link): ## (Intercept) ment ## 0.24871 0.08092"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_hurdle.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via pscl — details_poisson_reg_hurdle","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via stan — details_poisson_reg_stan","title":"Poisson regression via stan — details_poisson_reg_stan","text":"rstanarm::stan_glm() uses Bayesian estimation fit model count data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via stan — details_poisson_reg_stan","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via stan — details_poisson_reg_stan","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"important-engine-specific-options","dir":"Reference","previous_headings":"","what":"Important engine-specific options","title":"Poisson regression via stan — details_poisson_reg_stan","text":"relevant arguments can passed set_engine(): chains: positive integer specifying number Markov chains. default 4. iter: positive integer specifying number iterations chain (including warmup). default 2000. seed: seed random number generation. cores: Number cores use executing chains parallel. prior: prior distribution (non-hierarchical) regression coefficients. \"stan\" engine fit hierarchical terms. prior_intercept: prior distribution intercept (centering predictors). 
See rstan::sampling() rstanarm::priors() information options.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Poisson regression via stan — details_poisson_reg_stan","text":"poissonreg extension package required fit model. Note refresh default prevents logging estimation process. Change value set_engine() show MCMC logs.","code":"library(poissonreg) poisson_reg() %>% set_engine(\"stan\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Computational engine: stan ## ## Model fit template: ## rstanarm::stan_glm(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), family = stats::poisson)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Poisson regression via stan — details_poisson_reg_stan","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Poisson regression via stan — details_poisson_reg_stan","text":"prediction, \"stan\" engine can compute posterior intervals analogous confidence prediction intervals. instances, units original outcome. 
std_error = TRUE, standard deviation posterior distribution (posterior predictive distribution appropriate) returned.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via stan — details_poisson_reg_stan","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Poisson regression via stan — details_poisson_reg_stan","text":"“Fitting Predicting parsnip” article contains examples poisson_reg() \"stan\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Poisson regression via stan — details_poisson_reg_stan","text":"McElreath, R. 2020 Statistical Rethinking. 
CRC Press.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"\"stan_glmer\" engine estimates hierarchical regression parameters using Bayesian estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"model tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"important-engine-specific-options","dir":"Reference","previous_headings":"","what":"Important engine-specific options","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"relevant arguments can passed set_engine(): chains: positive integer specifying number Markov chains. default 4. iter: positive integer specifying number iterations chain (including warmup). default 2000. seed: seed random number generation. cores: Number cores use executing chains parallel. prior: prior distribution (non-hierarchical) regression coefficients. prior_intercept: prior distribution intercept (centering predictors). 
See ?rstanarm::stan_glmer ?rstan::sampling information.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"multilevelmod extension package required fit model.","code":"library(multilevelmod) poisson_reg(engine = \"stan_glmer\") %>% set_engine(\"stan_glmer\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Computational engine: stan_glmer ## ## Model fit template: ## rstanarm::stan_glmer(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), family = stats::poisson, refresh = 0)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"predicting-new-samples","dir":"Reference","previous_headings":"","what":"Predicting new samples","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"model can use subject-specific coefficient estimates make predictions (.e. partial pooling). example, equation shows linear predictor (\\eta) random intercept: $$ denotes ith independent experimental unit (e.g. subject). model seen subject , can use subject’s data adjust population intercept specific subjects results. happens data predicted subject used model fit? case, package uses population parameter estimates prediction: Depending covariates model, might effect making prediction new samples. population parameters “best estimate” subject included model fit. 
tidymodels framework deliberately constrains predictions new data use training set data (prevent information leakage).","code":"\\eta_{i} = (\\beta_0 + b_{0i}) + \\beta_1x_{i1} \\hat{\\eta}_{i'} = \\hat{\\beta}_0+ \\hat{\\beta}x_{i'1}"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"model can accept case weights. parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply typical formula adding model: prediction, \"stan_glmer\" engine can compute posterior intervals analogous confidence prediction intervals. instances, units original outcome. 
std_error = TRUE, standard deviation posterior distribution (posterior predictive distribution appropriate) returned.","code":"library(tidymodels) poisson_reg() %>% set_engine(\"stan_glmer\") %>% fit(y ~ time + x + (1 | subject), data = longitudinal_counts) library(tidymodels) glmer_spec <- poisson_reg() %>% set_engine(\"stan_glmer\") glmer_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = y, predictors = c(time, x, subject)) %>% add_model(glmer_spec, formula = y ~ time + x + (1 | subject)) fit(glmer_wflow, data = longitudinal_counts)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"McElreath, R. 2020 Statistical Rethinking. CRC Press. Sorensen, T, Vasishth, S. 2016. 
Bayesian linear mixed models using Stan: tutorial psychologists, linguists, cognitive scientists, arXiv:1506.06201.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_zeroinfl.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via pscl — details_poisson_reg_zeroinfl","title":"Poisson regression via pscl — details_poisson_reg_zeroinfl","text":"pscl::zeroinfl() uses maximum likelihood estimation fit model count data separate model terms predicting counts predicting probability zero count.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_zeroinfl.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via pscl — details_poisson_reg_zeroinfl","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_zeroinfl.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via pscl — details_poisson_reg_zeroinfl","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_zeroinfl.html","id":"translation-from-parsnip-to-the-underlying-model-call-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (regression)","title":"Poisson regression via pscl — details_poisson_reg_zeroinfl","text":"poissonreg extension package required fit model.","code":"library(poissonreg) poisson_reg() %>% set_engine(\"zeroinfl\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Computational engine: zeroinfl ## ## Model fit template: ## pscl::zeroinfl(formula = missing_arg(), data = missing_arg(), ## weights = 
missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_zeroinfl.html","id":"preprocessing-and-special-formulas-for-zero-inflated-poisson-models","dir":"Reference","previous_headings":"","what":"Preprocessing and special formulas for zero-inflated Poisson models","title":"Poisson regression via pscl — details_poisson_reg_zeroinfl","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_zeroinfl.html","id":"specifying-the-statistical-model-details","dir":"Reference","previous_headings":"","what":"Specifying the statistical model details","title":"Poisson regression via pscl — details_poisson_reg_zeroinfl","text":"particular model, special formula used specify columns affect counts affect model probability zero counts. sets terms separated bar. example, y ~ x | z. type formula used base R infrastructure (e.g. model.matrix()) fitting parsnip model engine directly, formula method required formula just passed . 
example: However, using workflow, best approach avoid using workflows::add_formula() use workflows::add_variables() conjunction model formula: reason workflows::add_formula() try create model matrix either fail create dummy variables prematurely.","code":"library(tidymodels) tidymodels_prefer() data(\"bioChemists\", package = \"pscl\") poisson_reg() %>% set_engine(\"zeroinfl\") %>% fit(art ~ fem + mar | ment, data = bioChemists) ## parsnip model object ## ## ## Call: ## pscl::zeroinfl(formula = art ~ fem + mar | ment, data = data) ## ## Count model coefficients (poisson with log link): ## (Intercept) femWomen marMarried ## 0.82840 -0.21365 0.02576 ## ## Zero-inflation model coefficients (binomial with logit link): ## (Intercept) ment ## -0.363 -0.166 data(\"bioChemists\", package = \"pscl\") spec <- poisson_reg() %>% set_engine(\"zeroinfl\") workflow() %>% add_variables(outcomes = c(art), predictors = c(fem, mar, ment)) %>% add_model(spec, formula = art ~ fem + mar | ment) %>% fit(data = bioChemists) %>% extract_fit_engine() ## ## Call: ## pscl::zeroinfl(formula = art ~ fem + mar | ment, data = data) ## ## Count model coefficients (poisson with log link): ## (Intercept) femWomen marMarried ## 0.82840 -0.21365 0.02576 ## ## Zero-inflation model coefficients (binomial with logit link): ## (Intercept) ment ## -0.363 -0.166"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_zeroinfl.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via pscl — details_poisson_reg_zeroinfl","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":null,"dir":"Reference","previous_headings":"","what":"Proportional hazards regression — details_proportional_hazards_glmnet","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"glmnet::glmnet() fits regularized Cox proportional hazards model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"engine, single mode: censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: see ) mixture: Proportion Lasso Penalty (type: double, default: 1.0) penalty parameter default requires single numeric value. details , glmnet model general, see glmnet-details. 
mixture: mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"censored extension package required fit model.","code":"library(censored) proportional_hazards(penalty = double(1), mixture = double(1)) %>% set_engine(\"glmnet\") %>% translate() ## Proportional Hazards Model Specification (censored regression) ## ## Main Arguments: ## penalty = 0 ## mixture = double(1) ## ## Computational engine: glmnet ## ## Model fit template: ## censored::coxnet_train(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), alpha = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, glmnet::glmnet() uses argument standardize = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"model fit intercept. 
model formula (required) can include special terms, survival::strata(). allows baseline hazard differ groups contained function. (learn using special terms formulas tidymodels, see ?model_formula.) column used inside strata() treated qualitative matter type. different syntax offered glmnet::glmnet() package (.e., glmnet::stratifySurv()) recommended . example, model, numeric column rx used estimate two different baseline hazards value column: Note columns used strata() function also estimated regular portion model (.e., within linear predictor). Predictions type \"time\" predictions mean survival time.","code":"library(survival) library(censored) library(dplyr) library(tidyr) mod <- proportional_hazards(penalty = 0.01) %>% set_engine(\"glmnet\", nlambda = 5) %>% fit(Surv(futime, fustat) ~ age + ecog.ps + strata(rx), data = ovarian) pred_data <- data.frame(age = c(50, 50), ecog.ps = c(1, 1), rx = c(1, 2)) # Different survival probabilities for different values of 'rx' predict(mod, pred_data, type = \"survival\", time = 500) %>% bind_cols(pred_data) %>% unnest(.pred) ## # A tibble: 2 × 5 ## .eval_time .pred_survival age ecog.ps rx ## ## 1 500 0.666 50 1 1 ## 2 500 0.769 50 1 2"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"linear-predictor-values","dir":"Reference","previous_headings":"","what":"Linear predictor values","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"Since risk regression parametric survival models modeling different characteristics (e.g. relative hazard versus event time), linear predictors going opposite directions. example, parametric models, linear predictor increases time. proportional hazards models linear predictor decreases time (since hazard increasing). , linear predictors two quantities opposite signs. tidymodels treat different models differently computing performance metrics. 
standardize across model types, default proportional hazards models increasing values time. result, sign linear predictor opposite value produced predict() method engine package. behavior can changed using increasing argument calling predict() model object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"Simon N, Friedman J, Hastie T, Tibshirani R. 2011. “Regularization Paths Cox’s Proportional Hazards Model via Coordinate Descent.” Journal Statistical Software, Articles 39 (5): 1–13. . Hastie T, Tibshirani R, Wainwright M. 2015. Statistical Learning Sparsity. CRC Press. Kuhn M, Johnson K. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_survival.html","id":null,"dir":"Reference","previous_headings":"","what":"Proportional hazards regression — details_proportional_hazards_survival","title":"Proportional hazards regression — details_proportional_hazards_survival","text":"survival::coxph() fits Cox proportional hazards model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_survival.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Proportional hazards regression — details_proportional_hazards_survival","text":"engine, single mode: censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_survival.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Proportional hazards regression — details_proportional_hazards_survival","text":"model tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_survival.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Proportional hazards regression — details_proportional_hazards_survival","text":"censored extension package required fit model.","code":"library(censored) proportional_hazards() %>% set_engine(\"survival\") %>% set_mode(\"censored regression\") %>% translate() ## Proportional Hazards Model Specification (censored regression) ## ## Computational engine: survival ## ## Model fit template: ## survival::coxph(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), x = TRUE, model = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_survival.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other 
details","title":"Proportional hazards regression — details_proportional_hazards_survival","text":"model fit intercept. main interface model uses formula method since model specification typically involved use survival::Surv(). model formula can include special terms, survival::strata(). allows baseline hazard differ groups contained function. column used inside strata() treated qualitative matter type. learn using special terms formulas tidymodels, see ?model_formula. example, model, numeric column rx used estimate two different baseline hazards value column: Note columns used strata() function estimated regular portion model (.e., within linear predictor). Predictions type \"time\" predictions mean survival time.","code":"library(survival) proportional_hazards() %>% fit(Surv(futime, fustat) ~ age + strata(rx), data = ovarian) %>% extract_fit_engine() %>% # Two different hazards for each value of 'rx' basehaz() ## hazard time strata ## 1 0.02250134 59 rx=1 ## 2 0.05088586 115 rx=1 ## 3 0.09467873 156 rx=1 ## 4 0.14809975 268 rx=1 ## 5 0.30670509 329 rx=1 ## 6 0.46962698 431 rx=1 ## 7 0.46962698 448 rx=1 ## 8 0.46962698 477 rx=1 ## 9 1.07680229 638 rx=1 ## 10 1.07680229 803 rx=1 ## 11 1.07680229 855 rx=1 ## 12 1.07680229 1040 rx=1 ## 13 1.07680229 1106 rx=1 ## 14 0.05843331 353 rx=2 ## 15 0.12750063 365 rx=2 ## 16 0.12750063 377 rx=2 ## 17 0.12750063 421 rx=2 ## 18 0.23449656 464 rx=2 ## 19 0.35593895 475 rx=2 ## 20 0.50804209 563 rx=2 ## 21 0.50804209 744 rx=2 ## 22 0.50804209 769 rx=2 ## 23 0.50804209 770 rx=2 ## 24 0.50804209 1129 rx=2 ## 25 0.50804209 1206 rx=2 ## 26 0.50804209 1227 rx=2"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_survival.html","id":"linear-predictor-values","dir":"Reference","previous_headings":"","what":"Linear predictor values","title":"Proportional hazards regression — details_proportional_hazards_survival","text":"Since risk regression parametric survival models modeling different characteristics 
(e.g. relative hazard versus event time), linear predictors going opposite directions. example, parametric models, linear predictor increases time. proportional hazards models linear predictor decreases time (since hazard increasing). , linear predictors two quantities opposite signs. tidymodels treat different models differently computing performance metrics. standardize across model types, default proportional hazards models increasing values time. result, sign linear predictor opposite value produced predict() method engine package. behavior can changed using increasing argument calling predict() model object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_survival.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Proportional hazards regression — details_proportional_hazards_survival","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_survival.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Proportional hazards regression — details_proportional_hazards_survival","text":"Andersen P, Gill R. 1982. Cox’s regression model counting processes, large sample study. Annals Statistics 10, 1100-1120.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_aorsf.html","id":null,"dir":"Reference","previous_headings":"","what":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","title":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","text":"aorsf::orsf() fits model creates large number decision trees, de-correlated others. 
final prediction uses predictions individual trees combines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_aorsf.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","text":"engine, single mode: censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_aorsf.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","text":"model 3 tuning parameters: trees: # Trees (type: integer, default: 500L) min_n: Minimal Node Size (type: integer, default: 5L) mtry: # Randomly Selected Predictors (type: integer, default: ceiling(sqrt(n_predictors))) Additionally, model one engine-specific tuning parameter: split_min_stat: Minimum test statistic required split node. Default 3.841459 log-rank test, roughly p-value 0.05.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_aorsf.html","id":"translation-from-parsnip-to-the-original-package-censored-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (censored regression)","title":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","text":"censored extension package required fit model.","code":"library(censored) rand_forest() %>% set_engine(\"aorsf\") %>% set_mode(\"censored regression\") %>% translate() ## Random Forest Model Specification (censored regression) ## ## Computational engine: aorsf ## ## Model fit template: ## aorsf::orsf(formula = missing_arg(), data = missing_arg(), weights = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_aorsf.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing 
requirements","title":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_aorsf.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_aorsf.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","text":"Predictions survival probability time exceeding maximum observed event time predicted survival probability maximum observed time training data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_aorsf.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","text":"Jaeger BC, Long DL, Long DM, Sims M, Szychowski JM, Min YI, Mcclure LA, Howard G, Simon N. Oblique random survival forests. Annals applied statistics 2019 Sep; 13(3):1847-83. DOI: 10.1214/19-AOAS1261 Jaeger BC, Welden S, Lenoir K, Pajewski NM. aorsf: R package supervised learning using oblique random survival forest. Journal Open Source Software 2022, 7(77), 1 4705. . Jaeger BC, Welden S, Lenoir K, Speiser JL, Segar MW, Pandey , Pajewski NM. Accelerated interpretable oblique random survival forests. arXiv e-prints 2022 Aug; arXiv-2208. 
URL: https://arxiv.org/abs/2208.01129","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Random forests via h2o — details_rand_forest_h2o","title":"Random forests via h2o — details_rand_forest_h2o","text":"h2o::h2o.randomForest() fits model creates large number decision trees, independent others. final prediction uses predictions individual trees combines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Random forests via h2o — details_rand_forest_h2o","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Random forests via h2o — details_rand_forest_h2o","text":"model 3 tuning parameters: trees: # Trees (type: integer, default: 50L) min_n: Minimal Node Size (type: integer, default: 1) mtry: # Randomly Selected Predictors (type: integer, default: see ) mtry depends number columns model mode. default h2o::h2o.randomForest() floor(sqrt(ncol(x))) classification floor(ncol(x)/3) regression.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_h2o.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Random forests via h2o — details_rand_forest_h2o","text":"agua::h2o_train_rf() wrapper around h2o::h2o.randomForest(). 
min_rows() min_cols() adjust number neighbors chosen value consistent actual data dimensions.","code":"rand_forest( mtry = integer(1), trees = integer(1), min_n = integer(1) ) %>% set_engine(\"h2o\") %>% set_mode(\"regression\") %>% translate() ## Random Forest Model Specification (regression) ## ## Main Arguments: ## mtry = integer(1) ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_rf(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), mtries = integer(1), ntrees = integer(1), ## min_rows = integer(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_h2o.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Random forests via h2o — details_rand_forest_h2o","text":"","code":"rand_forest( mtry = integer(1), trees = integer(1), min_n = integer(1) ) %>% set_engine(\"h2o\") %>% set_mode(\"classification\") %>% translate() ## Random Forest Model Specification (classification) ## ## Main Arguments: ## mtry = integer(1) ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_rf(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), mtries = integer(1), ntrees = integer(1), ## min_rows = integer(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_h2o.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Random forests via h2o — details_rand_forest_h2o","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. 
Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Random forests via h2o — details_rand_forest_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Random forests via h2o — details_rand_forest_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":null,"dir":"Reference","previous_headings":"","what":"Random forests via partykit — details_rand_forest_partykit","title":"Random forests via partykit — details_rand_forest_partykit","text":"partykit::cforest() fits model creates large number decision trees, independent others. 
final prediction uses predictions individual trees combines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Random forests via partykit — details_rand_forest_partykit","text":"engine, multiple modes: censored regression, regression, classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Random forests via partykit — details_rand_forest_partykit","text":"model 3 tuning parameters: trees: # Trees (type: integer, default: 500L) min_n: Minimal Node Size (type: integer, default: 20L) mtry: # Randomly Selected Predictors (type: integer, default: 5L)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Random forests via partykit — details_rand_forest_partykit","text":"bonsai extension package required fit model.","code":"library(bonsai) rand_forest() %>% set_engine(\"partykit\") %>% set_mode(\"regression\") %>% translate() ## Random Forest Model Specification (regression) ## ## Computational engine: partykit ## ## Model fit template: ## parsnip::cforest_train(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Random forests via partykit — details_rand_forest_partykit","text":"bonsai extension package required fit model. 
parsnip::cforest_train() wrapper around partykit::cforest() (functions) makes easier run model.","code":"library(bonsai) rand_forest() %>% set_engine(\"partykit\") %>% set_mode(\"classification\") %>% translate() ## Random Forest Model Specification (classification) ## ## Computational engine: partykit ## ## Model fit template: ## parsnip::cforest_train(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":"translation-from-parsnip-to-the-original-package-censored-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (censored regression)","title":"Random forests via partykit — details_rand_forest_partykit","text":"censored extension package required fit model. censored::cond_inference_surv_cforest() wrapper around partykit::cforest() (functions) makes easier run model.","code":"library(censored) rand_forest() %>% set_engine(\"partykit\") %>% set_mode(\"censored regression\") %>% translate() ## Random Forest Model Specification (censored regression) ## ## Computational engine: partykit ## ## Model fit template: ## parsnip::cforest_train(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Random forests via partykit — details_rand_forest_partykit","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. 
Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Random forests via partykit — details_rand_forest_partykit","text":"Predictions type \"time\" predictions median survival time.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Random forests via partykit — details_rand_forest_partykit","text":"partykit: Modular Toolkit Recursive Partytioning R Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":null,"dir":"Reference","previous_headings":"","what":"Random forests via randomForest — details_rand_forest_randomForest","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"randomForest::randomForest() fits model creates large number decision trees, independent others. 
final prediction uses predictions individual trees combines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"model 3 tuning parameters: mtry: # Randomly Selected Predictors (type: integer, default: see ) trees: # Trees (type: integer, default: 500L) min_n: Minimal Node Size (type: integer, default: see ) mtry depends number columns model mode. default randomForest::randomForest() floor(sqrt(ncol(x))) classification floor(ncol(x)/3) regression. min_n depends mode. regression, value 5 default. 
classification, value 10 used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"min_rows() min_cols() adjust number neighbors chosen value consistent actual data dimensions.","code":"rand_forest( mtry = integer(1), trees = integer(1), min_n = integer(1) ) %>% set_engine(\"randomForest\") %>% set_mode(\"regression\") %>% translate() ## Random Forest Model Specification (regression) ## ## Main Arguments: ## mtry = integer(1) ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: randomForest ## ## Model fit template: ## randomForest::randomForest(x = missing_arg(), y = missing_arg(), ## mtry = min_cols(~integer(1), x), ntree = integer(1), nodesize = min_rows(~integer(1), ## x))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"","code":"rand_forest( mtry = integer(1), trees = integer(1), min_n = integer(1) ) %>% set_engine(\"randomForest\") %>% set_mode(\"classification\") %>% translate() ## Random Forest Model Specification (classification) ## ## Main Arguments: ## mtry = integer(1) ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: randomForest ## ## Model fit template: ## randomForest::randomForest(x = missing_arg(), y = missing_arg(), ## mtry = min_cols(~integer(1), x), ntree = integer(1), nodesize = min_rows(~integer(1), ## 
x))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"“Fitting Predicting parsnip” article contains examples rand_forest() \"randomForest\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":null,"dir":"Reference","previous_headings":"","what":"Random forests via ranger — details_rand_forest_ranger","title":"Random forests via ranger — details_rand_forest_ranger","text":"ranger::ranger() fits model creates large number decision trees, independent others. final prediction uses predictions individual trees combines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Random forests via ranger — details_rand_forest_ranger","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Random forests via ranger — details_rand_forest_ranger","text":"model 3 tuning parameters: mtry: # Randomly Selected Predictors (type: integer, default: see ) trees: # Trees (type: integer, default: 500L) min_n: Minimal Node Size (type: integer, default: see ) mtry depends number columns. default ranger::ranger() floor(sqrt(ncol(x))). min_n depends mode. regression, value 5 default. 
classification, value 10 used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Random forests via ranger — details_rand_forest_ranger","text":"min_rows() min_cols() adjust number neighbors chosen value consistent actual data dimensions.","code":"rand_forest( mtry = integer(1), trees = integer(1), min_n = integer(1) ) %>% set_engine(\"ranger\") %>% set_mode(\"regression\") %>% translate() ## Random Forest Model Specification (regression) ## ## Main Arguments: ## mtry = integer(1) ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: ranger ## ## Model fit template: ## ranger::ranger(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## mtry = min_cols(~integer(1), x), num.trees = integer(1), ## min.node.size = min_rows(~integer(1), x), num.threads = 1, ## verbose = FALSE, seed = sample.int(10^5, 1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Random forests via ranger — details_rand_forest_ranger","text":"Note ranger probability forest always fit (unless probability argument changed user via set_engine()).","code":"rand_forest( mtry = integer(1), trees = integer(1), min_n = integer(1) ) %>% set_engine(\"ranger\") %>% set_mode(\"classification\") %>% translate() ## Random Forest Model Specification (classification) ## ## Main Arguments: ## mtry = integer(1) ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: ranger ## ## Model fit template: ## ranger::ranger(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## mtry = 
min_cols(~integer(1), x), num.trees = integer(1), ## min.node.size = min_rows(~integer(1), x), num.threads = 1, ## verbose = FALSE, seed = sample.int(10^5, 1), probability = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Random forests via ranger — details_rand_forest_ranger","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"other-notes","dir":"Reference","previous_headings":"","what":"Other notes","title":"Random forests via ranger — details_rand_forest_ranger","text":"default, parallel processing turned . tuning, efficient parallelize resamples tuning parameters. parallelize construction trees within ranger model, change num.threads argument via set_engine(). ranger confidence intervals, intervals constructed using form estimate +/- z * std_error. classification probabilities, values can fall outside [0, 1] coerced range.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Random forests via ranger — details_rand_forest_ranger","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Random forests via ranger — details_rand_forest_ranger","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Random forests via ranger — details_rand_forest_ranger","text":"“Fitting Predicting parsnip” article contains examples rand_forest() \"ranger\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Random forests via ranger — details_rand_forest_ranger","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":null,"dir":"Reference","previous_headings":"","what":"Random forests via spark — details_rand_forest_spark","title":"Random forests via spark — details_rand_forest_spark","text":"sparklyr::ml_random_forest() fits model creates large number decision trees, independent others. 
final prediction uses predictions individual trees combines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Random forests via spark — details_rand_forest_spark","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Random forests via spark — details_rand_forest_spark","text":"model 3 tuning parameters: mtry: # Randomly Selected Predictors (type: integer, default: see ) trees: # Trees (type: integer, default: 20L) min_n: Minimal Node Size (type: integer, default: 1L) mtry depends number columns model mode. default sparklyr::ml_random_forest() floor(sqrt(ncol(x))) classification floor(ncol(x)/3) regression.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Random forests via spark — details_rand_forest_spark","text":"min_rows() min_cols() adjust number neighbors chosen value consistent actual data dimensions.","code":"rand_forest( mtry = integer(1), trees = integer(1), min_n = integer(1) ) %>% set_engine(\"spark\") %>% set_mode(\"regression\") %>% translate() ## Random Forest Model Specification (regression) ## ## Main Arguments: ## mtry = integer(1) ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_random_forest(x = missing_arg(), formula = missing_arg(), ## type = \"regression\", feature_subset_strategy = integer(1), ## num_trees = integer(1), min_instances_per_node = min_rows(~integer(1), ## x), seed = sample.int(10^5, 
1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Random forests via spark — details_rand_forest_spark","text":"","code":"rand_forest( mtry = integer(1), trees = integer(1), min_n = integer(1) ) %>% set_engine(\"spark\") %>% set_mode(\"classification\") %>% translate() ## Random Forest Model Specification (classification) ## ## Main Arguments: ## mtry = integer(1) ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_random_forest(x = missing_arg(), formula = missing_arg(), ## type = \"classification\", feature_subset_strategy = integer(1), ## num_trees = integer(1), min_instances_per_node = min_rows(~integer(1), ## x), seed = sample.int(10^5, 1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Random forests via spark — details_rand_forest_spark","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Random forests via spark — details_rand_forest_spark","text":"models created using \"spark\" engine, several things consider. formula interface via fit() available; using fit_xy() generate error. predictions always Spark table format. names documented without dots. equivalent factor columns Spark tables class predictions returned character columns. 
retain model object new R session (via save()), model$fit element parsnip object serialized via ml_save(object$fit) separately saved disk. new session, object can reloaded reattached parsnip object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Random forests via spark — details_rand_forest_spark","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. Note , spark engines, case_weight argument value character string specify column numeric case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Random forests via spark — details_rand_forest_spark","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"RuleFit models via h2o — details_rule_fit_h2o","title":"RuleFit models via h2o — details_rule_fit_h2o","text":"h2o::h2o.rulefit() fits model derives simple feature rules tree ensemble uses rules features regularized (LASSO) model. 
agua::h2o_train_rule() wrapper around function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"RuleFit models via h2o — details_rule_fit_h2o","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"RuleFit models via h2o — details_rule_fit_h2o","text":"model 3 tuning parameters: trees: # Trees (type: integer, default: 50L) tree_depth: Tree Depth (type: integer, default: 3L) penalty: Amount Regularization (type: double, default: 0) Note penalty h2o engine `rule_fit()`` corresponds L1 penalty (LASSO). engine arguments interest: algorithm: algorithm use generate rules. one “AUTO”, “DRF”, “GBM”, defaults “AUTO”. min_rule_length: Minimum length tree depth, opposite tree_dpeth, defaults 3. max_num_rules: maximum number rules return. default value -1 means number rules selected diminishing returns model deviance. model_type: type base learners ensemble, one : “rules_and_linear”, “rules”, “linear”, defaults “rules_and_linear”.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_h2o.html","id":"translation-from-parsnip-to-the-underlying-model-call-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (regression)","title":"RuleFit models via h2o — details_rule_fit_h2o","text":"agua::h2o_train_rule() wrapper around h2o::h2o.rulefit(). 
agua extension package required fit model.","code":"library(rules) rule_fit( trees = integer(1), tree_depth = integer(1), penalty = numeric(1) ) %>% set_engine(\"h2o\") %>% set_mode(\"regression\") %>% translate() ## RuleFit Model Specification (regression) ## ## Main Arguments: ## trees = integer(1) ## tree_depth = integer(1) ## penalty = numeric(1) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_rule(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), rule_generation_ntrees = integer(1), ## max_rule_length = integer(1), lambda = numeric(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_h2o.html","id":"translation-from-parsnip-to-the-underlying-model-call-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (classification)","title":"RuleFit models via h2o — details_rule_fit_h2o","text":"agua::h2o_train_rule() rule_fit() wrapper around h2o::h2o.rulefit(). 
agua extension package required fit model.","code":"rule_fit( trees = integer(1), tree_depth = integer(1), penalty = numeric(1) ) %>% set_engine(\"h2o\") %>% set_mode(\"classification\") %>% translate() ## RuleFit Model Specification (classification) ## ## Main Arguments: ## trees = integer(1) ## tree_depth = integer(1) ## penalty = numeric(1) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_rule(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), rule_generation_ntrees = integer(1), ## max_rule_length = integer(1), lambda = numeric(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_h2o.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"RuleFit models via h2o — details_rule_fit_h2o","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_h2o.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"RuleFit models via h2o — details_rule_fit_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. 
manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"RuleFit models via h2o — details_rule_fit_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":null,"dir":"Reference","previous_headings":"","what":"RuleFit models via xrf — details_rule_fit_xrf","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"xrf::xrf() fits model derives simple feature rules tree ensemble uses rules features regularized model. rules::xrf_fit() wrapper around function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"model 8 tuning parameters: mtry: Proportion Randomly Selected Predictors (type: double, default: see ) trees: # Trees (type: integer, default: 15L) min_n: Minimal Node Size (type: integer, default: 1L) tree_depth: Tree Depth (type: integer, default: 6L) learn_rate: Learning Rate (type: double, default: 0.3) loss_reduction: Minimum Loss Reduction (type: double, default: 0.0) sample_size: Proportion Observations Sampled (type: double, default: 1.0) penalty: Amount Regularization (type: double, default: 
0.1)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"translation-from-parsnip-to-the-underlying-model-call-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (regression)","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"rules extension package required fit model.","code":"library(rules) rule_fit( mtry = numeric(1), trees = integer(1), min_n = integer(1), tree_depth = integer(1), learn_rate = numeric(1), loss_reduction = numeric(1), sample_size = numeric(1), penalty = numeric(1) ) %>% set_engine(\"xrf\") %>% set_mode(\"regression\") %>% translate() ## RuleFit Model Specification (regression) ## ## Main Arguments: ## mtry = numeric(1) ## trees = integer(1) ## min_n = integer(1) ## tree_depth = integer(1) ## learn_rate = numeric(1) ## loss_reduction = numeric(1) ## sample_size = numeric(1) ## penalty = numeric(1) ## ## Computational engine: xrf ## ## Model fit template: ## rules::xrf_fit(formula = missing_arg(), data = missing_arg(), ## xgb_control = missing_arg(), colsample_bynode = numeric(1), ## nrounds = integer(1), min_child_weight = integer(1), max_depth = integer(1), ## eta = numeric(1), gamma = numeric(1), subsample = numeric(1), ## lambda = numeric(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"translation-from-parsnip-to-the-underlying-model-call-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (classification)","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"rules extension package required fit model.","code":"library(rules) rule_fit( mtry = numeric(1), trees = integer(1), min_n = integer(1), tree_depth = integer(1), learn_rate = numeric(1), loss_reduction = numeric(1), sample_size = numeric(1), penalty = numeric(1) ) %>% set_engine(\"xrf\") %>% set_mode(\"classification\") %>% translate() ## 
RuleFit Model Specification (classification) ## ## Main Arguments: ## mtry = numeric(1) ## trees = integer(1) ## min_n = integer(1) ## tree_depth = integer(1) ## learn_rate = numeric(1) ## loss_reduction = numeric(1) ## sample_size = numeric(1) ## penalty = numeric(1) ## ## Computational engine: xrf ## ## Model fit template: ## rules::xrf_fit(formula = missing_arg(), data = missing_arg(), ## xgb_control = missing_arg(), colsample_bynode = numeric(1), ## nrounds = integer(1), min_child_weight = integer(1), max_depth = integer(1), ## eta = numeric(1), gamma = numeric(1), subsample = numeric(1), ## lambda = numeric(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"differences-from-the-xrf-package","dir":"Reference","previous_headings":"","what":"Differences from the xrf package","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"Note , per documentation ?xrf, transformations response variable supported. use rule_fit(), recommend using recipe instead formula method. Also, several configuration differences xrf() fit package wrapper used rules. differences default values : differences create disparity values penalty argument glmnet uses. Also, rules can also set penalty whereas xrf uses internal 5-fold cross-validation determine (default).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. 
using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"interpreting-mtry","dir":"Reference","previous_headings":"","what":"Interpreting mtry","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"mtry argument denotes number predictors randomly sampled split creating tree models. engines, \"xgboost\", \"xrf\", \"lightgbm\", interpret analogue mtry argument proportion predictors randomly sampled split rather count. settings, tuning preprocessors influence number predictors, parameterization quite helpful—interpreting mtry proportion means [0, 1] always valid range parameter, regardless input data. parsnip extensions accommodate parameterization using counts argument: logical indicating whether mtry interpreted number predictors randomly sampled split. TRUE indicates mtry interpreted sense count, FALSE indicates argument interpreted sense proportion. mtry main model argument boost_tree() rand_forest(), thus engine-specific interface. , regardless engine, counts defaults TRUE. engines support proportion interpretation (currently \"xgboost\" \"xrf\", via rules package, \"lightgbm\" via bonsai package) user can pass counts = FALSE argument set_engine() supply mtry values within [0, 1].","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"early-stopping","dir":"Reference","previous_headings":"","what":"Early stopping","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"stop_iter() argument allows model prematurely stop training objective function improve within early_stop iterations. best way use feature conjunction internal validation set. , pass validation parameter xgb_train() via parsnip set_engine() function. proportion training set reserved measuring performance (stopping early). 
model specification early_stop >= trees, early_stop converted trees - 1 warning issued.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"Friedman Popescu. “Predictive learning via rule ensembles.” Ann. Appl. Stat. 2 (3) 916- 954, September 2008","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_flexsurv.html","id":null,"dir":"Reference","previous_headings":"","what":"Parametric survival regression — details_surv_reg_flexsurv","title":"Parametric survival regression — details_surv_reg_flexsurv","text":"flexsurv::flexsurvreg() fits parametric survival model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_flexsurv.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Parametric survival regression — details_surv_reg_flexsurv","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_flexsurv.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Parametric survival regression — details_surv_reg_flexsurv","text":"model 1 tuning parameters: dist: Distribution (type: character, default: ‘weibull’)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_flexsurv.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Parametric survival regression 
— details_surv_reg_flexsurv","text":"","code":"surv_reg(dist = character(1)) %>% set_engine(\"flexsurv\") %>% set_mode(\"regression\") %>% translate() ## Parametric Survival Regression Model Specification (regression) ## ## Main Arguments: ## dist = character(1) ## ## Computational engine: flexsurv ## ## Model fit template: ## flexsurv::flexsurvreg(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), dist = character(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_flexsurv.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Parametric survival regression — details_surv_reg_flexsurv","text":"main interface model uses formula method since model specification typically involved use survival::Surv(). engine, stratification specified via strata(), please see flexsurv::flexsurvreg() alternative specifications.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_flexsurv.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Parametric survival regression — details_surv_reg_flexsurv","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_flexsurv.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Parametric survival regression — details_surv_reg_flexsurv","text":"Jackson, C. 2016. flexsurv: Platform Parametric Survival Modeling R. 
Journal Statistical Software, 70(8), 1 - 33.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_survival.html","id":null,"dir":"Reference","previous_headings":"","what":"Parametric survival regression — details_surv_reg_survival","title":"Parametric survival regression — details_surv_reg_survival","text":"survival::survreg() fits parametric survival model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_survival.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Parametric survival regression — details_surv_reg_survival","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_survival.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Parametric survival regression — details_surv_reg_survival","text":"model 1 tuning parameters: dist: Distribution (type: character, default: ‘weibull’)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_survival.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Parametric survival regression — details_surv_reg_survival","text":"","code":"surv_reg(dist = character(1)) %>% set_engine(\"survival\") %>% set_mode(\"regression\") %>% translate() ## Parametric Survival Regression Model Specification (regression) ## ## Main Arguments: ## dist = character(1) ## ## Computational engine: survival ## ## Model fit template: ## survival::survreg(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), dist = character(1), model = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_survival.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Parametric survival regression — 
details_surv_reg_survival","text":"Note model = TRUE needed produce quantile predictions stratification variable can overridden cases. main interface model uses formula method since model specification typically involved use survival::Surv(). model formula can include special terms, survival::strata(). allows model scale parameter differ groups contained function. column used inside strata() treated qualitative matter type. learn using special terms formulas tidymodels, see ?model_formula. example, model, numeric column rx used estimate two different scale parameters value column:","code":"library(survival) surv_reg() %>% fit(Surv(futime, fustat) ~ age + strata(rx), data = ovarian) %>% extract_fit_engine() ## Call: ## survival::survreg(formula = Surv(futime, fustat) ~ age + strata(rx), ## data = data, model = TRUE) ## ## Coefficients: ## (Intercept) age ## 12.8734120 -0.1033569 ## ## Scale: ## rx=1 rx=2 ## 0.7695509 0.4703602 ## ## Loglik(model)= -89.4 Loglik(intercept only)= -97.1 ## Chisq= 15.36 on 1 degrees of freedom, p= 8.88e-05 ## n= 26"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_survival.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Parametric survival regression — details_surv_reg_survival","text":"Kalbfleisch, J. D. Prentice, R. L. 
2002 statistical analysis failure time data, Wiley.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurv.html","id":null,"dir":"Reference","previous_headings":"","what":"Parametric survival regression — details_survival_reg_flexsurv","title":"Parametric survival regression — details_survival_reg_flexsurv","text":"flexsurv::flexsurvreg() fits parametric survival model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurv.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Parametric survival regression — details_survival_reg_flexsurv","text":"engine, single mode: censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurv.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Parametric survival regression — details_survival_reg_flexsurv","text":"model 1 tuning parameters: dist: Distribution (type: character, default: ‘weibull’)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurv.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Parametric survival regression — details_survival_reg_flexsurv","text":"censored extension package required fit model.","code":"library(censored) survival_reg(dist = character(1)) %>% set_engine(\"flexsurv\") %>% set_mode(\"censored regression\") %>% translate() ## Parametric Survival Regression Model Specification (censored regression) ## ## Main Arguments: ## dist = character(1) ## ## Computational engine: flexsurv ## ## Model fit template: ## flexsurv::flexsurvreg(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), dist = 
character(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurv.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Parametric survival regression — details_survival_reg_flexsurv","text":"main interface model uses formula method since model specification typically involved use survival::Surv(). engine, stratification specified via strata(), please see flexsurv::flexsurvreg() alternative specifications. Predictions type \"time\" predictions mean survival time.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurv.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Parametric survival regression — details_survival_reg_flexsurv","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurv.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Parametric survival regression — details_survival_reg_flexsurv","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurv.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Parametric survival regression — details_survival_reg_flexsurv","text":"Jackson, C. 2016. flexsurv: Platform Parametric Survival Modeling R. 
Journal Statistical Software, 70(8), 1 - 33.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurvspline.html","id":null,"dir":"Reference","previous_headings":"","what":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","title":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","text":"flexsurv::flexsurvspline() fits flexible parametric survival model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurvspline.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","text":"engine, single mode: censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurvspline.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","text":"model one engine-specific tuning parameter: k: Number knots spline. 
default k = 0.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurvspline.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","text":"censored extension package required fit model.","code":"library(censored) survival_reg() %>% set_engine(\"flexsurvspline\") %>% set_mode(\"censored regression\") %>% translate() ## Parametric Survival Regression Model Specification (censored regression) ## ## Computational engine: flexsurvspline ## ## Model fit template: ## flexsurv::flexsurvspline(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurvspline.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","text":"main interface model uses formula method since model specification typically involved use survival::Surv(). engine, stratification specified via strata(), please see flexsurv::flexsurvspline() alternative specifications. Predictions type \"time\" predictions mean survival time.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurvspline.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurvspline.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurvspline.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","text":"Jackson, C. 2016. flexsurv: Platform Parametric Survival Modeling R. Journal Statistical Software, 70(8), 1 - 33.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_survival.html","id":null,"dir":"Reference","previous_headings":"","what":"Parametric survival regression — details_survival_reg_survival","title":"Parametric survival regression — details_survival_reg_survival","text":"survival::survreg() fits parametric survival model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_survival.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Parametric survival regression — details_survival_reg_survival","text":"engine, single mode: censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_survival.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Parametric survival regression — details_survival_reg_survival","text":"model 1 tuning parameters: dist: Distribution (type: character, default: 
‘weibull’)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_survival.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Parametric survival regression — details_survival_reg_survival","text":"censored extension package required fit model.","code":"library(censored) survival_reg(dist = character(1)) %>% set_engine(\"survival\") %>% set_mode(\"censored regression\") %>% translate() ## Parametric Survival Regression Model Specification (censored regression) ## ## Main Arguments: ## dist = character(1) ## ## Computational engine: survival ## ## Model fit template: ## survival::survreg(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), dist = character(1), model = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_survival.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Parametric survival regression — details_survival_reg_survival","text":"translated syntax , note model = TRUE needed produce quantile predictions stratification variable can overridden cases. main interface model uses formula method since model specification typically involved use survival::Surv(). model formula can include special terms, survival::strata(). allows model scale parameter differ groups contained function. column used inside strata() treated qualitative matter type. learn using special terms formulas tidymodels, see ?model_formula. 
example, model, numeric column rx used estimate two different scale parameters value column: Predictions type \"time\" predictions mean survival time.","code":"library(survival) survival_reg() %>% fit(Surv(futime, fustat) ~ age + strata(rx), data = ovarian) %>% extract_fit_engine() ## Call: ## survival::survreg(formula = Surv(futime, fustat) ~ age + strata(rx), ## data = data, model = TRUE) ## ## Coefficients: ## (Intercept) age ## 12.8734120 -0.1033569 ## ## Scale: ## rx=1 rx=2 ## 0.7695509 0.4703602 ## ## Loglik(model)= -89.4 Loglik(intercept only)= -97.1 ## Chisq= 15.36 on 1 degrees of freedom, p= 8.88e-05 ## n= 26"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_survival.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Parametric survival regression — details_survival_reg_survival","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_survival.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Parametric survival regression — details_survival_reg_survival","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_survival.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Parametric survival regression — details_survival_reg_survival","text":"Kalbfleisch, J. D. Prentice, R. L. 
2002 statistical analysis failure time data, Wiley.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","title":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","text":"LiblineaR::LiblineaR() fits support vector machine model. classification, model tries maximize width margin classes. regression, model optimizes robust loss function affected large model residuals.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","text":"model 2 tuning parameters: cost: Cost (type: double, default: 1.0) margin: Insensitivity Margin (type: double, default: default) engine fits models L2-regularized L2-loss. 
LiblineaR::LiblineaR() documentation, types 1 (classification) 11 (regression).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","text":"","code":"svm_linear( cost = double(1), margin = double(1) ) %>% set_engine(\"LiblineaR\") %>% set_mode(\"regression\") %>% translate() ## Linear Support Vector Machine Model Specification (regression) ## ## Main Arguments: ## cost = double(1) ## margin = double(1) ## ## Computational engine: LiblineaR ## ## Model fit template: ## LiblineaR::LiblineaR(x = missing_arg(), y = missing_arg(), C = double(1), ## svr_eps = double(1), type = 11)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","text":"margin parameter apply classification models. Note LiblineaR engine produce class probabilities. optimizing model using tune package, default metrics require class probabilities. 
use tune_*() functions, metric set must passed argument contains metrics hard class predictions (e.g., accuracy).","code":"svm_linear( cost = double(1) ) %>% set_engine(\"LiblineaR\") %>% set_mode(\"classification\") %>% translate() ## Linear Support Vector Machine Model Specification (classification) ## ## Main Arguments: ## cost = double(1) ## ## Computational engine: LiblineaR ## ## Model fit template: ## LiblineaR::LiblineaR(x = missing_arg(), y = missing_arg(), C = double(1), ## type = 1)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","text":"“Fitting Predicting parsnip” article contains examples svm_linear() \"LiblineaR\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear support vector machines (SVMs) via 
LiblineaR — details_svm_linear_LiblineaR","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"kernlab::ksvm() fits support vector machine model. classification, model tries maximize width margin classes. regression, model optimizes robust loss function affected large model residuals.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"model 2 tuning parameters: cost: Cost (type: double, default: 1.0) margin: Insensitivity Margin (type: double, default: 0.1)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"","code":"svm_linear( cost = double(1), margin = double(1) ) %>% set_engine(\"kernlab\") %>% set_mode(\"regression\") %>% translate() ## Linear Support Vector Machine Model Specification (regression) ## ## Main Arguments: ## cost = double(1) ## 
margin = double(1) ## ## Computational engine: kernlab ## ## Model fit template: ## kernlab::ksvm(x = missing_arg(), data = missing_arg(), C = double(1), ## epsilon = double(1), kernel = \"vanilladot\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"margin parameter apply classification models. Note \"kernlab\" engine naturally estimate class probabilities. produce , decision values model converted probabilities using Platt scaling. method fits additional model top SVM model. fitting Platt scaling model, random numbers used reproducible controlled R’s random number stream.","code":"svm_linear( cost = double(1) ) %>% set_engine(\"kernlab\") %>% set_mode(\"classification\") %>% translate() ## Linear Support Vector Machine Model Specification (classification) ## ## Main Arguments: ## cost = double(1) ## ## Computational engine: kernlab ## ## Model fit template: ## kernlab::ksvm(x = missing_arg(), data = missing_arg(), C = double(1), ## kernel = \"vanilladot\", prob.model = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. 
One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"“Fitting Predicting parsnip” article contains examples svm_linear() \"kernlab\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"Lin, HT, R Weng. “Note Platt’s Probabilistic Outputs Support Vector Machines” Karatzoglou, , Smola, , Hornik, K, Zeileis. 2004. “kernlab - S4 Package Kernel Methods R.”, Journal Statistical Software. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":null,"dir":"Reference","previous_headings":"","what":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"kernlab::ksvm() fits support vector machine model. classification, model tries maximize width margin classes. regression, model optimizes robust loss function affected large model residuals.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"model 4 tuning parameters: cost: Cost (type: double, default: 1.0) degree: Degree Interaction (type: integer, default: 1L1) scale_factor: Scale Factor (type: double, default: 1.0) margin: Insensitivity Margin (type: double, default: 0.1)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"","code":"svm_poly( cost = double(1), degree = integer(1), scale_factor = double(1), margin = double(1) ) %>% set_engine(\"kernlab\") %>% set_mode(\"regression\") %>% translate() ## Polynomial Support Vector Machine Model Specification 
(regression) ## ## Main Arguments: ## cost = double(1) ## degree = integer(1) ## scale_factor = double(1) ## margin = double(1) ## ## Computational engine: kernlab ## ## Model fit template: ## kernlab::ksvm(x = missing_arg(), data = missing_arg(), C = double(1), ## epsilon = double(1), kernel = \"polydot\", kpar = list(degree = ~integer(1), ## scale = ~double(1)))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"margin parameter apply classification models. Note \"kernlab\" engine naturally estimate class probabilities. produce , decision values model converted probabilities using Platt scaling. method fits additional model top SVM model. fitting Platt scaling model, random numbers used reproducible controlled R’s random number stream.","code":"svm_poly( cost = double(1), degree = integer(1), scale_factor = double(1) ) %>% set_engine(\"kernlab\") %>% set_mode(\"classification\") %>% translate() ## Polynomial Support Vector Machine Model Specification (classification) ## ## Main Arguments: ## cost = double(1) ## degree = integer(1) ## scale_factor = double(1) ## ## Computational engine: kernlab ## ## Model fit template: ## kernlab::ksvm(x = missing_arg(), data = missing_arg(), C = double(1), ## kernel = \"polydot\", prob.model = TRUE, kpar = list(degree = ~integer(1), ## scale = ~double(1)))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"Factor/categorical predictors need converted numeric values 
(e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"“Fitting Predicting parsnip” article contains examples svm_poly() \"kernlab\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"Lin, HT, R Weng. “Note Platt’s Probabilistic Outputs Support Vector Machines” Karatzoglou, , Smola, , Hornik, K, Zeileis. 2004. “kernlab - S4 Package Kernel Methods R.”, Journal Statistical Software. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":null,"dir":"Reference","previous_headings":"","what":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"kernlab::ksvm() fits support vector machine model. classification, model tries maximize width margin classes. regression, model optimizes robust loss function affected large model residuals.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"model 3 tuning parameters: cost: Cost (type: double, default: 1.0) rbf_sigma: Radial Basis Function sigma (type: double, default: see ) margin: Insensitivity Margin (type: double, default: 0.1) default radial basis function kernel parameter. kernlab estimates data using heuristic method. See kernlab::sigest(). 
method uses random numbers , without setting seed fitting, model reproducible.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"","code":"svm_rbf( cost = double(1), rbf_sigma = double(1), margin = double(1) ) %>% set_engine(\"kernlab\") %>% set_mode(\"regression\") %>% translate() ## Radial Basis Function Support Vector Machine Model Specification (regression) ## ## Main Arguments: ## cost = double(1) ## rbf_sigma = double(1) ## margin = double(1) ## ## Computational engine: kernlab ## ## Model fit template: ## kernlab::ksvm(x = missing_arg(), data = missing_arg(), C = double(1), ## epsilon = double(1), kernel = \"rbfdot\", kpar = list(sigma = ~double(1)))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"margin parameter apply classification models. Note \"kernlab\" engine naturally estimate class probabilities. produce , decision values model converted probabilities using Platt scaling. method fits additional model top SVM model. 
fitting Platt scaling model, random numbers used reproducible controlled R’s random number stream.","code":"svm_rbf( cost = double(1), rbf_sigma = double(1) ) %>% set_engine(\"kernlab\") %>% set_mode(\"classification\") %>% translate() ## Radial Basis Function Support Vector Machine Model Specification (classification) ## ## Main Arguments: ## cost = double(1) ## rbf_sigma = double(1) ## ## Computational engine: kernlab ## ## Model fit template: ## kernlab::ksvm(x = missing_arg(), data = missing_arg(), C = double(1), ## kernel = \"rbfdot\", prob.model = TRUE, kpar = list(sigma = ~double(1)))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"model object contains data required make predictions. 
saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"“Fitting Predicting parsnip” article contains examples svm_rbf() \"kernlab\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"Lin, HT, R Weng. “Note Platt’s Probabilistic Outputs Support Vector Machines” Karatzoglou, , Smola, , Hornik, K, Zeileis. 2004. “kernlab - S4 Package Kernel Methods R.”, Journal Statistical Software. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_flexible.html","id":null,"dir":"Reference","previous_headings":"","what":"Flexible discriminant analysis — discrim_flexible","title":"Flexible discriminant analysis — discrim_flexible","text":"discrim_flexible() defines model fits discriminant analysis model can use nonlinear features created using multivariate adaptive regression splines (MARS). function can fit classification models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
earth¹² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_flexible.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Flexible discriminant analysis — discrim_flexible","text":"","code":"discrim_flexible( mode = \"classification\", num_terms = NULL, prod_degree = NULL, prune_method = NULL, engine = \"earth\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_flexible.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Flexible discriminant analysis — discrim_flexible","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". num_terms number features retained final model, including intercept. prod_degree highest possible interaction degree. prune_method pruning method. engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_flexible.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Flexible discriminant analysis — discrim_flexible","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 discrim_flexible(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_flexible.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Flexible discriminant analysis — discrim_flexible","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_linear.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear discriminant analysis — discrim_linear","title":"Linear discriminant analysis — discrim_linear","text":"discrim_linear() defines model estimates multivariate distribution predictors separately data class (usually Gaussian common covariance matrix). Bayes' theorem used compute probability class, given predictor values. function can fit classification models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . MASS¹² mda² sda² sparsediscrim² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_linear.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Linear discriminant analysis — discrim_linear","text":"","code":"discrim_linear( mode = \"classification\", penalty = NULL, regularization_method = NULL, engine = \"MASS\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_linear.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Linear discriminant analysis — discrim_linear","text":"mode single character string type model. possible value model \"classification\". penalty non-negative number representing amount regularization used engines. regularization_method character string type regularized estimation. 
Possible values : \"diagonal\", \"min_distance\", \"shrink_cov\", \"shrink_mean\" (sparsediscrim engine ). engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_linear.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear discriminant analysis — discrim_linear","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 discrim_linear(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_linear.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear discriminant analysis — discrim_linear","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_quad.html","id":null,"dir":"Reference","previous_headings":"","what":"Quadratic discriminant analysis — discrim_quad","title":"Quadratic discriminant analysis — discrim_quad","text":"discrim_quad() defines model estimates multivariate distribution predictors separately data class (usually Gaussian separate covariance matrices). Bayes' theorem used compute probability class, given predictor values. function can fit classification models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
MASS¹² sparsediscrim² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_quad.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Quadratic discriminant analysis — discrim_quad","text":"","code":"discrim_quad( mode = \"classification\", regularization_method = NULL, engine = \"MASS\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_quad.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Quadratic discriminant analysis — discrim_quad","text":"mode single character string type model. possible value model \"classification\". regularization_method character string type regularized estimation. Possible values : \"diagonal\", \"shrink_cov\", \"shrink_mean\" (sparsediscrim engine ). engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_quad.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Quadratic discriminant analysis — discrim_quad","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 discrim_quad(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_quad.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Quadratic discriminant analysis — discrim_quad","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_regularized.html","id":null,"dir":"Reference","previous_headings":"","what":"Regularized discriminant analysis — discrim_regularized","title":"Regularized discriminant analysis — discrim_regularized","text":"discrim_regularized() defines model estimates multivariate distribution predictors separately data class. structure model can LDA, QDA, amalgam two. Bayes' theorem used compute probability class, given predictor values. function can fit classification models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . klaR¹² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_regularized.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Regularized discriminant analysis — discrim_regularized","text":"","code":"discrim_regularized( mode = \"classification\", frac_common_cov = NULL, frac_identity = NULL, engine = \"klaR\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_regularized.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Regularized discriminant analysis — discrim_regularized","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". frac_common_cov, frac_identity Numeric values zero one. 
engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_regularized.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Regularized discriminant analysis — discrim_regularized","text":"many ways regularizing models. example, one form regularization penalize model parameters. Similarly, classic James–Stein regularization approach shrinks model structure less complex form. model fits specific type regularized model Friedman (1989) uses two types regularization. One modulates class-specific covariance matrix . allows model balance LDA QDA. second regularization component shrinks covariance matrix towards identity matrix. penalization approach, discrim_linear() mda engine can used. regularization methods can used discrim_linear() discrim_quad() can used via sparsediscrim engine functions. function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 discrim_regularized(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_regularized.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Regularized discriminant analysis — discrim_regularized","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models Friedman, J (1989). Regularized Discriminant Analysis. 
Journal American Statistical Association, 84, 165-175.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/doc-tools.html","id":null,"dir":"Reference","previous_headings":"","what":"Tools for documenting engines — doc-tools","title":"Tools for documenting engines — doc-tools","text":"parsnip fairly complex documentation system engines model detailed documentation syntax, tuning parameters, preprocessing needs, . functions called .R files programmatically generate content help files model. find_engine_files() identifies engines model creates bulleted list links specific help files. make_seealso_list() creates set links \"See Also\" list bottom help pages. find_engine_files() function, used , find engines model function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/doc-tools.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Tools for documenting engines — doc-tools","text":"","code":"find_engine_files(mod) make_engine_list(mod) make_seealso_list(mod, pkg = \"parsnip\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/doc-tools.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Tools for documenting engines — doc-tools","text":"mod character string model file (e.g. \"linear_reg\") pkg character string package function invoked.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/doc-tools.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Tools for documenting engines — doc-tools","text":"make_engine_list() returns character string creates bulleted list links specific help files. make_seealso_list() returns formatted character string links. 
find_engine_files() returns tibble.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/doc-tools.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Tools for documenting engines — doc-tools","text":"parsnip includes document (README-DOCS.md) step--step instructions details. See code determine installed (see References section). parsnip users need use functions documentation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/doc-tools.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Tools for documenting engines — doc-tools","text":"https://github.com/tidymodels/parsnip/blob/main/inst/README-DOCS.md","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/doc-tools.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Tools for documenting engines — doc-tools","text":"","code":"# See this file for step-by-step instructions. system.file(\"README-DOCS.md\", package = \"parsnip\") #> [1] \"/home/runner/work/_temp/Library/parsnip/README-DOCS.md\" # Code examples: make_engine_list(\"linear_reg\") #> There are different ways to fit this model, and the method of estimation is chosen by setting the model \\emph{engine}. The engine-specific pages for this model are listed below. 
#> #> #> \\itemize{ #> \\item \\code{\\link[parsnip:details_linear_reg_lm]{lm}¹} #> \\item \\code{\\link[parsnip:details_linear_reg_brulee]{brulee}} #> \\item \\code{\\link[parsnip:details_linear_reg_gee]{gee}²} #> \\item \\code{\\link[parsnip:details_linear_reg_glm]{glm}} #> \\item \\code{\\link[parsnip:details_linear_reg_glmer]{glmer}²} #> \\item \\code{\\link[parsnip:details_linear_reg_glmnet]{glmnet}} #> \\item \\code{\\link[parsnip:details_linear_reg_gls]{gls}²} #> \\item \\code{\\link[parsnip:details_linear_reg_h2o]{h2o}²} #> \\item \\code{\\link[parsnip:details_linear_reg_keras]{keras}} #> \\item \\code{\\link[parsnip:details_linear_reg_lme]{lme}²} #> \\item \\code{\\link[parsnip:details_linear_reg_lmer]{lmer}²} #> \\item \\code{\\link[parsnip:details_linear_reg_spark]{spark}} #> \\item \\code{\\link[parsnip:details_linear_reg_stan]{stan}} #> \\item \\code{\\link[parsnip:details_linear_reg_stan_glmer]{stan_glmer}²} #> } #> #> #> ¹ The default engine. ² Requires a parsnip extension package. cat(make_engine_list(\"linear_reg\")) #> There are different ways to fit this model, and the method of estimation is chosen by setting the model \\emph{engine}. The engine-specific pages for this model are listed below. 
#> #> #> \\itemize{ #> \\item \\code{\\link[parsnip:details_linear_reg_lm]{lm}¹} #> \\item \\code{\\link[parsnip:details_linear_reg_brulee]{brulee}} #> \\item \\code{\\link[parsnip:details_linear_reg_gee]{gee}²} #> \\item \\code{\\link[parsnip:details_linear_reg_glm]{glm}} #> \\item \\code{\\link[parsnip:details_linear_reg_glmer]{glmer}²} #> \\item \\code{\\link[parsnip:details_linear_reg_glmnet]{glmnet}} #> \\item \\code{\\link[parsnip:details_linear_reg_gls]{gls}²} #> \\item \\code{\\link[parsnip:details_linear_reg_h2o]{h2o}²} #> \\item \\code{\\link[parsnip:details_linear_reg_keras]{keras}} #> \\item \\code{\\link[parsnip:details_linear_reg_lme]{lme}²} #> \\item \\code{\\link[parsnip:details_linear_reg_lmer]{lmer}²} #> \\item \\code{\\link[parsnip:details_linear_reg_spark]{spark}} #> \\item \\code{\\link[parsnip:details_linear_reg_stan]{stan}} #> \\item \\code{\\link[parsnip:details_linear_reg_stan_glmer]{stan_glmer}²} #> } #> #> #> ¹ The default engine. ² Requires a parsnip extension package."},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-extract_surv_status.html","id":null,"dir":"Reference","previous_headings":"","what":"Extract survival status — .extract_surv_status","title":"Extract survival status — .extract_surv_status","text":"Extract status survival::Surv() object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-extract_surv_status.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Extract survival status — .extract_surv_status","text":"surv single survival::Surv() object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-extract_surv_status.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Extract survival status — .extract_surv_status","text":"numeric vector.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-extract_surv_time.html","id":null,"dir":"Reference","previous_headings":"","what":"Extract 
survival time — .extract_surv_time","title":"Extract survival time — .extract_surv_time","text":"Extract time component(s) survival::Surv() object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-extract_surv_time.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Extract survival time — .extract_surv_time","text":"surv single survival::Surv() object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-extract_surv_time.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Extract survival time — .extract_surv_time","text":"vector type \"right\" \"left\" tibble otherwise.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-model_param_name_key.html","id":null,"dir":"Reference","previous_headings":"","what":"Translate names of model tuning parameters — .model_param_name_key","title":"Translate names of model tuning parameters — .model_param_name_key","text":"function creates key connects identifiers users make tuning parameter names, standardized parsnip parameter names, argument names underlying fit function engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-model_param_name_key.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Translate names of model tuning parameters — .model_param_name_key","text":"","code":".model_param_name_key(object, as_tibble = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-model_param_name_key.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Translate names of model tuning parameters — .model_param_name_key","text":"object workflow parsnip model specification. as_tibble logical. 
results tibble (default) list can facilitate renaming grid objects?","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-model_param_name_key.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Translate names of model tuning parameters — .model_param_name_key","text":"tibble columns user, parsnip, engine, list named character vectors user_to_parsnip parsnip_to_engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-model_param_name_key.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Translate names of model tuning parameters — .model_param_name_key","text":"","code":"mod <- linear_reg(penalty = tune(\"regularization\"), mixture = tune()) %>% set_engine(\"glmnet\") mod %>% .model_param_name_key() #> # A tibble: 2 × 3 #> user parsnip engine #> #> 1 regularization penalty lambda #> 2 mixture mixture alpha rn <- mod %>% .model_param_name_key(as_tibble = FALSE) rn #> $user_to_parsnip #> penalty mixture #> \"regularization\" \"mixture\" #> #> $parsnip_to_engine #> lambda alpha #> \"penalty\" \"mixture\" #> grid <- tidyr::crossing(regularization = c(0, 1), mixture = (0:3) / 3) grid %>% dplyr::rename(!!!rn$user_to_parsnip) #> # A tibble: 8 × 2 #> penalty mixture #> #> 1 0 0 #> 2 0 0.333 #> 3 0 0.667 #> 4 0 1 #> 5 1 0 #> 6 1 0.333 #> 7 1 0.667 #> 8 1 1 grid %>% dplyr::rename(!!!rn$user_to_parsnip) %>% dplyr::rename(!!!rn$parsnip_to_engine) #> # A tibble: 8 × 2 #> lambda alpha #> #> 1 0 0 #> 2 0 0.333 #> 3 0 0.667 #> 4 0 1 #> 5 1 0 #> 6 1 0.333 #> 7 1 0.667 #> 8 1 1"},{"path":"https://parsnip.tidymodels.org/dev/reference/eval_args.html","id":null,"dir":"Reference","previous_headings":"","what":"Evaluate parsnip model arguments — eval_args","title":"Evaluate parsnip model arguments — eval_args","text":"Evaluate parsnip model 
arguments","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/eval_args.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Evaluate parsnip model arguments — eval_args","text":"","code":"eval_args(spec, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/eval_args.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Evaluate parsnip model arguments — eval_args","text":"spec model specification ... used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/extension-check-helpers.html","id":null,"dir":"Reference","previous_headings":"","what":"Model Specification Checking: — spec_is_possible","title":"Model Specification Checking: — spec_is_possible","text":"helpers spec_is_possible(), spec_is_loaded(), prompt_missing_implementation() provide tooling checking model specifications. addition spec, engine, mode arguments, functions take arguments user_specified_engine user_specified_mode, denoting whether user specified engine mode, respectively.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/extension-check-helpers.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Model Specification Checking: — spec_is_possible","text":"","code":"spec_is_possible( spec, engine = spec$engine, user_specified_engine = spec$user_specified_engine, mode = spec$mode, user_specified_mode = spec$user_specified_mode ) spec_is_loaded( spec, engine = spec$engine, user_specified_engine = spec$user_specified_engine, mode = spec$mode, user_specified_mode = spec$user_specified_mode ) prompt_missing_implementation( spec, engine = spec$engine, user_specified_engine = spec$user_specified_engine, mode = spec$mode, user_specified_mode = spec$user_specified_mode, prompt, ... 
)"},{"path":"https://parsnip.tidymodels.org/dev/reference/extension-check-helpers.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Model Specification Checking: — spec_is_possible","text":"spec_is_possible() checks union current parsnip model environment model_info_table \"pre-registered\" model specifications determine whether model well-specified. See parsnip:::model_info_table table. spec_is_loaded() checks current parsnip model environment. spec_is_possible() executed automatically new_model_spec(), set_mode(), set_engine(), spec_is_loaded() executed automatically print.model_spec(), among places. spec_is_possible() used model specification still \"progress\" specified, spec_is_loaded called parsnip extension receives indication user \"done\" specifying model specification: print, fit, addition workflow, extract_*(), example. spec_is_loaded() FALSE, prompt_missing_implementation() helper construct informative message prompt users load install needed packages. prompt argument refers prompting function use, usually cli::cli_inform cli::cli_abort, ellipses passed function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/extract-parsnip.html","id":null,"dir":"Reference","previous_headings":"","what":"Extract elements of a parsnip model object — extract-parsnip","title":"Extract elements of a parsnip model object — extract-parsnip","text":"functions extract various elements parsnip object. exist yet, error thrown. extract_spec_parsnip() returns parsnip model specification. extract_fit_engine() returns engine specific fit embedded within parsnip model fit. example, using linear_reg() \"lm\" engine, returns underlying lm object. extract_parameter_dials() returns single dials parameter object. 
extract_parameter_set_dials() returns set dials parameter objects.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/extract-parsnip.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Extract elements of a parsnip model object — extract-parsnip","text":"","code":"# S3 method for model_fit extract_spec_parsnip(x, ...) # S3 method for model_fit extract_fit_engine(x, ...) # S3 method for model_spec extract_parameter_set_dials(x, ...) # S3 method for model_spec extract_parameter_dials(x, parameter, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/extract-parsnip.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Extract elements of a parsnip model object — extract-parsnip","text":"x parsnip model_fit object parsnip model_spec object. ... currently used. parameter single string parameter ID.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/extract-parsnip.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Extract elements of a parsnip model object — extract-parsnip","text":"extracted value parsnip object, x, described description section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/extract-parsnip.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Extract elements of a parsnip model object — extract-parsnip","text":"Extracting underlying engine fit can helpful describing model (via print(), summary(), plot(), etc.) variable importance/explainers. However, users invoke predict() method extracted model. may preprocessing operations parsnip executed data prior giving model. Bypassing can lead errors silently generating incorrect predictions. 
Good: Bad:","code":"parsnip_fit %>% predict(new_data) parsnip_fit %>% extract_fit_engine() %>% predict(new_data)"},{"path":"https://parsnip.tidymodels.org/dev/reference/extract-parsnip.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Extract elements of a parsnip model object — extract-parsnip","text":"","code":"lm_spec <- linear_reg() %>% set_engine(\"lm\") lm_fit <- fit(lm_spec, mpg ~ ., data = mtcars) lm_spec #> Linear Regression Model Specification (regression) #> #> Computational engine: lm #> extract_spec_parsnip(lm_fit) #> Linear Regression Model Specification (regression) #> #> Computational engine: lm #> #> Model fit template: #> stats::lm(formula = missing_arg(), data = missing_arg(), weights = missing_arg()) extract_fit_engine(lm_fit) #> #> Call: #> stats::lm(formula = mpg ~ ., data = data) #> #> Coefficients: #> (Intercept) cyl disp hp drat #> 12.30337 -0.11144 0.01334 -0.02148 0.78711 #> wt qsec vs am gear #> -3.71530 0.82104 0.31776 2.52023 0.65541 #> carb #> -0.19942 #> lm(mpg ~ ., data = mtcars) #> #> Call: #> lm(formula = mpg ~ ., data = mtcars) #> #> Coefficients: #> (Intercept) cyl disp hp drat #> 12.30337 -0.11144 0.01334 -0.02148 0.78711 #> wt qsec vs am gear #> -3.71530 0.82104 0.31776 2.52023 0.65541 #> carb #> -0.19942 #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/fit.html","id":null,"dir":"Reference","previous_headings":"","what":"Fit a Model Specification to a Dataset — fit.model_spec","title":"Fit a Model Specification to a Dataset — fit.model_spec","text":"fit() fit_xy() take model specification, translate required code substituting arguments, execute model fit routine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/fit.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Fit a Model Specification to a Dataset — fit.model_spec","text":"","code":"# S3 method for model_spec fit( object, formula, data, case_weights = NULL, control 
= control_parsnip(), ... ) # S3 method for model_spec fit_xy(object, x, y, case_weights = NULL, control = control_parsnip(), ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/fit.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Fit a Model Specification to a Dataset — fit.model_spec","text":"object object class model_spec chosen engine (via set_engine()). formula object class formula (one can coerced class): symbolic description model fitted. data Optional, depending interface (see Details ). data frame containing relevant variables (e.g. outcome(s), predictors, case weights, etc). Note: needed, named argument used. case_weights optional classed vector numeric case weights. must return TRUE hardhat::is_case_weights() run . See hardhat::frequency_weights() hardhat::importance_weights() examples. control named list elements verbosity catch. See control_parsnip(). ... currently used; values passed ignored. options required fit model passed using set_engine(). x matrix, sparse matrix, data frame predictors. models support sparse matrix input. See parsnip::get_encoding() details. x column names. y vector, matrix data frame outcome data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/fit.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Fit a Model Specification to a Dataset — fit.model_spec","text":"model_fit object contains several elements: lvl: outcome factor, contains factor levels time model fitting. spec: model specification object (object call fit) fit: model executed without error, model object. Otherwise, try-error object error message. preproc: objects needed convert formula non-formula interface (terms object) return value also class related fitted model (e.g. 
\"_glm\") base class \"model_fit\".","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/fit.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Fit a Model Specification to a Dataset — fit.model_spec","text":"fit() fit_xy() substitute current arguments model specification computational engine's code, check validity, fit model using data engine-specific code. Different model functions different interfaces (e.g. formula x/y) functions translate interface used fit() fit_xy() invoked one required underlying model. possible, functions attempt avoid making copies data. example, underlying model uses formula fit() invoked, original data references model fit. However, underlying model uses something else, x/y, formula evaluated data converted required format. case, calls resulting model objects reference temporary objects used fit model. model engine set, model's default engine used (discussed model page). verbosity option control_parsnip() greater zero, warning produced. like use alternative method generating contrasts supplying formula fit(), set global option contrasts preferred method. example, might set : options(contrasts = c(unordered = \"contr.helmert\", ordered = \"contr.poly\")). See help page stats::contr.treatment() possible contrast types. models \"censored regression\" modes, additional computation executed saved parsnip object. censor_probs element contains \"reverse Kaplan-Meier\" curve models probability censoring. 
may used later compute inverse probability censoring weights performance measures.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/fit.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Fit a Model Specification to a Dataset — fit.model_spec","text":"","code":"# Although `glm()` only has a formula interface, different # methods for specifying the model can be used library(dplyr) library(modeldata) data(\"lending_club\") lr_mod <- logistic_reg() using_formula <- lr_mod %>% set_engine(\"glm\") %>% fit(Class ~ funded_amnt + int_rate, data = lending_club) using_xy <- lr_mod %>% set_engine(\"glm\") %>% fit_xy(x = lending_club[, c(\"funded_amnt\", \"int_rate\")], y = lending_club$Class) using_formula #> parsnip model object #> #> #> Call: stats::glm(formula = Class ~ funded_amnt + int_rate, family = stats::binomial, #> data = data) #> #> Coefficients: #> (Intercept) funded_amnt int_rate #> 5.131e+00 2.767e-06 -1.586e-01 #> #> Degrees of Freedom: 9856 Total (i.e. Null); 9854 Residual #> Null Deviance:\t 4055 #> Residual Deviance: 3698 \tAIC: 3704 using_xy #> parsnip model object #> #> #> Call: stats::glm(formula = ..y ~ ., family = stats::binomial, data = data) #> #> Coefficients: #> (Intercept) funded_amnt int_rate #> 5.131e+00 2.767e-06 -1.586e-01 #> #> Degrees of Freedom: 9856 Total (i.e. 
Null); 9854 Residual #> Null Deviance:\t 4055 #> Residual Deviance: 3698 \tAIC: 3704"},{"path":"https://parsnip.tidymodels.org/dev/reference/fit_control.html","id":null,"dir":"Reference","previous_headings":"","what":"Control the fit function — fit_control","title":"Control the fit function — fit_control","text":"Pass options fit.model_spec() function control output computations","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/fit_control.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Control the fit function — fit_control","text":"","code":"fit_control(verbosity = 1L, catch = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/fit_control.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Control the fit function — fit_control","text":"verbosity integer control verbose output . value zero, messages output shown packages loaded model fit. value 1, package loading quiet model fits can produce output screen (depending contain verbose-type argument). value 2 , output displayed execution time fit recorded printed. catch logical value TRUE evaluate model inside try(, silent = TRUE). 
model fails, object still returned (without error) inherits class \"try-error\".","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/fit_control.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Control the fit function — fit_control","text":"S3 object class \"control_parsnip\" named list results function call","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/fit_control.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Control the fit function — fit_control","text":"fit_control() deprecated favor control_parsnip().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/fit_control.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Control the fit function — fit_control","text":"","code":"fit_control(verbosity = 2L) #> Warning: `fit_control()` was deprecated in parsnip 0.1.8. #> ℹ Please use `control_parsnip()` instead. #> parsnip control object #> - verbose level 2"},{"path":"https://parsnip.tidymodels.org/dev/reference/format-internals.html","id":null,"dir":"Reference","previous_headings":"","what":"Internal functions that format predictions — format-internals","title":"Internal functions that format predictions — format-internals","text":"used ensure appropriate column names inside tibbles.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/format-internals.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Internal functions that format predictions — format-internals","text":"","code":"format_num(x) format_class(x) format_classprobs(x) format_time(x) format_survival(x) format_linear_pred(x) format_hazard(x)"},{"path":"https://parsnip.tidymodels.org/dev/reference/format-internals.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Internal functions that format predictions — format-internals","text":"x data frame 
vector (depending context function).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/format-internals.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Internal functions that format predictions — format-internals","text":"tibble","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/gen_additive_mod.html","id":null,"dir":"Reference","previous_headings":"","what":"Generalized additive models (GAMs) — gen_additive_mod","title":"Generalized additive models (GAMs) — gen_additive_mod","text":"gen_additive_mod() defines model can use smoothed functions numeric predictors generalized linear model. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . mgcv¹ information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/gen_additive_mod.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Generalized additive models (GAMs) — gen_additive_mod","text":"","code":"gen_additive_mod( mode = \"unknown\", select_features = NULL, adjust_deg_free = NULL, engine = \"mgcv\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/gen_additive_mod.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Generalized additive models (GAMs) — gen_additive_mod","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". select_features TRUE FALSE. TRUE, model ability eliminate predictor (via penalization). Increasing adjust_deg_free increase likelihood removing predictors. adjust_deg_free select_features = TRUE, acts multiplier smoothness. Increase beyond 1 produce smoother models. 
engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/gen_additive_mod.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Generalized additive models (GAMs) — gen_additive_mod","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 gen_additive_mod(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/gen_additive_mod.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Generalized additive models (GAMs) — gen_additive_mod","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/gen_additive_mod.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Generalized additive models (GAMs) — gen_additive_mod","text":"","code":"show_engines(\"gen_additive_mod\") #> # A tibble: 2 × 2 #> engine mode #> #> 1 mgcv regression #> 2 mgcv classification gen_additive_mod() #> GAM Model Specification (unknown mode) #> #> Computational engine: mgcv #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/get_model_env.html","id":null,"dir":"Reference","previous_headings":"","what":"Working with the parsnip model environment — get_model_env","title":"Working with the parsnip model environment — get_model_env","text":"functions read write environment package stores information model specifications.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/get_model_env.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Working with the 
parsnip model environment — get_model_env","text":"","code":"get_model_env() get_from_env(items) set_in_env(...) set_env_val(name, value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/get_model_env.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Working with the parsnip model environment — get_model_env","text":"items character string objects model environment. ... Named values assigned model environment. name single character value new symbol model environment. value single value new value model environment.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/get_model_env.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Working with the parsnip model environment — get_model_env","text":"\"build parsnip model\" https://www.tidymodels.org/learn/develop/models/","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/get_model_env.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Working with the parsnip model environment — get_model_env","text":"","code":"# Access the model data: current_code <- get_model_env() ls(envir = current_code) #> [1] \"C5_rules\" \"C5_rules_args\" #> [3] \"C5_rules_fit\" \"C5_rules_modes\" #> [5] \"C5_rules_pkgs\" \"C5_rules_predict\" #> [7] \"auto_ml\" \"auto_ml_args\" #> [9] \"auto_ml_fit\" \"auto_ml_modes\" #> [11] \"auto_ml_pkgs\" \"auto_ml_predict\" #> [13] \"bag_mars\" \"bag_mars_args\" #> [15] \"bag_mars_fit\" \"bag_mars_modes\" #> [17] \"bag_mars_pkgs\" \"bag_mars_predict\" #> [19] \"bag_mlp\" \"bag_mlp_args\" #> [21] \"bag_mlp_fit\" \"bag_mlp_modes\" #> [23] \"bag_mlp_pkgs\" \"bag_mlp_predict\" #> [25] \"bag_tree\" \"bag_tree_args\" #> [27] \"bag_tree_fit\" \"bag_tree_modes\" #> [29] \"bag_tree_pkgs\" \"bag_tree_predict\" #> [31] \"bart\" \"bart_args\" #> [33] \"bart_encoding\" \"bart_fit\" #> [35] \"bart_modes\" \"bart_pkgs\" #> [37] \"bart_predict\" \"boost_tree\" 
#> [39] \"boost_tree_args\" \"boost_tree_encoding\" #> [41] \"boost_tree_fit\" \"boost_tree_modes\" #> [43] \"boost_tree_pkgs\" \"boost_tree_predict\" #> [45] \"cubist_rules\" \"cubist_rules_args\" #> [47] \"cubist_rules_fit\" \"cubist_rules_modes\" #> [49] \"cubist_rules_pkgs\" \"cubist_rules_predict\" #> [51] \"decision_tree\" \"decision_tree_args\" #> [53] \"decision_tree_encoding\" \"decision_tree_fit\" #> [55] \"decision_tree_modes\" \"decision_tree_pkgs\" #> [57] \"decision_tree_predict\" \"discrim_flexible\" #> [59] \"discrim_flexible_args\" \"discrim_flexible_fit\" #> [61] \"discrim_flexible_modes\" \"discrim_flexible_pkgs\" #> [63] \"discrim_flexible_predict\" \"discrim_linear\" #> [65] \"discrim_linear_args\" \"discrim_linear_fit\" #> [67] \"discrim_linear_modes\" \"discrim_linear_pkgs\" #> [69] \"discrim_linear_predict\" \"discrim_quad\" #> [71] \"discrim_quad_args\" \"discrim_quad_fit\" #> [73] \"discrim_quad_modes\" \"discrim_quad_pkgs\" #> [75] \"discrim_quad_predict\" \"discrim_regularized\" #> [77] \"discrim_regularized_args\" \"discrim_regularized_fit\" #> [79] \"discrim_regularized_modes\" \"discrim_regularized_pkgs\" #> [81] \"discrim_regularized_predict\" \"gen_additive_mod\" #> [83] \"gen_additive_mod_args\" \"gen_additive_mod_encoding\" #> [85] \"gen_additive_mod_fit\" \"gen_additive_mod_modes\" #> [87] \"gen_additive_mod_pkgs\" \"gen_additive_mod_predict\" #> [89] \"linear_reg\" \"linear_reg_args\" #> [91] \"linear_reg_encoding\" \"linear_reg_fit\" #> [93] \"linear_reg_modes\" \"linear_reg_pkgs\" #> [95] \"linear_reg_predict\" \"logistic_reg\" #> [97] \"logistic_reg_args\" \"logistic_reg_encoding\" #> [99] \"logistic_reg_fit\" \"logistic_reg_modes\" #> [101] \"logistic_reg_pkgs\" \"logistic_reg_predict\" #> [103] \"mars\" \"mars_args\" #> [105] \"mars_encoding\" \"mars_fit\" #> [107] \"mars_modes\" \"mars_pkgs\" #> [109] \"mars_predict\" \"mlp\" #> [111] \"mlp_args\" \"mlp_encoding\" #> [113] \"mlp_fit\" \"mlp_modes\" #> [115] \"mlp_pkgs\" 
\"mlp_predict\" #> [117] \"models\" \"modes\" #> [119] \"multinom_reg\" \"multinom_reg_args\" #> [121] \"multinom_reg_encoding\" \"multinom_reg_fit\" #> [123] \"multinom_reg_modes\" \"multinom_reg_pkgs\" #> [125] \"multinom_reg_predict\" \"naive_Bayes\" #> [127] \"naive_Bayes_args\" \"naive_Bayes_fit\" #> [129] \"naive_Bayes_modes\" \"naive_Bayes_pkgs\" #> [131] \"naive_Bayes_predict\" \"nearest_neighbor\" #> [133] \"nearest_neighbor_args\" \"nearest_neighbor_encoding\" #> [135] \"nearest_neighbor_fit\" \"nearest_neighbor_modes\" #> [137] \"nearest_neighbor_pkgs\" \"nearest_neighbor_predict\" #> [139] \"null_model\" \"null_model_args\" #> [141] \"null_model_encoding\" \"null_model_fit\" #> [143] \"null_model_modes\" \"null_model_pkgs\" #> [145] \"null_model_predict\" \"pls\" #> [147] \"pls_args\" \"pls_fit\" #> [149] \"pls_modes\" \"pls_pkgs\" #> [151] \"pls_predict\" \"poisson_reg\" #> [153] \"poisson_reg_args\" \"poisson_reg_fit\" #> [155] \"poisson_reg_modes\" \"poisson_reg_pkgs\" #> [157] \"poisson_reg_predict\" \"proportional_hazards\" #> [159] \"proportional_hazards_args\" \"proportional_hazards_fit\" #> [161] \"proportional_hazards_modes\" \"proportional_hazards_pkgs\" #> [163] \"proportional_hazards_predict\" \"rand_forest\" #> [165] \"rand_forest_args\" \"rand_forest_encoding\" #> [167] \"rand_forest_fit\" \"rand_forest_modes\" #> [169] \"rand_forest_pkgs\" \"rand_forest_predict\" #> [171] \"rule_fit\" \"rule_fit_args\" #> [173] \"rule_fit_fit\" \"rule_fit_modes\" #> [175] \"rule_fit_pkgs\" \"rule_fit_predict\" #> [177] \"surv_reg\" \"surv_reg_args\" #> [179] \"surv_reg_encoding\" \"surv_reg_fit\" #> [181] \"surv_reg_modes\" \"surv_reg_pkgs\" #> [183] \"surv_reg_predict\" \"survival_reg\" #> [185] \"survival_reg_args\" \"survival_reg_fit\" #> [187] \"survival_reg_modes\" \"survival_reg_pkgs\" #> [189] \"survival_reg_predict\" \"svm_linear\" #> [191] \"svm_linear_args\" \"svm_linear_encoding\" #> [193] \"svm_linear_fit\" \"svm_linear_modes\" #> [195] 
\"svm_linear_pkgs\" \"svm_linear_predict\" #> [197] \"svm_poly\" \"svm_poly_args\" #> [199] \"svm_poly_encoding\" \"svm_poly_fit\" #> [201] \"svm_poly_modes\" \"svm_poly_pkgs\" #> [203] \"svm_poly_predict\" \"svm_rbf\" #> [205] \"svm_rbf_args\" \"svm_rbf_encoding\" #> [207] \"svm_rbf_fit\" \"svm_rbf_modes\" #> [209] \"svm_rbf_pkgs\" \"svm_rbf_predict\""},{"path":"https://parsnip.tidymodels.org/dev/reference/glance.model_fit.html","id":null,"dir":"Reference","previous_headings":"","what":"Construct a single row summary ","title":"Construct a single row summary ","text":"method glances model parsnip model object, exists.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glance.model_fit.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Construct a single row summary ","text":"","code":"# S3 method for model_fit glance(x, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/glance.model_fit.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Construct a single row summary ","text":"x model R object convert single-row data frame ... arguments passed methods","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glance.model_fit.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Construct a single row summary ","text":"tibble","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glm_grouped.html","id":null,"dir":"Reference","previous_headings":"","what":"Fit a grouped binomial outcome from a data set with case weights — glm_grouped","title":"Fit a grouped binomial outcome from a data set with case weights — glm_grouped","text":"stats::glm() assumes tabular data set case weights corresponds \"different observations different dispersions\" (see ?glm). cases, case weights reflect covariate pattern observed multiple times (.e., frequency weights). 
case, stats::glm() expects data formatted number events factor level outcome can given formula cbind(events_1, events_2). glm_grouped() converts data integer case weights expected \"number events\" format binomial data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glm_grouped.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Fit a grouped binomial outcome from a data set with case weights — glm_grouped","text":"","code":"glm_grouped(formula, data, weights, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/glm_grouped.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Fit a grouped binomial outcome from a data set with case weights — glm_grouped","text":"formula formula object one outcome two-level factors. data data frame outcomes predictors (case weights). weights integer vector weights whose length number rows data. non-integer numeric, converted integer (warning). ... Options pass stats::glm(). family set, automatically assigned basic binomial family.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glm_grouped.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Fit a grouped binomial outcome from a data set with case weights — glm_grouped","text":"object produced stats::glm().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glm_grouped.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Fit a grouped binomial outcome from a data set with case weights — glm_grouped","text":"","code":"#---------------------------------------------------------------------------- # The same data set formatted three ways # First with basic case weights that, from ?glm, are used inappropriately. 
ucb_weighted <- as.data.frame(UCBAdmissions) ucb_weighted$Freq <- as.integer(ucb_weighted$Freq) head(ucb_weighted) #> Admit Gender Dept Freq #> 1 Admitted Male A 512 #> 2 Rejected Male A 313 #> 3 Admitted Female A 89 #> 4 Rejected Female A 19 #> 5 Admitted Male B 353 #> 6 Rejected Male B 207 nrow(ucb_weighted) #> [1] 24 # Format when yes/no data are in individual rows (probably still inappropriate) library(tidyr) ucb_long <- uncount(ucb_weighted, Freq) head(ucb_long) #> Admit Gender Dept #> 1 Admitted Male A #> 2 Admitted Male A #> 3 Admitted Male A #> 4 Admitted Male A #> 5 Admitted Male A #> 6 Admitted Male A nrow(ucb_long) #> [1] 4526 # Format where the outcome is formatted as number of events ucb_events <- ucb_weighted %>% tidyr::pivot_wider( id_cols = c(Gender, Dept), names_from = Admit, values_from = Freq, values_fill = 0L ) head(ucb_events) #> # A tibble: 6 × 4 #> Gender Dept Admitted Rejected #> #> 1 Male A 512 313 #> 2 Female A 89 19 #> 3 Male B 353 207 #> 4 Female B 17 8 #> 5 Male C 120 205 #> 6 Female C 202 391 nrow(ucb_events) #> [1] 12 #---------------------------------------------------------------------------- # Different model fits # Treat data as separate Bernoulli data: glm(Admit ~ Gender + Dept, data = ucb_long, family = binomial) #> #> Call: glm(formula = Admit ~ Gender + Dept, family = binomial, data = ucb_long) #> #> Coefficients: #> (Intercept) GenderFemale DeptB DeptC DeptD #> -0.58205 -0.09987 0.04340 1.26260 1.29461 #> DeptE DeptF #> 1.73931 3.30648 #> #> Degrees of Freedom: 4525 Total (i.e. 
Null); 4519 Residual #> Null Deviance:\t 6044 #> Residual Deviance: 5187 \tAIC: 5201 # Weights produce the same statistics glm( Admit ~ Gender + Dept, data = ucb_weighted, family = binomial, weights = ucb_weighted$Freq ) #> #> Call: glm(formula = Admit ~ Gender + Dept, family = binomial, data = ucb_weighted, #> weights = ucb_weighted$Freq) #> #> Coefficients: #> (Intercept) GenderFemale DeptB DeptC DeptD #> -0.58205 -0.09987 0.04340 1.26260 1.29461 #> DeptE DeptF #> 1.73931 3.30648 #> #> Degrees of Freedom: 23 Total (i.e. Null); 17 Residual #> Null Deviance:\t 6044 #> Residual Deviance: 5187 \tAIC: 5201 # Data as binomial \"x events out of n trials\" format. Note that, to get the same # coefficients, the order of the levels must be reversed. glm( cbind(Rejected, Admitted) ~ Gender + Dept, data = ucb_events, family = binomial ) #> #> Call: glm(formula = cbind(Rejected, Admitted) ~ Gender + Dept, family = binomial, #> data = ucb_events) #> #> Coefficients: #> (Intercept) GenderFemale DeptB DeptC DeptD #> -0.58205 -0.09987 0.04340 1.26260 1.29461 #> DeptE DeptF #> 1.73931 3.30648 #> #> Degrees of Freedom: 11 Total (i.e. Null); 5 Residual #> Null Deviance:\t 877.1 #> Residual Deviance: 20.2 \tAIC: 103.1 # The new function that starts with frequency weights and gets the correct place: glm_grouped(Admit ~ Gender + Dept, data = ucb_weighted, weights = ucb_weighted$Freq) #> #> Call: glm(formula = formula, family = \"binomial\", data = data) #> #> Coefficients: #> (Intercept) GenderFemale DeptB DeptC DeptD #> -0.58205 -0.09987 0.04340 1.26260 1.29461 #> DeptE DeptF #> 1.73931 3.30648 #> #> Degrees of Freedom: 11 Total (i.e. 
Null); 5 Residual #> Null Deviance:\t 877.1 #> Residual Deviance: 20.2 \tAIC: 103.1"},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet-details.html","id":null,"dir":"Reference","previous_headings":"","what":"Technical aspects of the glmnet model — glmnet-details","title":"Technical aspects of the glmnet model — glmnet-details","text":"glmnet popular statistical model regularized generalized linear models. notes reflect common questions particular model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet-details.html","id":"tidymodels-and-glmnet","dir":"Reference","previous_headings":"","what":"tidymodels and glmnet","title":"Technical aspects of the glmnet model — glmnet-details","text":"implementation glmnet package nice features. example, one main tuning parameters, regularization penalty, need specified fitting model. package fits compendium values, called regularization path. values depend data set value alpha, mixture parameter pure ridge model (alpha = 0) pure lasso model (alpha = 1). predicting, penalty values can simultaneously predicted, even exactly regularization path. , model approximates closest path values produce prediction. argument called lambda glmnet() function used specify path. discussion , linear_reg() used. information true parsnip models \"glmnet\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet-details.html","id":"fitting-and-predicting-using-parsnip","dir":"Reference","previous_headings":"","what":"Fitting and predicting using parsnip","title":"Technical aspects of the glmnet model — glmnet-details","text":"Recall tidymodels uses standardized parameter names across models chosen low jargon. argument penalty equivalent glmnet calls lambda value mixture alpha value. tidymodels, predict() methods defined make one prediction time. model, means predictions single penalty value. reason, models glmnet engines require user always specify single penalty value model defined. 
example, linear regression: predict() method called, automatically uses penalty given model defined. example: However, penalty values can predicted simultaneously using multi_predict() method:","code":"linear_reg(penalty = 1) %>% set_engine(\"glmnet\") library(tidymodels) fit <- linear_reg(penalty = 1) %>% set_engine(\"glmnet\") %>% fit(mpg ~ ., data = mtcars) # predict at penalty = 1 predict(fit, mtcars[1:3,]) ## # A tibble: 3 × 1 ## .pred ## ## 1 22.2 ## 2 21.5 ## 3 24.9 # predict at c(0.00, 0.01) multi_predict(fit, mtcars[1:3,], penalty = c(0.00, 0.01)) ## # A tibble: 3 × 1 ## .pred ## ## 1 ## 2 ## 3 # unnested: multi_predict(fit, mtcars[1:3,], penalty = c(0.00, 0.01)) %>% add_rowindex() %>% unnest(cols = \".pred\") ## # A tibble: 6 × 3 ## penalty .pred .row ## ## 1 0 22.6 1 ## 2 0.01 22.5 1 ## 3 0 22.1 2 ## 4 0.01 22.1 2 ## 5 0 26.3 3 ## 6 0.01 26.3 3"},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet-details.html","id":"where-did-lambda-go-","dir":"Reference","previous_headings":"","what":"Where did lambda go?","title":"Technical aspects of the glmnet model — glmnet-details","text":"may appear odd lambda value get used fit: Internally, value penalty = 1 saved parsnip object value set lambda. enables full path fit glmnet(). See section setting path.","code":"linear_reg(penalty = 1) %>% set_engine(\"glmnet\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Main Arguments: ## penalty = 1 ## ## Computational engine: glmnet ## ## Model fit template: ## glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## family = \"gaussian\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet-details.html","id":"how-do-i-set-the-regularization-path-","dir":"Reference","previous_headings":"","what":"How do I set the regularization path?","title":"Technical aspects of the glmnet model — glmnet-details","text":"Regardless value use penalty, full coefficient path used glmnet::glmnet() called. 
want manually set path? Normally, pass vector lambda glmnet::glmnet(). parsnip models use glmnet engine can use special optional argument called path_values. argument glmnet::glmnet(); used parsnip independently set path. example, found want fully ridge regression model (.e., mixture = 0), can get wrong coefficients path contain zero (see issue #431). want use path, argument passed engine-specific option:","code":"coef_path_values <- c(0, 10^seq(-5, 1, length.out = 7)) fit_ridge <- linear_reg(penalty = 1, mixture = 0) %>% set_engine(\"glmnet\", path_values = coef_path_values) %>% fit(mpg ~ ., data = mtcars) all.equal(sort(fit_ridge$fit$lambda), coef_path_values) ## [1] TRUE # predict at penalty = 1 predict(fit_ridge, mtcars[1:3,]) ## # A tibble: 3 × 1 ## .pred ## ## 1 22.1 ## 2 21.8 ## 3 26.6"},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet-details.html","id":"tidying-the-model-object","dir":"Reference","previous_headings":"","what":"Tidying the model object","title":"Technical aspects of the glmnet model — glmnet-details","text":"broom::tidy() function gives summary object tibble. tl;dr tidy() glmnet model produced parsnip gives coefficients value given penalty. parsnip makes model, gives extra class. Use tidy() method object, produces coefficients penalty originally requested: Note tidy() method glmnet objects broom package. 
used directly underlying glmnet object, returns coefficients path: can nice plots might contain penalty value interested .","code":"tidy(fit) ## # A tibble: 11 × 3 ## term estimate penalty ## ## 1 (Intercept) 35.3 1 ## 2 cyl -0.872 1 ## 3 disp 0 1 ## 4 hp -0.0101 1 ## 5 drat 0 1 ## 6 wt -2.59 1 ## # ℹ 5 more rows # Use the basic tidy() method for glmnet all_tidy_coefs <- broom:::tidy.glmnet(fit$fit) all_tidy_coefs ## # A tibble: 640 × 5 ## term step estimate lambda dev.ratio ## ## 1 (Intercept) 1 20.1 5.15 0 ## 2 (Intercept) 2 21.6 4.69 0.129 ## 3 (Intercept) 3 23.2 4.27 0.248 ## 4 (Intercept) 4 24.7 3.89 0.347 ## 5 (Intercept) 5 26.0 3.55 0.429 ## 6 (Intercept) 6 27.2 3.23 0.497 ## # ℹ 634 more rows length(unique(all_tidy_coefs$lambda)) ## [1] 79"},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet_helpers.html","id":null,"dir":"Reference","previous_headings":"","what":"Helper functions for checking the penalty of glmnet models — .check_glmnet_penalty_fit","title":"Helper functions for checking the penalty of glmnet models — .check_glmnet_penalty_fit","text":"functions developer use. .check_glmnet_penalty_fit() checks model specification fitting glmnet model contains single value. .check_glmnet_penalty_predict() checks penalty value used prediction valid. called predict(), needs single value. 
Multiple values allowed multi_predict().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet_helpers.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Helper functions for checking the penalty of glmnet models — .check_glmnet_penalty_fit","text":"","code":".check_glmnet_penalty_fit(x) .check_glmnet_penalty_predict(penalty = NULL, object, multi = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet_helpers.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Helper functions for checking the penalty of glmnet models — .check_glmnet_penalty_fit","text":"x object class model_spec. penalty penalty value check. object object class model_fit. multi logical indicating multiple values allowed.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet_helpers_prediction.html","id":null,"dir":"Reference","previous_headings":"","what":"Organize glmnet predictions — .organize_glmnet_pred","title":"Organize glmnet predictions — .organize_glmnet_pred","text":"function developer use organizes predictions glmnet models.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet_helpers_prediction.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Organize glmnet predictions — .organize_glmnet_pred","text":"","code":".organize_glmnet_pred(x, object)"},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet_helpers_prediction.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Organize glmnet predictions — .organize_glmnet_pred","text":"x Predictions returned predict() method glmnet models. 
object object class model_fit.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/has_multi_predict.html","id":null,"dir":"Reference","previous_headings":"","what":"Tools for models that predict on sub-models — has_multi_predict","title":"Tools for models that predict on sub-models — has_multi_predict","text":"has_multi_predict() tests see object can make multiple predictions submodels object. multi_predict_args() returns names arguments multi_predict() model ().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/has_multi_predict.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Tools for models that predict on sub-models — has_multi_predict","text":"","code":"has_multi_predict(object, ...) # S3 method for default has_multi_predict(object, ...) # S3 method for model_fit has_multi_predict(object, ...) # S3 method for workflow has_multi_predict(object, ...) multi_predict_args(object, ...) # S3 method for default multi_predict_args(object, ...) # S3 method for model_fit multi_predict_args(object, ...) # S3 method for workflow multi_predict_args(object, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/has_multi_predict.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Tools for models that predict on sub-models — has_multi_predict","text":"object object test. ... 
currently used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/has_multi_predict.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Tools for models that predict on sub-models — has_multi_predict","text":"has_multi_predict() returns single logical value multi_predict_args() returns character vector argument names (NA none exist).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/has_multi_predict.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Tools for models that predict on sub-models — has_multi_predict","text":"","code":"lm_model_idea <- linear_reg() %>% set_engine(\"lm\") has_multi_predict(lm_model_idea) #> [1] FALSE lm_model_fit <- fit(lm_model_idea, mpg ~ ., data = mtcars) has_multi_predict(lm_model_fit) #> [1] FALSE multi_predict_args(lm_model_fit) #> [1] NA library(kknn) knn_fit <- nearest_neighbor(mode = \"regression\", neighbors = 5) %>% set_engine(\"kknn\") %>% fit(mpg ~ ., mtcars) multi_predict_args(knn_fit) #> [1] \"neighbors\" multi_predict(knn_fit, mtcars[1, -1], neighbors = 1:4)$.pred #> [[1]] #> # A tibble: 4 × 2 #> neighbors .pred #> #> 1 1 21 #> 2 2 21 #> 3 3 20.9 #> 4 4 21.0 #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/keras_mlp.html","id":null,"dir":"Reference","previous_headings":"","what":"Simple interface to MLP models via keras — keras_mlp","title":"Simple interface to MLP models via keras — keras_mlp","text":"Instead building keras model sequentially, keras_mlp can used create feedforward network single hidden layer. 
Regularization via either weight decay dropout.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/keras_mlp.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Simple interface to MLP models via keras — keras_mlp","text":"","code":"keras_mlp( x, y, hidden_units = 5, penalty = 0, dropout = 0, epochs = 20, activation = \"softmax\", seeds = sample.int(10^5, size = 3), ... )"},{"path":"https://parsnip.tidymodels.org/dev/reference/keras_mlp.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Simple interface to MLP models via keras — keras_mlp","text":"x data frame matrix predictors y vector (factor numeric) matrix (numeric) outcome data. hidden_units integer number hidden units. penalty non-negative real number amount weight decay. Either parameter dropout can specified. dropout proportion parameters set zero. Either parameter penalty can specified. epochs integer number passes data. activation character string type activation function layers. seeds vector three positive integers control randomness calculations. ... Additional named arguments pass keras::compile() keras::fit(). 
Arguments sorted passed either function internally.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/keras_mlp.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Simple interface to MLP models via keras — keras_mlp","text":"keras model object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/keras_predict_classes.html","id":null,"dir":"Reference","previous_headings":"","what":"Wrapper for keras class predictions — keras_predict_classes","title":"Wrapper for keras class predictions — keras_predict_classes","text":"Wrapper keras class predictions","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/keras_predict_classes.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Wrapper for keras class predictions — keras_predict_classes","text":"","code":"keras_predict_classes(object, x)"},{"path":"https://parsnip.tidymodels.org/dev/reference/keras_predict_classes.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Wrapper for keras class predictions — keras_predict_classes","text":"object keras model fit x data set.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/knit_engine_docs.html","id":null,"dir":"Reference","previous_headings":"","what":"Knit engine-specific documentation — knit_engine_docs","title":"Knit engine-specific documentation — knit_engine_docs","text":"Knit engine-specific documentation","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/knit_engine_docs.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Knit engine-specific documentation — knit_engine_docs","text":"","code":"knit_engine_docs(pattern = NULL)"},{"path":"https://parsnip.tidymodels.org/dev/reference/knit_engine_docs.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Knit engine-specific documentation — 
knit_engine_docs","text":"pattern regular expression specify files knit. default knits engine documentation files.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/knit_engine_docs.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Knit engine-specific documentation — knit_engine_docs","text":"tibble column file file name result (character vector echos output file name , failure, error message).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/linear_reg.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression — linear_reg","title":"Linear regression — linear_reg","text":"linear_reg() defines model can predict numeric values predictors using linear function. function can fit regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . lm¹ brulee gee² glm glmer² glmnet gls² h2o² keras lme² lmer² spark stan stan_glmer² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/linear_reg.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Linear regression — linear_reg","text":"","code":"linear_reg(mode = \"regression\", engine = \"lm\", penalty = NULL, mixture = NULL)"},{"path":"https://parsnip.tidymodels.org/dev/reference/linear_reg.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Linear regression — linear_reg","text":"mode single character string type model. possible value model \"regression\". engine single character string specifying computational engine use fitting. Possible engines listed . default model \"lm\". penalty non-negative number representing total amount regularization (specific engines ). mixture number zero one (inclusive) denoting proportion L1 regularization (.e. lasso) model. 
mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge. Available specific engines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/linear_reg.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression — linear_reg","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 linear_reg(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/linear_reg.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression — linear_reg","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/linear_reg.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear regression — linear_reg","text":"","code":"show_engines(\"linear_reg\") #> # A tibble: 7 × 2 #> engine mode #> #> 1 lm regression #> 2 glm regression #> 3 glmnet regression #> 4 stan regression #> 5 spark regression #> 6 keras regression #> 7 brulee regression linear_reg() #> Linear Regression Model Specification (regression) #> #> Computational engine: lm #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/list_md_problems.html","id":null,"dir":"Reference","previous_headings":"","what":"Locate and show errors/warnings in engine-specific documentation — list_md_problems","title":"Locate and show errors/warnings in engine-specific documentation — list_md_problems","text":"Locate show errors/warnings engine-specific 
documentation","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/list_md_problems.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Locate and show errors/warnings in engine-specific documentation — list_md_problems","text":"","code":"list_md_problems()"},{"path":"https://parsnip.tidymodels.org/dev/reference/list_md_problems.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Locate and show errors/warnings in engine-specific documentation — list_md_problems","text":"tibble column file file name, line indicating line error/warning occurred, problem showing error/warning message.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/logistic_reg.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression — logistic_reg","title":"Logistic regression — logistic_reg","text":"logistic_reg() defines generalized linear model binary outcomes. linear combination predictors used model log odds event. function can fit classification models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . glm¹ brulee gee² glmer² glmnet h2o² keras LiblineaR spark stan stan_glmer² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/logistic_reg.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Logistic regression — logistic_reg","text":"","code":"logistic_reg( mode = \"classification\", engine = \"glm\", penalty = NULL, mixture = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/logistic_reg.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Logistic regression — logistic_reg","text":"mode single character string type model. possible value model \"classification\". engine single character string specifying computational engine use fitting. 
Possible engines listed . default model \"glm\". penalty non-negative number representing total amount regularization (specific engines ). keras models, corresponds purely L2 regularization (aka weight decay) models can either combination L1 L2 (depending value mixture). mixture number zero one (inclusive) giving proportion L1 regularization (.e. lasso) model. mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge. Available specific engines . LiblineaR models, mixture must exactly 1 0 .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/logistic_reg.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression — logistic_reg","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like : model fits classification model binary outcomes; multiclass outcomes, see multinom_reg().","code":"value <- 1 logistic_reg(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/logistic_reg.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression — logistic_reg","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/logistic_reg.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Logistic regression — logistic_reg","text":"","code":"show_engines(\"logistic_reg\") #> # A tibble: 7 × 2 #> engine mode #> #> 1 glm classification #> 2 glmnet classification #> 3 LiblineaR classification #> 4 spark classification #> 5 keras classification #> 6 stan classification #> 7 brulee classification logistic_reg() #> Logistic Regression Model Specification (classification) #> #> Computational engine: glm #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/make_call.html","id":null,"dir":"Reference","previous_headings":"","what":"Make a parsnip call expression — make_call","title":"Make a parsnip call expression — make_call","text":"Make parsnip call expression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/make_call.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Make a parsnip call expression — make_call","text":"","code":"make_call(fun, ns, args, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/make_call.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Make a parsnip call expression — make_call","text":"fun character string function name. ns character string package name. 
args named list argument values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/make_call.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Make a parsnip call expression — make_call","text":"call.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/make_call.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Make a parsnip call expression — make_call","text":"arguments spliced ns::fun() call. missing, null, single logical, spliced.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/make_classes.html","id":null,"dir":"Reference","previous_headings":"","what":"Prepend a new class — make_classes","title":"Prepend a new class — make_classes","text":"adds extra class base class \"model_spec\".","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/make_classes.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Prepend a new class — make_classes","text":"","code":"make_classes(prefix)"},{"path":"https://parsnip.tidymodels.org/dev/reference/make_classes.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Prepend a new class — make_classes","text":"prefix character string class.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/make_classes.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Prepend a new class — make_classes","text":"character vector.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/mars.html","id":null,"dir":"Reference","previous_headings":"","what":"Multivariate adaptive regression splines (MARS) — mars","title":"Multivariate adaptive regression splines (MARS) — mars","text":"mars() defines generalized linear model uses artificial features predictors. features resemble hinge functions result model segmented regression small dimensions. function can fit classification regression models. 
different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . earth¹ information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/mars.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Multivariate adaptive regression splines (MARS) — mars","text":"","code":"mars( mode = \"unknown\", engine = \"earth\", num_terms = NULL, prod_degree = NULL, prune_method = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/mars.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Multivariate adaptive regression splines (MARS) — mars","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". engine single character string specifying computational engine use fitting. num_terms number features retained final model, including intercept. prod_degree highest possible interaction degree. prune_method pruning method.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/mars.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multivariate adaptive regression splines (MARS) — mars","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 mars(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/mars.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multivariate adaptive regression splines (MARS) — mars","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/mars.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Multivariate adaptive regression splines (MARS) — mars","text":"","code":"show_engines(\"mars\") #> # A tibble: 2 × 2 #> engine mode #> #> 1 earth classification #> 2 earth regression mars(mode = \"regression\", num_terms = 5) #> MARS Model Specification (regression) #> #> Main Arguments: #> num_terms = 5 #> #> Computational engine: earth #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/max_mtry_formula.html","id":null,"dir":"Reference","previous_headings":"","what":"Determine largest value of mtry from formula.\nThis function potentially caps the value of mtry based on a formula and\ndata set. This is a safe approach for survival and/or multivariate models. — max_mtry_formula","title":"Determine largest value of mtry from formula.\nThis function potentially caps the value of mtry based on a formula and\ndata set. This is a safe approach for survival and/or multivariate models. — max_mtry_formula","text":"Determine largest value mtry formula. function potentially caps value mtry based formula data set. safe approach survival /multivariate models.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/max_mtry_formula.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Determine largest value of mtry from formula.\nThis function potentially caps the value of mtry based on a formula and\ndata set. 
This is a safe approach for survival and/or multivariate models. — max_mtry_formula","text":"","code":"max_mtry_formula(mtry, formula, data)"},{"path":"https://parsnip.tidymodels.org/dev/reference/max_mtry_formula.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Determine largest value of mtry from formula.\nThis function potentially caps the value of mtry based on a formula and\ndata set. This is a safe approach for survival and/or multivariate models. — max_mtry_formula","text":"mtry initial value mtry (may large). formula model formula. data training set (data frame).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/max_mtry_formula.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Determine largest value of mtry from formula.\nThis function potentially caps the value of mtry based on a formula and\ndata set. This is a safe approach for survival and/or multivariate models. — max_mtry_formula","text":"value mtry.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/max_mtry_formula.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Determine largest value of mtry from formula.\nThis function potentially caps the value of mtry based on a formula and\ndata set. This is a safe approach for survival and/or multivariate models. 
— max_mtry_formula","text":"","code":"# should be 9 max_mtry_formula(200, cbind(wt, mpg) ~ ., data = mtcars) #> [1] 9"},{"path":"https://parsnip.tidymodels.org/dev/reference/maybe_matrix.html","id":null,"dir":"Reference","previous_headings":"","what":"Fuzzy conversions — maybe_matrix","title":"Fuzzy conversions — maybe_matrix","text":"substitutes .matrix() .data.frame() leave sparse matrix -.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/maybe_matrix.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Fuzzy conversions — maybe_matrix","text":"","code":"maybe_matrix(x) maybe_data_frame(x)"},{"path":"https://parsnip.tidymodels.org/dev/reference/maybe_matrix.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Fuzzy conversions — maybe_matrix","text":"x data frame, matrix, sparse matrix.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/maybe_matrix.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Fuzzy conversions — maybe_matrix","text":"data frame, matrix, sparse matrix.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/min_cols.html","id":null,"dir":"Reference","previous_headings":"","what":"Execution-time data dimension checks — min_cols","title":"Execution-time data dimension checks — min_cols","text":"tuning parameters, range values depend data dimensions (e.g. mtry). packages fail parameter values outside ranges. Since model might receive resampled versions data, ranges set prior point model fit. 
functions check possible range data adjust needed (warning).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/min_cols.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Execution-time data dimension checks — min_cols","text":"","code":"min_cols(num_cols, source) min_rows(num_rows, source, offset = 0)"},{"path":"https://parsnip.tidymodels.org/dev/reference/min_cols.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Execution-time data dimension checks — min_cols","text":"num_cols, num_rows parameter value requested user. source data frame data used fit. source named \"data\", assumed one column data corresponds outcome (subtracted ). offset number subtracted number rows available data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/min_cols.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Execution-time data dimension checks — min_cols","text":"integer (perhaps warning).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/min_cols.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Execution-time data dimension checks — min_cols","text":"","code":"nearest_neighbor(neighbors= 100) %>% set_engine(\"kknn\") %>% set_mode(\"regression\") %>% translate() #> K-Nearest Neighbor Model Specification (regression) #> #> Main Arguments: #> neighbors = 100 #> #> Computational engine: kknn #> #> Model fit template: #> kknn::train.kknn(formula = missing_arg(), data = missing_arg(), #> ks = min_rows(100, data, 5)) library(ranger) rand_forest(mtry = 2, min_n = 100, trees = 3) %>% set_engine(\"ranger\") %>% set_mode(\"regression\") %>% fit(mpg ~ ., data = mtcars) #> Warning: 100 samples were requested but there were 32 rows in the data. 32 will be used. 
#> parsnip model object #> #> Ranger result #> #> Call: #> ranger::ranger(x = maybe_data_frame(x), y = y, mtry = min_cols(~2, x), num.trees = ~3, min.node.size = min_rows(~100, x), num.threads = 1, verbose = FALSE, seed = sample.int(10^5, 1)) #> #> Type: Regression #> Number of trees: 3 #> Sample size: 32 #> Number of independent variables: 10 #> Mtry: 2 #> Target node size: 32 #> Variable importance mode: none #> Splitrule: variance #> OOB prediction error (MSE): 39.02897 #> R squared (OOB): -0.07446488"},{"path":"https://parsnip.tidymodels.org/dev/reference/mlp.html","id":null,"dir":"Reference","previous_headings":"","what":"Single layer neural network — mlp","title":"Single layer neural network — mlp","text":"mlp() defines multilayer perceptron model (.k.. single layer, feed-forward neural network). function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . nnet¹ brulee h2o² keras information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/mlp.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Single layer neural network — mlp","text":"","code":"mlp( mode = \"unknown\", engine = \"nnet\", hidden_units = NULL, penalty = NULL, dropout = NULL, epochs = NULL, activation = NULL, learn_rate = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/mlp.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Single layer neural network — mlp","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". engine single character string specifying computational engine use fitting. hidden_units integer number units hidden model. penalty non-negative numeric value amount weight decay. 
dropout number 0 (inclusive) 1 denoting proportion model parameters randomly set zero model training. epochs integer number training iterations. activation single character string denoting type relationship original predictors hidden unit layer. activation function hidden output layers automatically set either \"linear\" \"softmax\" depending type outcome. Possible values : \"linear\", \"softmax\", \"relu\", \"elu\" learn_rate number rate boosting algorithm adapts iteration--iteration (specific engines ). sometimes referred shrinkage parameter.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/mlp.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Single layer neural network — mlp","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 mlp(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/mlp.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Single layer neural network — mlp","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/mlp.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Single layer neural network — mlp","text":"","code":"show_engines(\"mlp\") #> # A tibble: 6 × 2 #> engine mode #> #> 1 keras classification #> 2 keras regression #> 3 nnet classification #> 4 nnet regression #> 5 brulee classification #> 6 brulee regression mlp(mode = \"classification\", penalty = 0.01) #> Single Layer Neural Network Model Specification (classification) #> #> Main Arguments: #> penalty = 0.01 #> #> Computational engine: nnet 
#>"},{"path":"https://parsnip.tidymodels.org/dev/reference/model_db.html","id":null,"dir":"Reference","previous_headings":"","what":"parsnip model specification database — model_db","title":"parsnip model specification database — model_db","text":"used RStudio add-captures information mode specifications various R packages.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/model_db.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"parsnip model specification database — model_db","text":"model_db data frame","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/model_db.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"parsnip model specification database — model_db","text":"","code":"data(model_db)"},{"path":"https://parsnip.tidymodels.org/dev/reference/model_fit.html","id":null,"dir":"Reference","previous_headings":"","what":"Model Fit Object Information — model_fit","title":"Model Fit Object Information — model_fit","text":"object class \"model_fit\" container information model fit data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/model_fit.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Model Fit Object Information — model_fit","text":"main elements object : lvl: vector factor levels outcome factor. NULL outcome factor vector. spec: model_spec object. fit: object produced fitting function. preproc: contains data-specific information required process new sample point prediction. example, underlying model function requires arguments x y user passed formula fit, preproc object contain items terms object . information required, NA. discussed documentation model_spec, original arguments specification saved quosures. evaluated model_fit object prior fitting. resulting model object prints call, user-defined options shown call preceded tilde (see example ). result use quosures specification. 
class structure basis parsnip stores model objects seeing data applying model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/model_fit.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Model Fit Object Information — model_fit","text":"","code":"# Keep the `x` matrix if the data are not too big. spec_obj <- linear_reg() %>% set_engine(\"lm\", x = ifelse(.obs() < 500, TRUE, FALSE)) spec_obj #> Linear Regression Model Specification (regression) #> #> Engine-Specific Arguments: #> x = ifelse(.obs() < 500, TRUE, FALSE) #> #> Computational engine: lm #> fit_obj <- fit(spec_obj, mpg ~ ., data = mtcars) fit_obj #> parsnip model object #> #> #> Call: #> stats::lm(formula = mpg ~ ., data = data, x = ~ifelse(.obs() < #> 500, TRUE, FALSE)) #> #> Coefficients: #> (Intercept) cyl disp hp drat #> 12.30337 -0.11144 0.01334 -0.02148 0.78711 #> wt qsec vs am gear #> -3.71530 0.82104 0.31776 2.52023 0.65541 #> carb #> -0.19942 #> nrow(fit_obj$fit$x) #> [1] 32"},{"path":"https://parsnip.tidymodels.org/dev/reference/model_formula.html","id":null,"dir":"Reference","previous_headings":"","what":"Formulas with special terms in tidymodels — model_formula","title":"Formulas with special terms in tidymodels — model_formula","text":"R, formulas provide compact, symbolic notation specify model terms. Many modeling functions R make use \"specials\", nonstandard notations used formulas. Specials defined handled special case given modeling package. example, mgcv package, provides support generalized additive models R, defines function s() -lined formulas. can used like : example, s() special defines smoothing term mgcv package knows look preprocessing model input. parsnip package can handle specials without issue. 
analogous code specifying generalized additive model parsnip \"mgcv\" engine looks like: However, parsnip often used conjunction greater tidymodels package ecosystem, defines pre-processing infrastructure functionality via packages like hardhat recipes. specials defined many modeling packages introduce conflicts infrastructure. support specials also maintaining consistent syntax elsewhere ecosystem, tidymodels delineates two types formulas: preprocessing formulas model formulas. Preprocessing formulas specify input variables, model formulas determine model structure.","code":"mgcv::gam(mpg ~ wt + s(disp, k = 5), data = mtcars) gen_additive_mod() %>% set_mode(\"regression\") %>% set_engine(\"mgcv\") %>% fit(mpg ~ wt + s(disp, k = 5), data = mtcars)"},{"path":"https://parsnip.tidymodels.org/dev/reference/model_formula.html","id":"example","dir":"Reference","previous_headings":"","what":"Example","title":"Formulas with special terms in tidymodels — model_formula","text":"create preprocessing formula model formula, just remove specials, retaining references input variables . example: parsnip, use model formula: recipes, use preprocessing formula : recipes package supplies large variety preprocessing techniques may replace need specials altogether, cases. workflows, use preprocessing formula everywhere, pass model formula formula argument add_model(): workflow pass model formula parsnip, using preprocessor formula elsewhere. 
still use preprocessing formula added recipe preprocessor using add_recipe() instead formula via add_formula().","code":"model_formula <- mpg ~ wt + s(disp, k = 5) preproc_formula <- mpg ~ wt + disp model_spec <- gen_additive_mod() %>% set_mode(\"regression\") %>% set_engine(\"mgcv\") model_spec %>% fit(model_formula, data = mtcars) library(recipes) recipe(preproc_formula, mtcars) library(workflows) wflow <- workflow() %>% add_formula(preproc_formula) %>% add_model(model_spec, formula = model_formula) fit(wflow, data = mtcars)"},{"path":"https://parsnip.tidymodels.org/dev/reference/model_printer.html","id":null,"dir":"Reference","previous_headings":"","what":"Print helper for model objects — model_printer","title":"Print helper for model objects — model_printer","text":"common format function prints information model object (e.g. arguments, calls, packages, etc).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/model_printer.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Print helper for model objects — model_printer","text":"","code":"model_printer(x, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/model_printer.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Print helper for model objects — model_printer","text":"x model object. ... currently used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/model_spec.html","id":null,"dir":"Reference","previous_headings":"","what":"Model Specification Information — model_spec","title":"Model Specification Information — model_spec","text":"object class \"model_spec\" container information model fit.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/model_spec.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Model Specification Information — model_spec","text":"main elements object : args: vector main arguments model. 
names arguments may different counterparts n underlying model function. example, glmnet model, argument name amount penalty called \"penalty\" instead \"lambda\" make general usable across different types models (specific particular model function). elements args can tune() use tune package. information see https://www.tidymodels.org/start/tuning/. left defaults (NULL), arguments use underlying model functions default value. discussed , arguments args captured quosures immediately executed. ...: Optional model-function-specific parameters. args, quosures can tune(). mode: type model, \"regression\" \"classification\". modes added package adds functionality. method: slot filled later model's constructor function. generally contains lists information used create fit prediction code well required packages similar data. engine: character string declares exactly software used. can package name technology type. class structure basis parsnip stores model objects prior seeing data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/model_spec.html","id":"argument-details","dir":"Reference","previous_headings":"","what":"Argument Details","title":"Model Specification Information — model_spec","text":"important detail understand creating model specifications intended functionally independent data. true tuning parameters data dependent, model specification interact data . example, R functions immediately evaluate arguments. example, calling mean(dat_vec), object dat_vec immediately evaluated inside function. parsnip model functions . example, using execute ncol(mtcars) - 1 creating specification. can seen output: model functions save argument expressions associated environments (.k.. quosure) evaluated later either fit.model_spec() fit_xy.model_spec() called actual data. consequence strategy data required get parameter values must available model fit. two main ways can fail : data modified creation model specification model fit function invoked. 
model specification saved loaded new session data objects exist. best way avoid issues reference data objects global environment use data descriptors .cols(). Another way writing previous specification dependent specific data object evaluated immediately model fitting process begins. One less advantageous approach solving issue use quasiquotation. insert actual R object model specification might best idea data object small. example, using work (reproducible sessions) embeds entire mtcars data set mtry expression: However, object number columns , bad: information quosures quasiquotation can found https://adv-r.hadley.nz/quasiquotation.html.","code":"rand_forest(mtry = ncol(mtcars) - 1) > rand_forest(mtry = ncol(mtcars) - 1) Random Forest Model Specification (unknown) Main Arguments: mtry = ncol(mtcars) - 1 rand_forest(mtry = .cols() - 1) rand_forest(mtry = ncol(!!mtcars) - 1) > rand_forest(mtry = ncol(!!mtcars) - 1) Random Forest Model Specification (unknown) Main Arguments: mtry = ncol(structure(list(Sepal.Length = c(5.1, 4.9, 4.7, 4.6, 5, > mtry_val <- ncol(mtcars) - 1 > mtry_val [1] 10 > rand_forest(mtry = !!mtry_val) Random Forest Model Specification (unknown) Main Arguments: mtry = 10"},{"path":"https://parsnip.tidymodels.org/dev/reference/multi_predict.html","id":null,"dir":"Reference","previous_headings":"","what":"Model predictions across many sub-models — multi_predict","title":"Model predictions across many sub-models — multi_predict","text":"models, predictions can made sub-models model object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/multi_predict.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Model predictions across many sub-models — multi_predict","text":"","code":"multi_predict(object, ...) # S3 method for default multi_predict(object, ...) # S3 method for `_xgb.Booster` multi_predict(object, new_data, type = NULL, trees = NULL, ...) 
# S3 method for `_C5.0` multi_predict(object, new_data, type = NULL, trees = NULL, ...) # S3 method for `_elnet` multi_predict(object, new_data, type = NULL, penalty = NULL, ...) # S3 method for `_lognet` multi_predict(object, new_data, type = NULL, penalty = NULL, ...) # S3 method for `_multnet` multi_predict(object, new_data, type = NULL, penalty = NULL, ...) # S3 method for `_glmnetfit` multi_predict(object, new_data, type = NULL, penalty = NULL, ...) # S3 method for `_earth` multi_predict(object, new_data, type = NULL, num_terms = NULL, ...) # S3 method for `_torch_mlp` multi_predict(object, new_data, type = NULL, epochs = NULL, ...) # S3 method for `_train.kknn` multi_predict(object, new_data, type = NULL, neighbors = NULL, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/multi_predict.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Model predictions across many sub-models — multi_predict","text":"object model_fit object. ... Optional arguments pass predict.model_fit(type = \"raw\") type. new_data rectangular data object, data frame. type single character value NULL. Possible values \"numeric\", \"class\", \"prob\", \"conf_int\", \"pred_int\", \"quantile\", \"raw\". NULL, predict() choose appropriate value based model's mode. trees integer vector number trees ensemble. penalty numeric vector penalty values. num_terms integer vector number MARS terms retain. epochs integer vector number training epochs. neighbors integer vector number nearest neighbors.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/multi_predict.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Model predictions across many sub-models — multi_predict","text":"tibble number rows data predicted. list-column named .pred contains tibbles multiple rows per sub-model. Note , within tibbles, column names follow usual standard based prediction type (.e. 
.pred_class type = \"class\" ).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/multinom_reg.html","id":null,"dir":"Reference","previous_headings":"","what":"Multinomial regression — multinom_reg","title":"Multinomial regression — multinom_reg","text":"multinom_reg() defines model uses linear predictors predict multiclass data using multinomial distribution. function can fit classification models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . nnet¹ brulee glmnet h2o² keras spark information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/multinom_reg.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Multinomial regression — multinom_reg","text":"","code":"multinom_reg( mode = \"classification\", engine = \"nnet\", penalty = NULL, mixture = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/multinom_reg.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Multinomial regression — multinom_reg","text":"mode single character string type model. possible value model \"classification\". engine single character string specifying computational engine use fitting. Possible engines listed . default model \"nnet\". penalty non-negative number representing total amount regularization (specific engines ). keras models, corresponds purely L2 regularization (aka weight decay) models can combination L1 L2 (depending value mixture). mixture number zero one (inclusive) giving proportion L1 regularization (.e. lasso) model. mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge. 
Available specific engines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/multinom_reg.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multinomial regression — multinom_reg","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like : model fits classification model multiclass outcomes; binary outcomes, see logistic_reg().","code":"value <- 1 multinom_reg(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/multinom_reg.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multinomial regression — multinom_reg","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/multinom_reg.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Multinomial regression — multinom_reg","text":"","code":"show_engines(\"multinom_reg\") #> # A tibble: 5 × 2 #> engine mode #> #> 1 glmnet classification #> 2 spark classification #> 3 keras classification #> 4 nnet classification #> 5 brulee classification multinom_reg() #> Multinomial Regression Model Specification (classification) #> #> Computational engine: nnet #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/naive_Bayes.html","id":null,"dir":"Reference","previous_headings":"","what":"Naive Bayes models — naive_Bayes","title":"Naive Bayes models — naive_Bayes","text":"naive_Bayes() defines model uses Bayes' theorem compute probability class, given predictor values. function can fit classification models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
klaR¹² h2o² naivebayes² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/naive_Bayes.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Naive Bayes models — naive_Bayes","text":"","code":"naive_Bayes( mode = \"classification\", smoothness = NULL, Laplace = NULL, engine = \"klaR\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/naive_Bayes.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Naive Bayes models — naive_Bayes","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". smoothness non-negative number representing relative smoothness class boundary. Smaller examples result model flexible boundaries larger values generate class boundaries less adaptable Laplace non-negative value Laplace correction smoothing low-frequency counts. engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/naive_Bayes.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Naive Bayes models — naive_Bayes","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 naive_Bayes(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/naive_Bayes.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Naive Bayes models — naive_Bayes","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/nearest_neighbor.html","id":null,"dir":"Reference","previous_headings":"","what":"K-nearest neighbors — nearest_neighbor","title":"K-nearest neighbors — nearest_neighbor","text":"nearest_neighbor() defines model uses K similar data points training set predict new samples. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . kknn¹ information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/nearest_neighbor.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"K-nearest neighbors — nearest_neighbor","text":"","code":"nearest_neighbor( mode = \"unknown\", engine = \"kknn\", neighbors = NULL, weight_func = NULL, dist_power = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/nearest_neighbor.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"K-nearest neighbors — nearest_neighbor","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". engine single character string specifying computational engine use fitting. neighbors single integer number neighbors consider (often called k). kknn, value 5 used neighbors specified. weight_func single character type kernel function used weight distances samples. 
Valid choices : \"rectangular\", \"triangular\", \"epanechnikov\", \"biweight\", \"triweight\", \"cos\", \"inv\", \"gaussian\", \"rank\", \"optimal\". dist_power single number parameter used calculating Minkowski distance.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/nearest_neighbor.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"K-nearest neighbors — nearest_neighbor","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 nearest_neighbor(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/nearest_neighbor.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"K-nearest neighbors — nearest_neighbor","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/nearest_neighbor.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"K-nearest neighbors — nearest_neighbor","text":"","code":"show_engines(\"nearest_neighbor\") #> # A tibble: 2 × 2 #> engine mode #> #> 1 kknn classification #> 2 kknn regression nearest_neighbor(neighbors = 11) #> K-Nearest Neighbor Model Specification (unknown mode) #> #> Main Arguments: #> neighbors = 11 #> #> Computational engine: kknn #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/null_model.html","id":null,"dir":"Reference","previous_headings":"","what":"Null model — null_model","title":"Null model — null_model","text":"null_model() defines simple, non-informative model. main arguments. 
function can fit classification regression models.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/null_model.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Null model — null_model","text":"","code":"null_model(mode = \"classification\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/null_model.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Null model — null_model","text":"mode single character string model mode (e.g. \"regression\").","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/null_model.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Null model — null_model","text":"model can created using fit() function using following engines: R: \"parsnip\"","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/null_model.html","id":"engine-details","dir":"Reference","previous_headings":"","what":"Engine Details","title":"Null model — null_model","text":"Engines may pre-set default arguments executing model fit call. 
type model, template fit calls :","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/null_model.html","id":"parsnip","dir":"Reference","previous_headings":"","what":"parsnip","title":"Null model — null_model","text":"","code":"null_model() %>% set_engine(\"parsnip\") %>% set_mode(\"regression\") %>% translate() ## Null Model Specification (regression) ## ## Computational engine: parsnip ## ## Model fit template: ## parsnip::nullmodel(x = missing_arg(), y = missing_arg()) null_model() %>% set_engine(\"parsnip\") %>% set_mode(\"classification\") %>% translate() ## Null Model Specification (classification) ## ## Computational engine: parsnip ## ## Model fit template: ## parsnip::nullmodel(x = missing_arg(), y = missing_arg())"},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/null_model.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Null model — null_model","text":"","code":"null_model(mode = \"regression\") #> Null Model Specification (regression) #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/nullmodel.html","id":null,"dir":"Reference","previous_headings":"","what":"Fit a simple, non-informative model — nullmodel","title":"Fit a simple, non-informative model — nullmodel","text":"Fit single mean largest class model. nullmodel() underlying computational function null_model() specification.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/nullmodel.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Fit a simple, non-informative model — nullmodel","text":"","code":"nullmodel(x, ...) # S3 method for default nullmodel(x = NULL, y, ...) # S3 method for nullmodel print(x, ...) 
# S3 method for nullmodel predict(object, new_data = NULL, type = NULL, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/nullmodel.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Fit a simple, non-informative model — nullmodel","text":"x optional matrix data frame predictors. values used model fit ... Optional arguments (yet used) y numeric vector (regression) factor (classification) outcomes object object class nullmodel new_data matrix data frame predictors (used determine number predictions return) type Either \"raw\" (regression), \"class\" \"prob\" (classification)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/nullmodel.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Fit a simple, non-informative model — nullmodel","text":"output nullmodel() list class nullmodel elements call function call value mean y prevalent class levels y factor, vector levels. NULL otherwise pct y factor, data frame column class (NULL otherwise). column prevalent class proportion training samples class (columns zero). n number elements y predict.nullmodel() returns either factor numeric vector depending class y. predictions always .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/nullmodel.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Fit a simple, non-informative model — nullmodel","text":"nullmodel() emulates model building functions, returns simplest model possible given training set: single mean numeric outcomes prevalent class factor outcomes. 
class probabilities requested, percentage training set samples prevalent class returned.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/nullmodel.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Fit a simple, non-informative model — nullmodel","text":"","code":"outcome <- factor(sample(letters[1:2], size = 100, prob = c(.1, .9), replace = TRUE)) useless <- nullmodel(y = outcome) useless #> Null Regression Model #> Predicted Value: b predict(useless, matrix(NA, nrow = 5)) #> [1] b b b b b #> Levels: a b"},{"path":"https://parsnip.tidymodels.org/dev/reference/other_predict.html","id":null,"dir":"Reference","previous_headings":"","what":"Other predict methods. — predict_class.model_fit","title":"Other predict methods. — predict_class.model_fit","text":"internal functions meant directly called user.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/other_predict.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Other predict methods. — predict_class.model_fit","text":"","code":"# S3 method for model_fit predict_class(object, new_data, ...) # S3 method for model_fit predict_classprob(object, new_data, ...) # S3 method for model_fit predict_hazard(object, new_data, eval_time, time = deprecated(), ...) # S3 method for model_fit predict_confint(object, new_data, level = 0.95, std_error = FALSE, ...) # S3 method for model_fit predict_linear_pred(object, new_data, ...) predict_linear_pred(object, ...) # S3 method for model_fit predict_numeric(object, new_data, ...) predict_numeric(object, ...) # S3 method for model_fit predict_quantile( object, new_data, quantile = (1:9)/10, interval = \"none\", level = 0.95, ... ) # S3 method for model_fit predict_survival( object, new_data, eval_time, time = deprecated(), interval = \"none\", level = 0.95, ... ) predict_survival(object, ...) # S3 method for model_fit predict_time(object, new_data, ...) 
predict_time(object, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/other_predict.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Other predict methods. — predict_class.model_fit","text":"object object class model_fit. new_data rectangular data object, data frame. ... Additional parsnip-related options, depending value type. Arguments underlying model's prediction function passed (use opts argument instead). Possible arguments : interval: type equal \"survival\" \"quantile\", interval estimates added, available? Options \"none\" \"confidence\". level: type equal \"conf_int\", \"pred_int\", \"survival\", parameter tail area intervals (e.g. confidence level confidence intervals). Default value 0.95. std_error: type equal \"conf_int\" \"pred_int\", add standard error fit prediction (scale linear predictors). Default value FALSE. quantile: type equal quantile, quantiles distribution. Default (1:9)/10. eval_time: type equal \"survival\" \"hazard\", time points survival probability hazard estimated. level single numeric value zero one interval estimates. std_error single logical whether standard error returned (assuming model can compute ). 
quantile vector numbers 0 1 quantile predicted.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip-package.html","id":null,"dir":"Reference","previous_headings":"","what":"parsnip — parsnip-package","title":"parsnip — parsnip-package","text":"goal parsnip provide tidy, unified interface models can used try range models without getting bogged syntactical minutiae underlying packages.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip-package.html","id":"author","dir":"Reference","previous_headings":"","what":"Author","title":"parsnip — parsnip-package","text":"Maintainer: Max Kuhn max@posit.co Authors: Davis Vaughan davis@posit.co contributors: Emil Hvitfeldt emil.hvitfeldt@posit.co [contributor] Posit Software, PBC [copyright holder, funder]","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip_addin.html","id":null,"dir":"Reference","previous_headings":"","what":"Start an RStudio Addin that can write model specifications — parsnip_addin","title":"Start an RStudio Addin that can write model specifications — parsnip_addin","text":"parsnip_addin() starts process RStudio IDE Viewer window allows users write code parsnip model specifications various R packages. 
new code written current document location cursor.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip_addin.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Start an RStudio Addin that can write model specifications — parsnip_addin","text":"","code":"parsnip_addin()"},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip_update.html","id":null,"dir":"Reference","previous_headings":"","what":"Updating a model specification — update.bag_mars","title":"Updating a model specification — update.bag_mars","text":"parameters model specification need modified, update() can used lieu recreating object scratch.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip_update.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Updating a model specification — update.bag_mars","text":"","code":"# S3 method for bag_mars update( object, parameters = NULL, num_terms = NULL, prod_degree = NULL, prune_method = NULL, fresh = FALSE, ... ) # S3 method for bag_mlp update( object, parameters = NULL, hidden_units = NULL, penalty = NULL, epochs = NULL, fresh = FALSE, ... ) # S3 method for bag_tree update( object, parameters = NULL, cost_complexity = NULL, tree_depth = NULL, min_n = NULL, class_cost = NULL, fresh = FALSE, ... ) # S3 method for bart update( object, parameters = NULL, trees = NULL, prior_terminal_node_coef = NULL, prior_terminal_node_expo = NULL, prior_outcome_range = NULL, fresh = FALSE, ... ) # S3 method for boost_tree update( object, parameters = NULL, mtry = NULL, trees = NULL, min_n = NULL, tree_depth = NULL, learn_rate = NULL, loss_reduction = NULL, sample_size = NULL, stop_iter = NULL, fresh = FALSE, ... ) # S3 method for C5_rules update( object, parameters = NULL, trees = NULL, min_n = NULL, fresh = FALSE, ... ) # S3 method for cubist_rules update( object, parameters = NULL, committees = NULL, neighbors = NULL, max_rules = NULL, fresh = FALSE, ... 
) # S3 method for decision_tree update( object, parameters = NULL, cost_complexity = NULL, tree_depth = NULL, min_n = NULL, fresh = FALSE, ... ) # S3 method for discrim_flexible update( object, num_terms = NULL, prod_degree = NULL, prune_method = NULL, fresh = FALSE, ... ) # S3 method for discrim_linear update( object, penalty = NULL, regularization_method = NULL, fresh = FALSE, ... ) # S3 method for discrim_quad update(object, regularization_method = NULL, fresh = FALSE, ...) # S3 method for discrim_regularized update( object, frac_common_cov = NULL, frac_identity = NULL, fresh = FALSE, ... ) # S3 method for gen_additive_mod update( object, select_features = NULL, adjust_deg_free = NULL, parameters = NULL, fresh = FALSE, ... ) # S3 method for linear_reg update( object, parameters = NULL, penalty = NULL, mixture = NULL, fresh = FALSE, ... ) # S3 method for logistic_reg update( object, parameters = NULL, penalty = NULL, mixture = NULL, fresh = FALSE, ... ) # S3 method for mars update( object, parameters = NULL, num_terms = NULL, prod_degree = NULL, prune_method = NULL, fresh = FALSE, ... ) # S3 method for mlp update( object, parameters = NULL, hidden_units = NULL, penalty = NULL, dropout = NULL, epochs = NULL, activation = NULL, learn_rate = NULL, fresh = FALSE, ... ) # S3 method for multinom_reg update( object, parameters = NULL, penalty = NULL, mixture = NULL, fresh = FALSE, ... ) # S3 method for naive_Bayes update(object, smoothness = NULL, Laplace = NULL, fresh = FALSE, ...) # S3 method for nearest_neighbor update( object, parameters = NULL, neighbors = NULL, weight_func = NULL, dist_power = NULL, fresh = FALSE, ... ) # S3 method for pls update( object, parameters = NULL, predictor_prop = NULL, num_comp = NULL, fresh = FALSE, ... ) # S3 method for poisson_reg update( object, parameters = NULL, penalty = NULL, mixture = NULL, fresh = FALSE, ... 
) # S3 method for proportional_hazards update( object, parameters = NULL, penalty = NULL, mixture = NULL, fresh = FALSE, ... ) # S3 method for rand_forest update( object, parameters = NULL, mtry = NULL, trees = NULL, min_n = NULL, fresh = FALSE, ... ) # S3 method for rule_fit update( object, parameters = NULL, mtry = NULL, trees = NULL, min_n = NULL, tree_depth = NULL, learn_rate = NULL, loss_reduction = NULL, sample_size = NULL, penalty = NULL, fresh = FALSE, ... ) # S3 method for surv_reg update(object, parameters = NULL, dist = NULL, fresh = FALSE, ...) # S3 method for survival_reg update(object, parameters = NULL, dist = NULL, fresh = FALSE, ...) # S3 method for svm_linear update( object, parameters = NULL, cost = NULL, margin = NULL, fresh = FALSE, ... ) # S3 method for svm_poly update( object, parameters = NULL, cost = NULL, degree = NULL, scale_factor = NULL, margin = NULL, fresh = FALSE, ... ) # S3 method for svm_rbf update( object, parameters = NULL, cost = NULL, rbf_sigma = NULL, margin = NULL, fresh = FALSE, ... )"},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip_update.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Updating a model specification — update.bag_mars","text":"object model specification. parameters 1-row tibble named list main parameters update. Use either parameters main arguments directly updating. main arguments used, supersede values parameters. Also, using engine arguments object result error. num_terms number features retained final model, including intercept. prod_degree highest possible interaction degree. prune_method pruning method. fresh logical whether arguments modified -place replaced wholesale. ... used update(). hidden_units integer number units hidden model. penalty non-negative number representing amount regularization used engines. epochs integer number training iterations. cost_complexity positive number cost/complexity parameter (.k.. 
Cp) used CART models (specific engines ). tree_depth integer maximum depth tree. min_n integer minimum number data points node required node split . class_cost non-negative scalar class cost (cost 1 means extra cost). useful first level outcome factor minority class. case, values zero one can used bias second level factor. trees integer number trees contained ensemble. prior_terminal_node_coef coefficient prior probability node terminal node. prior_terminal_node_expo exponent prior probability node terminal node. prior_outcome_range positive value defines width prior predicted outcome within certain range. regression related observed range data; prior number standard deviations Gaussian distribution defined observed range data. classification, defined range +/-3 (assumed logit scale). default value 2. mtry number number (proportion) predictors randomly sampled split creating tree models (specific engines ). learn_rate number rate boosting algorithm adapts iteration--iteration (specific engines ). sometimes referred shrinkage parameter. loss_reduction number reduction loss function required split (specific engines ). sample_size number number (proportion) data exposed fitting routine. xgboost, sampling done iteration C5.0 samples training. stop_iter number iterations without improvement stopping (specific engines ). committees non-negative integer (greater 100) number members ensemble. neighbors integer zero nine number training set instances used adjust model-based prediction. max_rules largest number rules. regularization_method character string type regularized estimation. Possible values : \"diagonal\", \"min_distance\", \"shrink_cov\", \"shrink_mean\" (sparsediscrim engine ). frac_common_cov, frac_identity Numeric values zero one. select_features TRUE FALSE. TRUE, model ability eliminate predictor (via penalization). Increasing adjust_deg_free increase likelihood removing predictors. adjust_deg_free select_features = TRUE, acts multiplier smoothness. 
Increase beyond 1 produce smoother models. mixture number zero one (inclusive) denoting proportion L1 regularization (.e. lasso) model. mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge. Available specific engines . dropout number 0 (inclusive) 1 denoting proportion model parameters randomly set zero model training. activation single character string denoting type relationship original predictors hidden unit layer. activation function hidden output layers automatically set either \"linear\" \"softmax\" depending type outcome. Possible values : \"linear\", \"softmax\", \"relu\", \"elu\" smoothness non-negative number representing relative smoothness class boundary. Smaller examples result model flexible boundaries larger values generate class boundaries less adaptable Laplace non-negative value Laplace correction smoothing low-frequency counts. weight_func single character type kernel function used weight distances samples. Valid choices : \"rectangular\", \"triangular\", \"epanechnikov\", \"biweight\", \"triweight\", \"cos\", \"inv\", \"gaussian\", \"rank\", \"optimal\". dist_power single number parameter used calculating Minkowski distance. predictor_prop maximum proportion original predictors can non-zero coefficients PLS component (via regularization). value used PLS components X. num_comp number PLS components retain. dist character string probability distribution outcome. default \"weibull\". cost positive number cost predicting sample within wrong side margin margin positive number epsilon SVM insensitive loss function (regression ) degree positive number polynomial degree. scale_factor positive number polynomial scaling factor. 
rbf_sigma positive number radial basis function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip_update.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Updating a model specification — update.bag_mars","text":"updated model specification.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip_update.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Updating a model specification — update.bag_mars","text":"","code":"# ------------------------------------------------------------------------------ model <- C5_rules(trees = 10, min_n = 2) model #> ! parsnip could not locate an implementation for `C5_rules` model #> specifications. #> ℹ The parsnip extension package rules implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> C5.0 Model Specification (classification) #> #> Main Arguments: #> trees = 10 #> min_n = 2 #> #> Computational engine: C5.0 #> update(model, trees = 1) #> ! parsnip could not locate an implementation for `C5_rules` model #> specifications. #> ℹ The parsnip extension package rules implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> C5.0 Model Specification (classification) #> #> Main Arguments: #> trees = 1 #> min_n = 2 #> #> Computational engine: C5.0 #> update(model, trees = 1, fresh = TRUE) #> ! parsnip could not locate an implementation for `C5_rules` model #> specifications. #> ℹ The parsnip extension package rules implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> C5.0 Model Specification (classification) #> #> Main Arguments: #> trees = 1 #> #> Computational engine: C5.0 #> # ------------------------------------------------------------------------------ model <- cubist_rules(committees = 10, neighbors = 2) model #> ! 
parsnip could not locate an implementation for `cubist_rules` model #> specifications. #> ℹ The parsnip extension package rules implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> Cubist Model Specification (regression) #> #> Main Arguments: #> committees = 10 #> neighbors = 2 #> #> Computational engine: Cubist #> update(model, committees = 1) #> ! parsnip could not locate an implementation for `cubist_rules` model #> specifications. #> ℹ The parsnip extension package rules implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> Cubist Model Specification (regression) #> #> Main Arguments: #> committees = 1 #> neighbors = 2 #> #> Computational engine: Cubist #> update(model, committees = 1, fresh = TRUE) #> ! parsnip could not locate an implementation for `cubist_rules` model #> specifications. #> ℹ The parsnip extension package rules implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> Cubist Model Specification (regression) #> #> Main Arguments: #> committees = 1 #> #> Computational engine: Cubist #> model <- pls(predictor_prop = 0.1) model #> ! parsnip could not locate an implementation for `pls` model #> specifications. #> ℹ The parsnip extension package plsmod implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> PLS Model Specification (unknown mode) #> #> Main Arguments: #> predictor_prop = 0.1 #> #> Computational engine: mixOmics #> update(model, predictor_prop = 1) #> ! parsnip could not locate an implementation for `pls` model #> specifications. #> ℹ The parsnip extension package plsmod implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> PLS Model Specification (unknown mode) #> #> Main Arguments: #> predictor_prop = 1 #> #> Computational engine: mixOmics #> update(model, predictor_prop = 1, fresh = TRUE) #> ! 
parsnip could not locate an implementation for `pls` model #> specifications. #> ℹ The parsnip extension package plsmod implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> PLS Model Specification (unknown mode) #> #> Main Arguments: #> predictor_prop = 1 #> #> Computational engine: mixOmics #> # ------------------------------------------------------------------------------ model <- rule_fit(trees = 10, min_n = 2) model #> ! parsnip could not locate an implementation for `rule_fit` model #> specifications. #> ℹ The parsnip extension packages agua and rules implement support for #> this specification. #> ℹ Please install (if needed) and load to continue. #> RuleFit Model Specification (unknown mode) #> #> Main Arguments: #> trees = 10 #> min_n = 2 #> #> Computational engine: xrf #> update(model, trees = 1) #> ! parsnip could not locate an implementation for `rule_fit` model #> specifications. #> ℹ The parsnip extension packages agua and rules implement support for #> this specification. #> ℹ Please install (if needed) and load to continue. #> RuleFit Model Specification (unknown mode) #> #> Main Arguments: #> trees = 1 #> min_n = 2 #> #> Computational engine: xrf #> update(model, trees = 1, fresh = TRUE) #> ! parsnip could not locate an implementation for `rule_fit` model #> specifications. #> ℹ The parsnip extension packages agua and rules implement support for #> this specification. #> ℹ Please install (if needed) and load to continue. 
#> RuleFit Model Specification (unknown mode) #> #> Main Arguments: #> trees = 1 #> #> Computational engine: xrf #> model <- boost_tree(mtry = 10, min_n = 3) model #> Boosted Tree Model Specification (unknown mode) #> #> Main Arguments: #> mtry = 10 #> min_n = 3 #> #> Computational engine: xgboost #> update(model, mtry = 1) #> Boosted Tree Model Specification (unknown mode) #> #> Main Arguments: #> mtry = 1 #> min_n = 3 #> #> Computational engine: xgboost #> update(model, mtry = 1, fresh = TRUE) #> Boosted Tree Model Specification (unknown mode) #> #> Main Arguments: #> mtry = 1 #> #> Computational engine: xgboost #> param_values <- tibble::tibble(mtry = 10, tree_depth = 5) model %>% update(param_values) #> Boosted Tree Model Specification (unknown mode) #> #> Main Arguments: #> mtry = 10 #> min_n = 3 #> tree_depth = 5 #> #> Computational engine: xgboost #> model %>% update(param_values, mtry = 3) #> Boosted Tree Model Specification (unknown mode) #> #> Main Arguments: #> mtry = 10 #> min_n = 3 #> tree_depth = 5 #> #> Computational engine: xgboost #> param_values$verbose <- 0 # Fails due to engine argument # model %>% update(param_values) model <- linear_reg(penalty = 10, mixture = 0.1) model #> Linear Regression Model Specification (regression) #> #> Main Arguments: #> penalty = 10 #> mixture = 0.1 #> #> Computational engine: lm #> update(model, penalty = 1) #> Linear Regression Model Specification (regression) #> #> Main Arguments: #> penalty = 1 #> mixture = 0.1 #> #> Computational engine: lm #> update(model, penalty = 1, fresh = TRUE) #> Linear Regression Model Specification (regression) #> #> Main Arguments: #> penalty = 1 #> #> Computational engine: lm #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/pls.html","id":null,"dir":"Reference","previous_headings":"","what":"Partial least squares (PLS) — pls","title":"Partial least squares (PLS) — pls","text":"pls() defines partial least squares model uses latent variables model data. 
similar supervised version principal component. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . mixOmics¹² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/pls.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Partial least squares (PLS) — pls","text":"","code":"pls( mode = \"unknown\", predictor_prop = NULL, num_comp = NULL, engine = \"mixOmics\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/pls.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Partial least squares (PLS) — pls","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". predictor_prop maximum proportion original predictors can non-zero coefficients PLS component (via regularization). value used PLS components X. num_comp number PLS components retain. engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/pls.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Partial least squares (PLS) — pls","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 pls(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/pls.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Partial least squares (PLS) — pls","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/poisson_reg.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression models — poisson_reg","title":"Poisson regression models — poisson_reg","text":"poisson_reg() defines generalized linear model count data follow Poisson distribution. function can fit regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . glm¹² gee² glmer² glmnet² h2o² hurdle² stan² stan_glmer² zeroinfl² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/poisson_reg.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Poisson regression models — poisson_reg","text":"","code":"poisson_reg( mode = \"regression\", penalty = NULL, mixture = NULL, engine = \"glm\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/poisson_reg.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Poisson regression models — poisson_reg","text":"mode single character string type model. possible value model \"regression\". penalty non-negative number representing total amount regularization (glmnet ). mixture number zero one (inclusive) giving proportion L1 regularization (.e. lasso) model. mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge. Available glmnet spark . 
engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/poisson_reg.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression models — poisson_reg","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 poisson_reg(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/poisson_reg.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Poisson regression models — poisson_reg","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":null,"dir":"Reference","previous_headings":"","what":"Model predictions — predict.model_fit","title":"Model predictions — predict.model_fit","text":"Apply model create different types predictions. predict() can used types models uses \"type\" argument specificity.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Model predictions — predict.model_fit","text":"","code":"# S3 method for model_fit predict(object, new_data, type = NULL, opts = list(), ...) # S3 method for model_fit predict_raw(object, new_data, opts = list(), ...) predict_raw(object, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Model predictions — predict.model_fit","text":"object object class model_fit. 
new_data rectangular data object, data frame. type single character value NULL. Possible values \"numeric\", \"class\", \"prob\", \"conf_int\", \"pred_int\", \"quantile\", \"time\", \"hazard\", \"survival\", \"raw\". NULL, predict() choose appropriate value based model's mode. opts list optional arguments underlying predict function used type = \"raw\". list include options model object new data predicted. ... Additional parsnip-related options, depending value type. Arguments underlying model's prediction function passed (use opts argument instead). Possible arguments : interval: type equal \"survival\" \"quantile\", interval estimates added, available? Options \"none\" \"confidence\". level: type equal \"conf_int\", \"pred_int\", \"survival\", parameter tail area intervals (e.g. confidence level confidence intervals). Default value 0.95. std_error: type equal \"conf_int\" \"pred_int\", add standard error fit prediction (scale linear predictors). Default value FALSE. quantile: type equal quantile, quantiles distribution. Default (1:9)/10. eval_time: type equal \"survival\" \"hazard\", time points survival probability hazard estimated.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Model predictions — predict.model_fit","text":"exception type = \"raw\", result predict.model_fit() tibble many rows rows new_data standardized column names, see : type = \"numeric\", tibble .pred column single outcome .pred_Yname columns multivariate outcome. type = \"class\", tibble .pred_class column. type = \"prob\", tibble .pred_classlevel columns. type = \"conf_int\" type = \"pred_int\", tibble .pred_lower .pred_upper columns attribute confidence level. case intervals can produces class probabilities (non-scalar outputs), columns named .pred_lower_classlevel . type = \"quantile\", tibble .pred column, list-column. 
list element contains tibble columns .pred .quantile (perhaps columns). type = \"time\", tibble .pred_time column. type = \"survival\", tibble .pred column, list-column. list element contains tibble columns .eval_time .pred_survival (perhaps columns). type = \"hazard\", tibble .pred column, list-column. list element contains tibble columns .eval_time .pred_hazard (perhaps columns). Using type = \"raw\" predict.model_fit() return unadulterated results prediction function. case Spark-based models, since table columns contain dots, convention used except 1) dots appear names 2) vectors never returned type-specific prediction functions. model fit failed error captured, predict() function return structure filled missing values. currently work multivariate models.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Model predictions — predict.model_fit","text":"type = NULL, predict() uses type = \"numeric\" regression models, type = \"class\" classification, type = \"time\" censored regression.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":"interval-predictions","dir":"Reference","previous_headings":"","what":"Interval predictions","title":"Model predictions — predict.model_fit","text":"using type = \"conf_int\" type = \"pred_int\", options level std_error can used. latter logical extra column standard error values (available).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":"censored-regression-predictions","dir":"Reference","previous_headings":"","what":"Censored regression predictions","title":"Model predictions — predict.model_fit","text":"censored regression, numeric vector eval_time required survival hazard probabilities requested. time values required unique, finite, non-missing, non-negative. 
predict() functions adjust values fit specification removing offending points (warning). predict.model_fit() require outcome present. performance metrics predicted survival probability, inverse probability censoring weights (IPCW) required (see tidymodels.org reference ). require outcome thus returned predict(). can added via augment.model_fit() new_data contains column outcome Surv object. Also, type = \"linear_pred\", censored regression models default formatted linear predictor increases time. may opposite sign underlying model's predict() method produces. Set increasing = FALSE suppress behavior.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Model predictions — predict.model_fit","text":"https://www.tidymodels.org/learn/statistics/survival-metrics/","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Model predictions — predict.model_fit","text":"","code":"library(dplyr) lm_model <- linear_reg() %>% set_engine(\"lm\") %>% fit(mpg ~ ., data = mtcars %>% dplyr::slice(11:32)) pred_cars <- mtcars %>% dplyr::slice(1:10) %>% dplyr::select(-mpg) predict(lm_model, pred_cars) #> # A tibble: 10 × 1 #> .pred #> #> 1 23.4 #> 2 23.3 #> 3 27.6 #> 4 21.5 #> 5 17.6 #> 6 21.6 #> 7 13.9 #> 8 21.7 #> 9 25.6 #> 10 17.1 predict( lm_model, pred_cars, type = \"conf_int\", level = 0.90 ) #> # A tibble: 10 × 2 #> .pred_lower .pred_upper #> #> 1 17.9 29.0 #> 2 18.1 28.5 #> 3 24.0 31.3 #> 4 17.5 25.6 #> 5 14.3 20.8 #> 6 17.0 26.2 #> 7 9.65 18.2 #> 8 16.2 27.2 #> 9 14.2 37.0 #> 10 11.5 22.7 predict( lm_model, pred_cars, type = \"raw\", opts = list(type = \"terms\") ) #> cyl disp hp drat #> Mazda RX4 -0.001433177 -0.8113275 0.6303467 -0.06120265 #> Mazda RX4 Wag -0.001433177 -0.8113275 0.6303467 -0.06120265 #> Datsun 710 -0.009315653 
-1.3336453 0.8557288 -0.05014798 #> Hornet 4 Drive -0.001433177 0.1730406 0.6303467 0.12009386 #> Hornet Sportabout 0.006449298 1.1975870 -0.2314083 0.10461733 #> Valiant -0.001433177 -0.1584303 0.6966356 0.19084372 #> Duster 360 0.006449298 1.1975870 -1.1594522 0.09135173 #> Merc 240D -0.009315653 -0.9449204 1.2667197 -0.01477305 #> Merc 230 -0.009315653 -1.0041833 0.8292133 -0.06562451 #> Merc 280 -0.001433177 -0.7349888 0.4579957 -0.06562451 #> wt qsec vs am gear #> Mazda RX4 2.4139815 -1.567729 0.2006406 2.88774 0.02512680 #> Mazda RX4 Wag 1.4488706 -0.736286 0.2006406 2.88774 0.02512680 #> Datsun 710 3.5494061 1.624418 -0.3511210 2.88774 0.02512680 #> Hornet 4 Drive 0.1620561 2.856736 -0.3511210 -2.40645 -0.06700481 #> Hornet Sportabout -0.6895124 -0.736286 0.2006406 -2.40645 -0.06700481 #> Valiant -0.7652074 4.014817 -0.3511210 -2.40645 -0.06700481 #> Duster 360 -1.1815297 -2.488255 0.2006406 -2.40645 -0.06700481 #> Merc 240D 0.2566748 3.688179 -0.3511210 -2.40645 0.02512680 #> Merc 230 0.4080647 7.993866 -0.3511210 -2.40645 0.02512680 #> Merc 280 -0.6895124 1.164155 -0.3511210 -2.40645 0.02512680 #> carb #> Mazda RX4 -0.2497240 #> Mazda RX4 Wag -0.2497240 #> Datsun 710 0.4668753 #> Hornet 4 Drive 0.4668753 #> Hornet Sportabout 0.2280089 #> Valiant 0.4668753 #> Duster 360 -0.2497240 #> Merc 240D 0.2280089 #> Merc 230 0.2280089 #> Merc 280 -0.2497240 #> attr(,\"constant\") #> [1] 19.96364"},{"path":"https://parsnip.tidymodels.org/dev/reference/prepare_data.html","id":null,"dir":"Reference","previous_headings":"","what":"Prepare data based on parsnip encoding information — prepare_data","title":"Prepare data based on parsnip encoding information — prepare_data","text":"Prepare data based parsnip encoding information","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/prepare_data.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Prepare data based on parsnip encoding information — 
prepare_data","text":"","code":"prepare_data(object, new_data)"},{"path":"https://parsnip.tidymodels.org/dev/reference/prepare_data.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Prepare data based on parsnip encoding information — prepare_data","text":"object parsnip model object new_data data frame","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/prepare_data.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Prepare data based on parsnip encoding information — prepare_data","text":"data frame matrix","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/proportional_hazards.html","id":null,"dir":"Reference","previous_headings":"","what":"Proportional hazards regression — proportional_hazards","title":"Proportional hazards regression — proportional_hazards","text":"proportional_hazards() defines model hazard function multiplicative function covariates times baseline hazard. function can fit censored regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . survival¹² glmnet² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/proportional_hazards.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Proportional hazards regression — proportional_hazards","text":"","code":"proportional_hazards( mode = \"censored regression\", engine = \"survival\", penalty = NULL, mixture = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/proportional_hazards.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Proportional hazards regression — proportional_hazards","text":"mode single character string prediction outcome mode. possible value model \"censored regression\". engine single character string specifying computational engine use fitting. 
penalty non-negative number representing total amount regularization (specific engines ). mixture number zero one (inclusive) denoting proportion L1 regularization (.e. lasso) model. mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge. Available specific engines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/proportional_hazards.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Proportional hazards regression — proportional_hazards","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like : Since survival models typically involve censoring (require use survival::Surv() objects), fit.model_spec() function require survival model specified via formula interface. Proportional hazards models include Cox model.","code":"value <- 1 proportional_hazards(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/proportional_hazards.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Proportional hazards regression — proportional_hazards","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/proportional_hazards.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Proportional hazards regression — proportional_hazards","text":"","code":"show_engines(\"proportional_hazards\") #> # A tibble: 0 × 2 #> # ℹ 2 variables: engine , mode proportional_hazards(mode = \"censored regression\") #> ! 
parsnip could not locate an implementation for `proportional_hazards` #> censored regression model specifications. #> ℹ The parsnip extension package censored implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> Proportional Hazards Model Specification (censored regression) #> #> Computational engine: survival #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/rand_forest.html","id":null,"dir":"Reference","previous_headings":"","what":"Random forest — rand_forest","title":"Random forest — rand_forest","text":"rand_forest() defines model creates large number decision trees, independent others. final prediction uses predictions individual trees combines . function can fit classification, regression, censored regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . ranger¹ aorsf² h2o² partykit² randomForest spark information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/rand_forest.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Random forest — rand_forest","text":"","code":"rand_forest( mode = \"unknown\", engine = \"ranger\", mtry = NULL, trees = NULL, min_n = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/rand_forest.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Random forest — rand_forest","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\", \"censored regression\". engine single character string specifying computational engine use fitting. mtry integer number predictors randomly sampled split creating tree models. trees integer number trees contained ensemble. 
min_n integer minimum number data points node required node split .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/rand_forest.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Random forest — rand_forest","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 rand_forest(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/rand_forest.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Random forest — rand_forest","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/rand_forest.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Random forest — rand_forest","text":"","code":"show_engines(\"rand_forest\") #> # A tibble: 6 × 2 #> engine mode #> #> 1 ranger classification #> 2 ranger regression #> 3 randomForest classification #> 4 randomForest regression #> 5 spark classification #> 6 spark regression rand_forest(mode = \"classification\", trees = 2000) #> Random Forest Model Specification (classification) #> #> Main Arguments: #> trees = 2000 #> #> Computational engine: ranger #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/reexports.html","id":null,"dir":"Reference","previous_headings":"","what":"Objects exported from other packages — reexports","title":"Objects exported from other packages — reexports","text":"objects imported packages. Follow links see documentation. 
generics augment, fit, fit_xy, glance, required_pkgs, tidy, varying_args ggplot2 autoplot hardhat extract_fit_engine, extract_parameter_dials, extract_parameter_set_dials, extract_spec_parsnip, frequency_weights, importance_weights, tune magrittr %>%","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/repair_call.html","id":null,"dir":"Reference","previous_headings":"","what":"Repair a model call object — repair_call","title":"Repair a model call object — repair_call","text":"user passes formula fit() underlying model function uses formula, call object produced fit() may usable functions. example, arguments may still quosures data portion call correspond original data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/repair_call.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Repair a model call object — repair_call","text":"","code":"repair_call(x, data)"},{"path":"https://parsnip.tidymodels.org/dev/reference/repair_call.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Repair a model call object — repair_call","text":"x fitted parsnip model. error occur underlying model call element. data data object relevant call. cases, data frame given parsnip model fit (.e., training set data). 
name data object inserted call.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/repair_call.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Repair a model call object — repair_call","text":"modified parsnip fitted model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/repair_call.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Repair a model call object — repair_call","text":"repair_call() call can adjust model objects call usable functions methods.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/repair_call.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Repair a model call object — repair_call","text":"","code":"fitted_model <- linear_reg() %>% set_engine(\"lm\", model = TRUE) %>% fit(mpg ~ ., data = mtcars) # In this call, note that `data` is not `mtcars` and the `model = ~TRUE` # indicates that the `model` argument is an `rlang` quosure. fitted_model$fit$call #> stats::lm(formula = mpg ~ ., data = data, model = ~TRUE) # All better: repair_call(fitted_model, mtcars)$fit$call #> stats::lm(formula = mpg ~ ., data = mtcars, model = TRUE)"},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/req_pkgs.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Determine required packages for a model — req_pkgs","text":"","code":"req_pkgs(x, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/req_pkgs.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Determine required packages for a model — req_pkgs","text":"x model specification fit. ... 
used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/req_pkgs.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Determine required packages for a model — req_pkgs","text":"character string package names ().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/req_pkgs.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Determine required packages for a model — req_pkgs","text":"function deprecated favor required_pkgs().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/required_pkgs.model_spec.html","id":null,"dir":"Reference","previous_headings":"","what":"Determine required packages for a model — required_pkgs.model_spec","title":"Determine required packages for a model — required_pkgs.model_spec","text":"Determine required packages model","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/required_pkgs.model_spec.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Determine required packages for a model — required_pkgs.model_spec","text":"","code":"# S3 method for model_spec required_pkgs(x, infra = TRUE, ...) # S3 method for model_fit required_pkgs(x, infra = TRUE, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/required_pkgs.model_spec.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Determine required packages for a model — required_pkgs.model_spec","text":"x model specification fit. infra parsnip included result? ... 
used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/required_pkgs.model_spec.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Determine required packages for a model — required_pkgs.model_spec","text":"character vector","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/required_pkgs.model_spec.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Determine required packages for a model — required_pkgs.model_spec","text":"","code":"should_fail <- try(required_pkgs(linear_reg(engine = NULL)), silent = TRUE) should_fail #> [1] \"Error in required_pkgs(linear_reg(engine = NULL)) : Please set an engine.\\n\" #> attr(,\"class\") #> [1] \"try-error\" #> attr(,\"condition\") #> #> Error in `required_pkgs()`: #> ! Please set an engine. #> --- #> Backtrace: #> ▆ #> 1. └─pkgdown::build_site_github_pages(new_process = FALSE, install = FALSE) #> 2. └─pkgdown::build_site(...) #> 3. └─pkgdown:::build_site_local(...) #> 4. └─pkgdown::build_reference(...) #> 5. └─purrr::map(...) #> 6. └─purrr:::map_(\"list\", .x, .f, ..., .progress = .progress) #> 7. ├─purrr:::with_indexed_errors(...) #> 8. │ └─base::withCallingHandlers(...) #> 9. ├─purrr:::call_with_cleanup(...) #> 10. └─pkgdown (local) .f(.x[[i]], ...) #> 11. ├─base::withCallingHandlers(...) #> 12. └─pkgdown:::data_reference_topic(...) #> 13. └─pkgdown:::run_examples(...) #> 14. └─pkgdown:::highlight_examples(code, topic, env = env) #> 15. └─downlit::evaluate_and_highlight(...) #> 16. └─evaluate::evaluate(code, child_env(env), new_device = TRUE, output_handler = output_handler) #> 17. └─evaluate:::evaluate_call(...) #> 18. ├─evaluate (local) timing_fn(...) #> 19. ├─evaluate (local) handle(...) #> 20. │ └─base::try(f, silent = TRUE) #> 21. │ └─base::tryCatch(...) #> 22. │ └─base (local) tryCatchList(expr, classes, parentenv, handlers) #> 23. │ └─base (local) tryCatchOne(expr, names, parentenv, handlers[[1L]]) #> 24. 
│ └─base (local) doTryCatch(return(expr), name, parentenv, handler) #> 25. ├─base::withCallingHandlers(...) #> 26. ├─base::withVisible(...) #> 27. └─evaluate:::eval_with_user_handlers(expr, envir, enclos, user_handlers) #> 28. └─base::eval(expr, envir, enclos) #> 29. └─base::eval(expr, envir, enclos) #> 30. ├─base::try(required_pkgs(linear_reg(engine = NULL)), silent = TRUE) #> 31. │ └─base::tryCatch(...) #> 32. │ └─base (local) tryCatchList(expr, classes, parentenv, handlers) #> 33. │ └─base (local) tryCatchOne(expr, names, parentenv, handlers[[1L]]) #> 34. │ └─base (local) doTryCatch(return(expr), name, parentenv, handler) #> 35. ├─generics::required_pkgs(linear_reg(engine = NULL)) #> 36. └─parsnip:::required_pkgs.model_spec(linear_reg(engine = NULL)) linear_reg() %>% set_engine(\"glmnet\") %>% required_pkgs() #> [1] \"parsnip\" \"glmnet\" linear_reg() %>% set_engine(\"glmnet\") %>% required_pkgs(infra = FALSE) #> [1] \"glmnet\" linear_reg() %>% set_engine(\"lm\") %>% fit(mpg ~ ., data = mtcars) %>% required_pkgs() #> [1] \"parsnip\" \"stats\""},{"path":"https://parsnip.tidymodels.org/dev/reference/rpart_train.html","id":null,"dir":"Reference","previous_headings":"","what":"Decision trees via rpart — rpart_train","title":"Decision trees via rpart — rpart_train","text":"rpart_train wrapper rpart() tree-based models model arguments main function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/rpart_train.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Decision trees via rpart — rpart_train","text":"","code":"rpart_train( formula, data, weights = NULL, cp = 0.01, minsplit = 20, maxdepth = 30, ... )"},{"path":"https://parsnip.tidymodels.org/dev/reference/rpart_train.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Decision trees via rpart — rpart_train","text":"formula model formula. data data frame. weights Optional case weights. 
cp non-negative number complexity parameter. split decrease overall lack fit factor cp attempted. instance, anova splitting, means overall R-squared must increase cp step. main role parameter save computing time pruning splits obviously worthwhile. Essentially, user informs program split improve fit cp likely pruned cross-validation, hence program need pursue . minsplit integer minimum number observations must exist node order split attempted. maxdepth integer maximum depth node final tree, root node counted depth 0. Values greater 30 rpart give nonsense results 32-bit machines. function truncate maxdepth 30 cases. ... arguments pass either rpart rpart.control.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/rpart_train.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Decision trees via rpart — rpart_train","text":"fitted rpart model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/rule_fit.html","id":null,"dir":"Reference","previous_headings":"","what":"RuleFit models — rule_fit","title":"RuleFit models — rule_fit","text":"rule_fit() defines model derives simple feature rules tree ensemble uses features regularized model. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
xrf¹² h2o² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/rule_fit.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"RuleFit models — rule_fit","text":"","code":"rule_fit( mode = \"unknown\", mtry = NULL, trees = NULL, min_n = NULL, tree_depth = NULL, learn_rate = NULL, loss_reduction = NULL, sample_size = NULL, stop_iter = NULL, penalty = NULL, engine = \"xrf\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/rule_fit.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"RuleFit models — rule_fit","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". mtry number number (proportion) predictors randomly sampled split creating tree models (specific engines ). trees integer number trees contained ensemble. min_n integer minimum number data points node required node split . tree_depth integer maximum depth tree (.e. number splits) (specific engines ). learn_rate number rate boosting algorithm adapts iteration--iteration (specific engines ). sometimes referred shrinkage parameter. loss_reduction number reduction loss function required split (specific engines ). sample_size number number (proportion) data exposed fitting routine. xgboost, sampling done iteration C5.0 samples training. stop_iter number iterations without improvement stopping (specific engines ). penalty L1 regularization parameter. engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/rule_fit.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"RuleFit models — rule_fit","text":"RuleFit model creates regression model rules two stages. first stage uses tree-based model used generate set rules can filtered, modified, simplified. 
rules added predictors regularized generalized linear model can also conduct feature selection model training. function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 rule_fit(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/rule_fit.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"RuleFit models — rule_fit","text":"Friedman, J. H., Popescu, B. E. (2008). \"Predictive learning via rule ensembles.\" Annals Applied Statistics, 2(3), 916-954. https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/rule_fit.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"RuleFit models — rule_fit","text":"","code":"show_engines(\"rule_fit\") #> # A tibble: 0 × 2 #> # ℹ 2 variables: engine , mode rule_fit() #> ! parsnip could not locate an implementation for `rule_fit` model #> specifications. #> ℹ The parsnip extension packages agua and rules implement support for #> this specification. #> ℹ Please install (if needed) and load to continue. 
#> RuleFit Model Specification (unknown mode) #> #> Computational engine: xrf #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/set_args.html","id":null,"dir":"Reference","previous_headings":"","what":"Change elements of a model specification — set_args","title":"Change elements of a model specification — set_args","text":"set_args() can used modify arguments model specification set_mode() used change model's mode.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_args.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Change elements of a model specification — set_args","text":"","code":"set_args(object, ...) set_mode(object, mode)"},{"path":"https://parsnip.tidymodels.org/dev/reference/set_args.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Change elements of a model specification — set_args","text":"object model specification. ... One named model arguments. mode character string model type (e.g. 
\"classification\" \"regression\")","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_args.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Change elements of a model specification — set_args","text":"updated model object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_args.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Change elements of a model specification — set_args","text":"set_args() replace existing values arguments.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_args.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Change elements of a model specification — set_args","text":"","code":"rand_forest() #> Random Forest Model Specification (unknown mode) #> #> Computational engine: ranger #> rand_forest() %>% set_args(mtry = 3, importance = TRUE) %>% set_mode(\"regression\") #> Random Forest Model Specification (regression) #> #> Main Arguments: #> mtry = 3 #> #> Engine-Specific Arguments: #> importance = TRUE #> #> Computational engine: ranger #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/set_engine.html","id":null,"dir":"Reference","previous_headings":"","what":"Declare a computational engine and specific arguments — set_engine","title":"Declare a computational engine and specific arguments — set_engine","text":"set_engine() used specify package system used fit model, along arguments specific software.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_engine.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Declare a computational engine and specific arguments — set_engine","text":"","code":"set_engine(object, engine, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/set_engine.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Declare a computational 
engine and specific arguments — set_engine","text":"object model specification. engine character string software used fit model. highly dependent type model (e.g. linear regression, random forest, etc.). ... optional arguments associated chosen computational engine. captured quosures can tuned tune().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_engine.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Declare a computational engine and specific arguments — set_engine","text":"updated model specification.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_engine.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Declare a computational engine and specific arguments — set_engine","text":"parsnip, model type differentiates basic modeling approaches, random forests, logistic regression, linear support vector machines, etc., mode denotes kind modeling context used (commonly, classification regression), computational engine indicates model fit, specific R package implementation even methods outside R like Keras Stan. Use show_engines() get list possible engines model interest. Modeling functions parsnip separate model arguments two categories: Main arguments commonly used tend available across engines. names standardized work different engines consistent way, can use parsnip main argument trees, instead heterogeneous arguments parameter ranger randomForest packages (num.trees ntree, respectively). Set model type function, like rand_forest(trees = 2000). Engine arguments either specific particular engine used rarely; change argument names underlying engine. ... 
argument set_engine() allows engine-specific argument passed directly engine fitting function, like set_engine(\"ranger\", importance = \"permutation\").","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_engine.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Declare a computational engine and specific arguments — set_engine","text":"","code":"# First, set main arguments using the standardized names logistic_reg(penalty = 0.01, mixture = 1/3) %>% # Now specify how you want to fit the model with another argument set_engine(\"glmnet\", nlambda = 10) %>% translate() #> Logistic Regression Model Specification (classification) #> #> Main Arguments: #> penalty = 0.01 #> mixture = 1/3 #> #> Engine-Specific Arguments: #> nlambda = 10 #> #> Computational engine: glmnet #> #> Model fit template: #> glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), #> alpha = 1/3, nlambda = 10, family = \"binomial\") # Many models have possible engine-specific arguments decision_tree(tree_depth = 5) %>% set_engine(\"rpart\", parms = list(prior = c(.65,.35))) %>% set_mode(\"classification\") %>% translate() #> Decision Tree Model Specification (classification) #> #> Main Arguments: #> tree_depth = 5 #> #> Engine-Specific Arguments: #> parms = list(prior = c(0.65, 0.35)) #> #> Computational engine: rpart #> #> Model fit template: #> rpart::rpart(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), #> maxdepth = 5, parms = list(prior = c(0.65, 0.35)))"},{"path":"https://parsnip.tidymodels.org/dev/reference/set_new_model.html","id":null,"dir":"Reference","previous_headings":"","what":"Tools to Register Models — set_new_model","title":"Tools to Register Models — set_new_model","text":"functions similar constructors can used validate conflicts underlying model structures used 
package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_new_model.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Tools to Register Models — set_new_model","text":"","code":"set_new_model(model) set_model_mode(model, mode) set_model_engine(model, mode, eng) set_model_arg(model, eng, parsnip, original, func, has_submodel) set_dependency(model, eng, pkg = \"parsnip\", mode = NULL) get_dependency(model) set_fit(model, mode, eng, value) get_fit(model) set_pred(model, mode, eng, type, value) get_pred_type(model, type) show_model_info(model) pred_value_template(pre = NULL, post = NULL, func, ...) set_encoding(model, mode, eng, options) get_encoding(model)"},{"path":"https://parsnip.tidymodels.org/dev/reference/set_new_model.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Tools to Register Models — set_new_model","text":"model single character string model type (e.g. \"rand_forest\", etc). mode single character string model mode (e.g. \"regression\"). eng single character string model engine. parsnip single character string \"harmonized\" argument name parsnip exposes. original single character string argument name underlying model function uses. func named character vector describes call function. func elements pkg fun. former optional recommended latter required. example, c(pkg = \"stats\", fun = \"lm\") used invoke usual linear regression function. cases, helpful use c(fun = \"predict\") using package's predict method. has_submodel single logical whether argument can make predictions multiple submodels . pkg options character string package name. value list conforms fit_obj pred_obj description , depending context. type single character value type prediction. Possible values : class, conf_int, numeric, pred_int, prob, quantile, raw. pre, post Optional functions pre- post-processing prediction results. ... Optional arguments passed args slot prediction objects. 
options list options engine-specific preprocessing encodings. See Details .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_new_model.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Tools to Register Models — set_new_model","text":"functions available users add models engines (package otherwise) can accessed using parsnip. thoroughly documented package web site (see references ). short, parsnip stores environment object contains information code models used (e.g. fitting, predicting, etc). functions can used add models environment well helper functions can used makes sure model data right format. check_model_exists() checks model value ensures model already registered. check_model_doesnt_exist() checks model value also checks see novel environment. options engine-specific encodings dictate predictors handled. options ensure data parsnip gives underlying model allows model fit similar possible produced directly. example, fit() used fit model formula interface, typically predictor preprocessing must conducted. glmnet good example . four options can used encodings: predictor_indicators describes whether create indicator/dummy variables factor predictors. three options: \"none\" (expand factor predictors), \"traditional\" (apply standard model.matrix() encodings), \"one_hot\" (create complete set including baseline level factors). encoding affects cases fit.model_spec() used underlying model x/y interface. Another option compute_intercept; controls whether model.matrix() include intercept formula. affects inclusion intercept column. intercept, model.matrix() computes dummy variables one factor levels. Without intercept, model.matrix() computes full set indicators first factor variable, incomplete set remainder. Next, option remove_intercept remove intercept column model.matrix() finished. can useful model function (e.g. lm()) automatically generates intercept. 
Finally, allow_sparse_x specifies whether model function can natively accommodate sparse matrix representation predictors fitting tuning.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_new_model.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Tools to Register Models — set_new_model","text":"\"build parsnip model\" https://www.tidymodels.org/learn/develop/models/","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_new_model.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Tools to Register Models — set_new_model","text":"","code":"# set_new_model(\"shallow_learning_model\") # Show the information about a model: show_model_info(\"rand_forest\") #> Information for `rand_forest` #> modes: unknown, classification, regression, censored regression #> #> engines: #> classification: randomForest, ranger¹, spark #> regression: randomForest, ranger¹, spark #> #> ¹The model can use case weights. 
#> #> arguments: #> ranger: #> mtry --> mtry #> trees --> num.trees #> min_n --> min.node.size #> randomForest: #> mtry --> mtry #> trees --> ntree #> min_n --> nodesize #> spark: #> mtry --> feature_subset_strategy #> trees --> num_trees #> min_n --> min_instances_per_node #> #> fit modules: #> engine mode #> ranger classification #> ranger regression #> randomForest classification #> randomForest regression #> spark classification #> spark regression #> #> prediction modules: #> mode engine methods #> classification randomForest class, prob, raw #> classification ranger class, conf_int, prob, raw #> classification spark class, prob #> regression randomForest numeric, raw #> regression ranger conf_int, numeric, raw #> regression spark numeric #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/set_tf_seed.html","id":null,"dir":"Reference","previous_headings":"","what":"Set seed in R and TensorFlow at the same time — set_tf_seed","title":"Set seed in R and TensorFlow at the same time — set_tf_seed","text":"Keras models requires seeds set R TensorFlow achieve reproducible results. 
function sets seeds time using version appropriate functions.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_tf_seed.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Set seed in R and TensorFlow at the same time — set_tf_seed","text":"","code":"set_tf_seed(seed)"},{"path":"https://parsnip.tidymodels.org/dev/reference/set_tf_seed.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Set seed in R and TensorFlow at the same time — set_tf_seed","text":"seed 1 integer value.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/show_call.html","id":null,"dir":"Reference","previous_headings":"","what":"Print the model call — show_call","title":"Print the model call — show_call","text":"Print model call","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/show_call.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Print the model call — show_call","text":"","code":"show_call(object)"},{"path":"https://parsnip.tidymodels.org/dev/reference/show_call.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Print the model call — show_call","text":"object \"model_spec\" object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/show_call.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Print the model call — show_call","text":"character string.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/show_engines.html","id":null,"dir":"Reference","previous_headings":"","what":"Display currently available engines for a model — show_engines","title":"Display currently available engines for a model — show_engines","text":"possible engines model can depend packages loaded. parsnip extension add engines existing models. 
example, poissonreg package adds additional engines poisson_reg() model available unless poissonreg loaded.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/show_engines.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Display currently available engines for a model — show_engines","text":"","code":"show_engines(x)"},{"path":"https://parsnip.tidymodels.org/dev/reference/show_engines.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Display currently available engines for a model — show_engines","text":"x name parsnip model (e.g., \"linear_reg\", \"mars\", etc.)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/show_engines.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Display currently available engines for a model — show_engines","text":"tibble.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/show_engines.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Display currently available engines for a model — show_engines","text":"","code":"show_engines(\"linear_reg\") #> # A tibble: 7 × 2 #> engine mode #> #> 1 lm regression #> 2 glm regression #> 3 glmnet regression #> 4 stan regression #> 5 spark regression #> 6 keras regression #> 7 brulee regression"},{"path":"https://parsnip.tidymodels.org/dev/reference/stan_conf_int.html","id":null,"dir":"Reference","previous_headings":"","what":"Wrapper for stan confidence intervals — stan_conf_int","title":"Wrapper for stan confidence intervals — stan_conf_int","text":"Wrapper stan confidence intervals","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/stan_conf_int.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Wrapper for stan confidence intervals — stan_conf_int","text":"","code":"stan_conf_int(object, 
newdata)"},{"path":"https://parsnip.tidymodels.org/dev/reference/stan_conf_int.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Wrapper for stan confidence intervals — stan_conf_int","text":"object stan model fit newdata data set.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/surv_reg.html","id":null,"dir":"Reference","previous_headings":"","what":"Parametric survival regression — surv_reg","title":"Parametric survival regression — surv_reg","text":"function deprecated favor survival_reg() uses \"censored regression\" mode. surv_reg() defines parametric survival model. information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/surv_reg.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Parametric survival regression — surv_reg","text":"","code":"surv_reg(mode = \"regression\", engine = \"survival\", dist = NULL)"},{"path":"https://parsnip.tidymodels.org/dev/reference/surv_reg.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Parametric survival regression — surv_reg","text":"mode single character string prediction outcome mode. possible value model \"regression\". engine single character string specifying computational engine use fitting. dist character string probability distribution outcome. default \"weibull\".","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/surv_reg.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Parametric survival regression — surv_reg","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like : Since survival models typically involve censoring (require use survival::Surv() objects), fit.model_spec() function require survival model specified via formula interface.","code":"value <- 1 surv_reg(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/surv_reg.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Parametric survival regression — surv_reg","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/survival_reg.html","id":null,"dir":"Reference","previous_headings":"","what":"Parametric survival regression — survival_reg","title":"Parametric survival regression — survival_reg","text":"survival_reg() defines parametric survival model. function can fit censored regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . survival¹² flexsurv² flexsurvspline² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/survival_reg.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Parametric survival regression — survival_reg","text":"","code":"survival_reg(mode = \"censored regression\", engine = \"survival\", dist = NULL)"},{"path":"https://parsnip.tidymodels.org/dev/reference/survival_reg.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Parametric survival regression — survival_reg","text":"mode single character string prediction outcome mode. possible value model \"censored regression\". engine single character string specifying computational engine use fitting. dist character string probability distribution outcome. 
default \"weibull\".","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/survival_reg.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Parametric survival regression — survival_reg","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like : Since survival models typically involve censoring (require use survival::Surv() objects), fit.model_spec() function require survival model specified via formula interface.","code":"value <- 1 survival_reg(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/survival_reg.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Parametric survival regression — survival_reg","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/survival_reg.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Parametric survival regression — survival_reg","text":"","code":"show_engines(\"survival_reg\") #> # A tibble: 0 × 2 #> # ℹ 2 variables: engine , mode survival_reg(mode = \"censored regression\", dist = \"weibull\") #> ! parsnip could not locate an implementation for `survival_reg` censored #> regression model specifications. #> ℹ The parsnip extension package censored implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. 
#> Parametric Survival Regression Model Specification (censored regression) #> #> Main Arguments: #> dist = weibull #> #> Computational engine: survival #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_linear.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear support vector machines — svm_linear","title":"Linear support vector machines — svm_linear","text":"svm_linear() defines support vector machine model. classification, model tries maximize width margin classes (using linear class boundary). regression, model optimizes robust loss function affected large model residuals uses linear fit. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . LiblineaR¹ kernlab information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_linear.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Linear support vector machines — svm_linear","text":"","code":"svm_linear(mode = \"unknown\", engine = \"LiblineaR\", cost = NULL, margin = NULL)"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_linear.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Linear support vector machines — svm_linear","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". engine single character string specifying computational engine use fitting. cost positive number cost predicting sample within wrong side margin margin positive number epsilon SVM insensitive loss function (regression )","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_linear.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear support vector machines — svm_linear","text":"function defines type model fit. 
engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 svm_linear(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_linear.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear support vector machines — svm_linear","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_linear.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear support vector machines — svm_linear","text":"","code":"show_engines(\"svm_linear\") #> # A tibble: 4 × 2 #> engine mode #> #> 1 LiblineaR classification #> 2 LiblineaR regression #> 3 kernlab classification #> 4 kernlab regression svm_linear(mode = \"classification\") #> Linear Support Vector Machine Model Specification (classification) #> #> Computational engine: LiblineaR #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_poly.html","id":null,"dir":"Reference","previous_headings":"","what":"Polynomial support vector machines — svm_poly","title":"Polynomial support vector machines — svm_poly","text":"svm_poly() defines support vector machine model. classification, model tries maximize width margin classes using polynomial class boundary. regression, model optimizes robust loss function affected large model residuals uses polynomial functions predictors. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
kernlab¹ information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_poly.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Polynomial support vector machines — svm_poly","text":"","code":"svm_poly( mode = \"unknown\", engine = \"kernlab\", cost = NULL, degree = NULL, scale_factor = NULL, margin = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_poly.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Polynomial support vector machines — svm_poly","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". engine single character string specifying computational engine use fitting. cost positive number cost predicting sample within wrong side margin degree positive number polynomial degree. scale_factor positive number polynomial scaling factor. margin positive number epsilon SVM insensitive loss function (regression )","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_poly.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Polynomial support vector machines — svm_poly","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 svm_poly(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_poly.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Polynomial support vector machines — svm_poly","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_poly.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Polynomial support vector machines — svm_poly","text":"","code":"show_engines(\"svm_poly\") #> # A tibble: 2 × 2 #> engine mode #> #> 1 kernlab classification #> 2 kernlab regression svm_poly(mode = \"classification\", degree = 1.2) #> Polynomial Support Vector Machine Model Specification (classification) #> #> Main Arguments: #> degree = 1.2 #> #> Computational engine: kernlab #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_rbf.html","id":null,"dir":"Reference","previous_headings":"","what":"Radial basis function support vector machines — svm_rbf","title":"Radial basis function support vector machines — svm_rbf","text":"svm_rbf() defines support vector machine model. classification, model tries maximize width margin classes using nonlinear class boundary. regression, model optimizes robust loss function affected large model residuals uses nonlinear functions predictors. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
kernlab¹ information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_rbf.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Radial basis function support vector machines — svm_rbf","text":"","code":"svm_rbf( mode = \"unknown\", engine = \"kernlab\", cost = NULL, rbf_sigma = NULL, margin = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_rbf.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Radial basis function support vector machines — svm_rbf","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". engine single character string specifying computational engine use fitting. Possible engines listed . default model \"kernlab\". cost positive number cost predicting sample within wrong side margin rbf_sigma positive number radial basis function. margin positive number epsilon SVM insensitive loss function (regression )","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_rbf.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Radial basis function support vector machines — svm_rbf","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 svm_rbf(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_rbf.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Radial basis function support vector machines — svm_rbf","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_rbf.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Radial basis function support vector machines — svm_rbf","text":"","code":"show_engines(\"svm_rbf\") #> # A tibble: 4 × 2 #> engine mode #> #> 1 kernlab classification #> 2 kernlab regression #> 3 liquidSVM classification #> 4 liquidSVM regression svm_rbf(mode = \"classification\", rbf_sigma = 0.2) #> Radial Basis Function Support Vector Machine Model Specification (classification) #> #> Main Arguments: #> rbf_sigma = 0.2 #> #> Computational engine: kernlab #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy._LiblineaR.html","id":null,"dir":"Reference","previous_headings":"","what":"tidy methods for LiblineaR models — tidy._LiblineaR","title":"tidy methods for LiblineaR models — tidy._LiblineaR","text":"tidy() methods various LiblineaR models return coefficients parsnip model fit.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy._LiblineaR.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"tidy methods for LiblineaR models — tidy._LiblineaR","text":"","code":"# S3 method for `_LiblineaR` tidy(x, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy._LiblineaR.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"tidy methods for LiblineaR models — tidy._LiblineaR","text":"x fitted parsnip model used LiblineaR engine. ... 
used","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy._LiblineaR.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"tidy methods for LiblineaR models — tidy._LiblineaR","text":"tibble columns term estimate.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy._elnet.html","id":null,"dir":"Reference","previous_headings":"","what":"tidy methods for glmnet models — tidy._elnet","title":"tidy methods for glmnet models — tidy._elnet","text":"tidy() methods various glmnet models return coefficients specific penalty value used parsnip model fit.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy._elnet.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"tidy methods for glmnet models — tidy._elnet","text":"","code":"# S3 method for `_elnet` tidy(x, penalty = NULL, ...) # S3 method for `_lognet` tidy(x, penalty = NULL, ...) # S3 method for `_multnet` tidy(x, penalty = NULL, ...) # S3 method for `_fishnet` tidy(x, penalty = NULL, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy._elnet.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"tidy methods for glmnet models — tidy._elnet","text":"x fitted parsnip model used glmnet engine. penalty single numeric value. none given, value specified model specification used. ... used","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy._elnet.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"tidy methods for glmnet models — tidy._elnet","text":"tibble columns term, estimate, penalty. 
multinomial mode used, additional class column included.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.model_fit.html","id":null,"dir":"Reference","previous_headings":"","what":"Turn a parsnip model object into a tidy tibble — tidy.model_fit","title":"Turn a parsnip model object into a tidy tibble — tidy.model_fit","text":"method tidies model parsnip model object, exists.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.model_fit.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Turn a parsnip model object into a tidy tibble — tidy.model_fit","text":"","code":"# S3 method for model_fit tidy(x, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.model_fit.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Turn a parsnip model object into a tidy tibble — tidy.model_fit","text":"x object converted tidy tibble::tibble(). ... Additional arguments tidying method.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.model_fit.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Turn a parsnip model object into a tidy tibble — tidy.model_fit","text":"tibble","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.nullmodel.html","id":null,"dir":"Reference","previous_headings":"","what":"Tidy method for null models — tidy.nullmodel","title":"Tidy method for null models — tidy.nullmodel","text":"Return results nullmodel tibble","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.nullmodel.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Tidy method for null models — tidy.nullmodel","text":"","code":"# S3 method for nullmodel tidy(x, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.nullmodel.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Tidy method for null 
models — tidy.nullmodel","text":"x nullmodel object. ... used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.nullmodel.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Tidy method for null models — tidy.nullmodel","text":"tibble column value.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.nullmodel.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Tidy method for null models — tidy.nullmodel","text":"","code":"nullmodel(mtcars[,-1], mtcars$mpg) %>% tidy() #> # A tibble: 1 × 1 #> value #> #> 1 20.1"},{"path":"https://parsnip.tidymodels.org/dev/reference/translate.html","id":null,"dir":"Reference","previous_headings":"","what":"Resolve a Model Specification for a Computational Engine — translate","title":"Resolve a Model Specification for a Computational Engine — translate","text":"translate() translate model specification code object specific particular engine (e.g. R package). translates generic parameters counterparts.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/translate.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Resolve a Model Specification for a Computational Engine — translate","text":"","code":"translate(x, ...) # S3 method for default translate(x, engine = x$engine, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/translate.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Resolve a Model Specification for a Computational Engine — translate","text":"x model specification. ... currently used. 
engine computational engine model (see ?set_engine).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/translate.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Resolve a Model Specification for a Computational Engine — translate","text":"translate() produces template call lacks specific argument values (data, etc). filled fit() called specifics data model. call may also include tune() arguments specification. handle tune() arguments, need use tune package. information see https://www.tidymodels.org/start/tuning/ contain resolved argument names specific model fitting function/engine. function can useful need understand parsnip goes generic model specific model fitting function. Note: function used internally users use understand underlying syntax . used modify model specification.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/translate.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Resolve a Model Specification for a Computational Engine — translate","text":"","code":"lm_spec <- linear_reg(penalty = 0.01) # `penalty` is tranlsated to `lambda` translate(lm_spec, engine = \"glmnet\") #> Linear Regression Model Specification (regression) #> #> Main Arguments: #> penalty = 0.01 #> #> Computational engine: glmnet #> #> Model fit template: #> glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), #> family = \"gaussian\") # `penalty` not applicable for this model. 
translate(lm_spec, engine = \"lm\") #> Linear Regression Model Specification (regression) #> #> Main Arguments: #> penalty = 0.01 #> #> Computational engine: lm #> #> Model fit template: #> stats::lm(formula = missing_arg(), data = missing_arg(), weights = missing_arg()) # `penalty` is tranlsated to `reg_param` translate(lm_spec, engine = \"spark\") #> Linear Regression Model Specification (regression) #> #> Main Arguments: #> penalty = 0.01 #> #> Computational engine: spark #> #> Model fit template: #> sparklyr::ml_linear_regression(x = missing_arg(), formula = missing_arg(), #> weights = missing_arg(), reg_param = 0.01) # with a placeholder for an unknown argument value: translate(linear_reg(penalty = tune(), mixture = tune()), engine = \"glmnet\") #> Linear Regression Model Specification (regression) #> #> Main Arguments: #> penalty = tune() #> mixture = tune() #> #> Computational engine: glmnet #> #> Model fit template: #> glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), #> alpha = tune(), family = \"gaussian\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/type_sum.model_spec.html","id":null,"dir":"Reference","previous_headings":"","what":"Succinct summary of parsnip object — type_sum.model_spec","title":"Succinct summary of parsnip object — type_sum.model_spec","text":"type_sum controls objects shown inside tibble columns.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/type_sum.model_spec.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Succinct summary of parsnip object — type_sum.model_spec","text":"","code":"# S3 method for model_spec type_sum(x) # S3 method for model_fit type_sum(x)"},{"path":"https://parsnip.tidymodels.org/dev/reference/type_sum.model_spec.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Succinct summary of parsnip object — type_sum.model_spec","text":"x model_spec model_fit object 
summarise.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/type_sum.model_spec.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Succinct summary of parsnip object — type_sum.model_spec","text":"character value.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/type_sum.model_spec.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Succinct summary of parsnip object — type_sum.model_spec","text":"model_spec objects, summary \"spec[?]\" \"spec[+]\". former indicates either model mode declared specification tune() parameters. Otherwise, latter shown. fitted models, either \"fit[x]\" \"fit[+]\" used \"x\" implies model fit failed way.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/update_model_info_file.html","id":null,"dir":"Reference","previous_headings":"","what":"Save information about models — update_model_info_file","title":"Save information about models — update_model_info_file","text":"function writes tab delimited file package capture information known models. information includes packages tidymodels GitHub repository well packages known work well tidymodels packages (e.g. parsnip also tune, etc.). may model definitions extension packages included . 
data used document engines model function man page.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/update_model_info_file.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Save information about models — update_model_info_file","text":"","code":"update_model_info_file(path = \"inst/models.tsv\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/update_model_info_file.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Save information about models — update_model_info_file","text":"path character string location tab delimited file.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/update_model_info_file.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Save information about models — update_model_info_file","text":"See model implementation guidelines best practices modeling modeling packages. highly recommended known parsnip extension packages loaded. 
unexported parsnip function extensions() list .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/varying.html","id":null,"dir":"Reference","previous_headings":"","what":"A placeholder function for argument values — varying","title":"A placeholder function for argument values — varying","text":"varying() used parameter specified later date.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/varying.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"A placeholder function for argument values — varying","text":"","code":"varying()"},{"path":"https://parsnip.tidymodels.org/dev/reference/varying_args.html","id":null,"dir":"Reference","previous_headings":"","what":"Determine varying arguments — varying_args.model_spec","title":"Determine varying arguments — varying_args.model_spec","text":"varying_args() takes model specification recipe returns tibble information possible varying arguments whether actually varying. id column determined differently depending whether model_spec recipe used. model_spec, first class used. recipe, unique step id used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/varying_args.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Determine varying arguments — varying_args.model_spec","text":"","code":"# S3 method for model_spec varying_args(object, full = TRUE, ...) # S3 method for recipe varying_args(object, full = TRUE, ...) # S3 method for step varying_args(object, full = TRUE, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/varying_args.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Determine varying arguments — varying_args.model_spec","text":"object model_spec recipe. full single logical. possible varying parameters returned? FALSE, parameters actually varying returned. ... 
currently used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/varying_args.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Determine varying arguments — varying_args.model_spec","text":"tibble columns parameter name (name), whether contains varying value (varying), id object (id), class used call method (type).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/varying_args.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Determine varying arguments — varying_args.model_spec","text":"","code":"# List all possible varying args for the random forest spec rand_forest() %>% varying_args() #> Warning: `varying_args()` was deprecated in parsnip 0.1.8. #> ℹ Please use `tune_args()` instead. #> # A tibble: 3 × 4 #> name varying id type #> #> 1 mtry FALSE rand_forest model_spec #> 2 trees FALSE rand_forest model_spec #> 3 min_n FALSE rand_forest model_spec # mtry is now recognized as varying rand_forest(mtry = varying()) %>% varying_args() #> # A tibble: 3 × 4 #> name varying id type #> #> 1 mtry TRUE rand_forest model_spec #> 2 trees FALSE rand_forest model_spec #> 3 min_n FALSE rand_forest model_spec # Even engine specific arguments can vary rand_forest() %>% set_engine(\"ranger\", sample.fraction = varying()) %>% varying_args() #> # A tibble: 4 × 4 #> name varying id type #> #> 1 mtry FALSE rand_forest model_spec #> 2 trees FALSE rand_forest model_spec #> 3 min_n FALSE rand_forest model_spec #> 4 sample.fraction TRUE rand_forest model_spec # List only the arguments that actually vary rand_forest() %>% set_engine(\"ranger\", sample.fraction = varying()) %>% varying_args(full = FALSE) #> # A tibble: 1 × 4 #> name varying id type #> #> 1 sample.fraction TRUE rand_forest model_spec rand_forest() %>% set_engine( \"randomForest\", strata = Class, sampsize = varying() ) %>% varying_args() #> # A tibble: 5 × 4 #> name varying id type #> #> 1 mtry FALSE rand_forest 
model_spec #> 2 trees FALSE rand_forest model_spec #> 3 min_n FALSE rand_forest model_spec #> 4 strata FALSE rand_forest model_spec #> 5 sampsize TRUE rand_forest model_spec"},{"path":"https://parsnip.tidymodels.org/dev/reference/xgb_train.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees via xgboost — xgb_train","title":"Boosted trees via xgboost — xgb_train","text":"xgb_train() xgb_predict() wrappers xgboost tree-based models model arguments main function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/xgb_train.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Boosted trees via xgboost — xgb_train","text":"","code":"xgb_train( x, y, weights = NULL, max_depth = 6, nrounds = 15, eta = 0.3, colsample_bynode = NULL, colsample_bytree = NULL, min_child_weight = 1, gamma = 0, subsample = 1, validation = 0, early_stop = NULL, counts = TRUE, event_level = c(\"first\", \"second\"), ... ) xgb_predict(object, new_data, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/xgb_train.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Boosted trees via xgboost — xgb_train","text":"x data frame matrix predictors y vector (factor numeric) matrix (numeric) outcome data. max_depth integer maximum depth tree. nrounds integer number boosting iterations. eta numeric value zero one control learning rate. colsample_bynode Subsampling proportion columns node within tree. See counts argument . default uses columns. colsample_bytree Subsampling proportion columns tree. See counts argument . default uses columns. min_child_weight numeric value minimum sum instance weights needed child continue split. gamma number minimum loss reduction required make partition leaf node tree subsample Subsampling proportion rows. default, training data used. validation proportion data used performance assessment potential early stopping. early_stop integer NULL. 
NULL, number training iterations without improvement stopping. validation used, performance base validation set; otherwise, training set used. counts logical. FALSE, colsample_bynode colsample_bytree assumed proportions proportion columns affects (instead counts). event_level binary classification, single string either \"first\" \"second\" pass along describing level outcome considered \"event\". ... options pass xgb.train() xgboost's method predict(). new_data rectangular data object, data frame.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/xgb_train.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Boosted trees via xgboost — xgb_train","text":"fitted xgboost object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-development-version","dir":"Changelog","previous_headings":"","what":"parsnip (development version)","title":"parsnip (development version)","text":"Fixed bug fitting model types \"spark\" engine (#1045). Fixed issue mlp() metadata stop_iter engine argument mistakenly protected \"brulee\" engine. (#1050) .filter_eval_time() moved survival standalone file. Improved errors documentation related special terms formulas. See ?model_formula learn . (#770, #1014) Improved errors cases outcome column mis-specified. (#1003) Fixed documentation mlp(engine = \"brulee\"): default values learn_rate epochs swapped (#1018). new_data argument predict() method censoring_model_reverse_km objects deprecated (#965). computing censoring weights, resulting vectors longer named (#1023). Fixed bug integration workflows using model formula formula preprocessor result double intercept (#1033). predict() method censoring_model_reverse_km objects now checks ... 
empty (#1029).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-111","dir":"Changelog","previous_headings":"","what":"parsnip 1.1.1","title":"parsnip 1.1.1","text":"CRAN release: 2023-08-17 Fixed bug prediction rank deficient lm() models produced .pred_res instead .pred. (#985) Fixed bug sparse data coerced non-sparse format predict(). BART models dbarts engine, predict() can now also return standard error confidence prediction intervals (#976). augment() now works censored regression models. censored regression helper functions exported: .extract_surv_status() .extract_surv_time() (#973, #980). Fixed bug boost_tree() models couldn’t fit 1 predictor validation argument used. (#994)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-110","dir":"Changelog","previous_headings":"","what":"parsnip 1.1.0","title":"parsnip 1.1.0","text":"CRAN release: 2023-04-12 release parsnip contains number new features bug fixes, accompanied several optimizations substantially decrease time fit() predict() package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"improvements-to-glmnet-engine-interfaces-1-1-0","dir":"Changelog","previous_headings":"","what":"Improvements to \"glmnet\" engine interfaces","title":"parsnip 1.1.0","text":"glmnet models fitted base-R family objects now supported linear_reg(), logistic_reg(), multinomial_reg() (#890). multi_predict() methods linear_reg(), logistic_reg(), multinom_reg() models fitted \"glmnet\" engine now check type better error accordingly (#900). .organize_glmnet_pred() now expects predictions single penalty value (#876).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"survival-analysis-1-1-0","dir":"Changelog","previous_headings":"","what":"Survival analysis","title":"parsnip 1.1.0","text":"time argument predict_survival() predict_hazard() deprecated favor new eval_time argument (#936). 
Added several internal functions (help work Surv objects) standalone file can used packages via usethis::use_standalone(\"tidymodels/parsnip\"). changes provide tooling downstream packages handle inverse probability censoring weights (#893, #897, #937). internal method generating inverse probability censoring weights (IPCW) Graf et al (1999) available via .censoring_weights_graf().","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"bug-fixes-1-1-0","dir":"Changelog","previous_headings":"","what":"Bug fixes","title":"parsnip 1.1.0","text":"Made fit() behave consistently respect missingness classification setting. Previously, fit() erroneously raised error class outcome complete cases, now always passes along complete cases handled modeling function (#888). Fixed bug model fits engine = \"earth\" fail package’s namespace hadn’t attached (#251). Fixed bug model fits factor predictors engine = \"kknn\" fail package’s namespace hadn’t attached (#264). Fixed bug prediction boosted tree model fitted \"xgboost\" using custom objective function (#875).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-1-1-0","dir":"Changelog","previous_headings":"","what":"Other changes","title":"parsnip 1.1.0","text":"Implemented number optimizations parsnip’s backend substantially decrease evaluation time fit() predict() (#901, #902, #910, #921, #929, #923, #931, #932, #933). logistic_reg() now warn fit() outcome two levels (#545). Rather implemented method, check new_data argument mistakenly passed newdata multi_predict() now happens generic. Packages re-exporting multi_predict() generic implementing now-duplicate checks may see new failures can remove analogous checks. check already existed predict() methods (via predict.model_fit()) parsnip multi_predict() methods (#525). Functions now indicate class outcome outcome wrong class (#887). minimum version R now 3.5 (#926). 
Moved forward deprecation req_pkgs() favor required_pkgs(). function now error (#871). Transitioned soft-deprecations least year old warn-deprecations. changes apply fit_control(), surv_reg(), varying(), varying_args(), \"liquidSVM\" engine. Various bug fixes improvements documentation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-104","dir":"Changelog","previous_headings":"","what":"parsnip 1.0.4","title":"parsnip 1.0.4","text":"CRAN release: 2023-02-22 censored regression models, “reverse Kaplan-Meier” curve computed censoring distribution. can used evaluating type model (#855). model specification methods generics::tune_args() generics::tunable() now registered unconditionally (tidymodels/workflows#192).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-103","dir":"Changelog","previous_headings":"","what":"parsnip 1.0.3","title":"parsnip 1.0.3","text":"CRAN release: 2022-11-11 Adds documentation tuning infrastructure new flexsurvspline engine survival_reg() model specification censored package (@mattwarkentin, #831). matrix interface fitting fit_xy() now works \"censored regression\" mode (#829). num_leaves argument boost_tree()s lightgbm engine (via bonsai package) now tunable. change data checking code resulted 3-fold speed-parsnip (#835)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-102","dir":"Changelog","previous_headings":"","what":"parsnip 1.0.2","title":"parsnip 1.0.2","text":"CRAN release: 2022-10-01 bagged neural network model added (bag_mlp()). Engine implementations live baguette package. Fixed installation failures due undocumented knitr installation dependency (#785). fit_xy() now fails model mode unknown. brulee engine-specific tuning parameters updated. changes can used dials version > 1.0.0. fit() fit_xy() doesn’t error anymore control argument isn’t control_parsnip() object. 
work long object passed control includes elements control_parsnip(). Improved prompts related missing (loaded) extension packages well better handling model mode conflicts.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-101","dir":"Changelog","previous_headings":"","what":"parsnip 1.0.1","title":"parsnip 1.0.1","text":"CRAN release: 2022-08-18 Enabled passing additional engine arguments xgboost boost_tree() engine. supply engine-specific arguments documented xgboost::xgb.train() arguments passed via params, supply list elements directly named arguments set_engine(). Read ?details_boost_tree_xgboost (#787).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-100","dir":"Changelog","previous_headings":"","what":"parsnip 1.0.0","title":"parsnip 1.0.0","text":"CRAN release: 2022-06-16","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"model-specification-changes-1-0-0","dir":"Changelog","previous_headings":"","what":"Model Specification Changes","title":"parsnip 1.0.0","text":"Enable use case weights models support . show_model_info() now indicates models can utilize case weights. Model type functions now message informatively needed parsnip extension package loaded (#731). Refactored internals model specification printing functions. changes non-breaking extension packages, new print_model_spec() helper exported use extensions desired (#739).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"bug-fixes-1-0-0","dir":"Changelog","previous_headings":"","what":"Bug fixes","title":"parsnip 1.0.0","text":"Fixed bug previously set engine arguments propagate update() methods despite fresh = TRUE (#704). Fixed bug error thrown arguments model functions namespaced (#745). predict(type = \"prob\") now provide error outcome variable level called \"class\" (#720). 
inconsistency probability type predictions two-class GAM models fixed (#708) Fixed translated printing null_model() (#752)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-1-0-0","dir":"Changelog","previous_headings":"","what":"Other changes","title":"parsnip 1.0.0","text":"Added glm_grouped() function convert long data grouped format required glm() logistic regression. xgb_train() now allows case weights Added ctree_train() cforest_train() wrappers functions partykit package. Engines added parsnip extension packages. Exported xgb_predict() wraps xgboost’s predict() method use parsnip extension packages (#688). Added developer function, .model_param_name_key translates names tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-021","dir":"Changelog","previous_headings":"","what":"parsnip 0.2.1","title":"parsnip 0.2.1","text":"CRAN release: 2022-03-17 Fixed major bug spark models induced previous version (#671). Updated parsnip add-new models engines. Updated parameter ranges tunable() methods added missing engine argument brulee models. Added information install mixOmics package PLS models (#680)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-020","dir":"Changelog","previous_headings":"","what":"parsnip 0.2.0","title":"parsnip 0.2.0","text":"CRAN release: 2022-03-09","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"model-specification-changes-0-2-0","dir":"Changelog","previous_headings":"","what":"Model Specification Changes","title":"parsnip 0.2.0","text":"Bayesian additive regression trees (BART) added via bart() function. Added \"glm\" engine linear_reg() numeric outcomes (#624). 
Added brulee engines linear_reg(), logistic_reg(), multinom_reg() mlp().","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"bug-fixes-0-2-0","dir":"Changelog","previous_headings":"","what":"Bug fixes","title":"parsnip 0.2.0","text":"bug class predictions two-class GAM models fixed (#541) Fixed bug logistic_reg() LiblineaR engine (#552). list column produced creating survival probability predictions now always called .pred (.pred_survival used inside list column). Fixed outcome type checking affecting subset regression models (#625). Prediction using multinom_reg() nnet engine single row longer fails (#612).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-0-2-0","dir":"Changelog","previous_headings":"","what":"Other Changes","title":"parsnip 0.2.0","text":"xy interface used underlying model expects use matrix, better warning issued predictors contain non-numeric columns (including dates). fit time calculated verbosity argument control_parsnip() 2L greater. Also, call system.time() now uses gcFirst = FALSE. (#611) fit_control() soft-deprecated favor control_parsnip(). New extract_parameter_set_dials() method extract parameter sets model specs. New extract_parameter_dials() method extract single parameter model specs. Argument interval added prediction: types \"survival\" \"quantile\", estimates confidence prediction interval can added available (#615). set_dependency() now allows developers create package requirements specific model’s mode (#604). varying() soft-deprecated favor tune(). varying_args() soft-deprecated favor tune_args(). autoplot() method added glmnet objects, showing coefficient paths versus penalty values (#642). parsnip now robust working keras tensorflow larger range versions (#596). 
xgboost engines now use new iterationrange parameter instead deprecated ntreelimit (#656).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"developer-0-2-0","dir":"Changelog","previous_headings":"","what":"Developer","title":"parsnip 0.2.0","text":"Models information can re-registered long information registered . helpful packages add new engines use devtools::load_all() (#653).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-017","dir":"Changelog","previous_headings":"","what":"parsnip 0.1.7","title":"parsnip 0.1.7","text":"CRAN release: 2021-07-21","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"model-specification-changes-0-1-7","dir":"Changelog","previous_headings":"","what":"Model Specification Changes","title":"parsnip 0.1.7","text":"model function (gen_additive_mod()) added generalized additive models. model now default engine used model defined. default model listed help documents. also adds functionality declare engine model specification function. set_engine() still required engine-specific arguments need added. (#513) parsnip now checks valid combination engine mode (#529) default engine multinom_reg() changed nnet.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-0-1-7","dir":"Changelog","previous_headings":"","what":"Other Changes","title":"parsnip 0.1.7","text":"helper functions .convert_form_to_xy_fit(), .convert_form_to_xy_new(), .convert_xy_to_form_fit(), .convert_xy_to_form_new() converting formula matrix interface now exported developer use (#508). Fix bug augment() non-predictor, non-outcome variables included data (#510). New article “Fitting Predicting parsnip” contains examples various combinations model type engine. 
( #527)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-016","dir":"Changelog","previous_headings":"","what":"parsnip 0.1.6","title":"parsnip 0.1.6","text":"CRAN release: 2021-05-27","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"model-specification-changes-0-1-6","dir":"Changelog","previous_headings":"","what":"Model Specification Changes","title":"parsnip 0.1.6","text":"new linear SVM model svm_linear() now available LiblineaR engine (#424) kernlab engine (#438), LiblineaR engine available logistic_reg() well (#429). models can use sparse matrices via fit_xy() (#447) tidy method (#474). models glmnet engines: single value required penalty (either single numeric value value tune()) (#481). special argument called path_values can used set lambda path specific set numbers (independent value penalty). pure ridge regression models (.e., mixture = 1) generate incorrect values path include zero. See issue #431 discussion (#486). liquidSVM engine svm_rbf() deprecated due package’s removal CRAN. (#425) xgboost engine boosted trees translating mtry xgboost’s colsample_bytree. now map mtry colsample_bynode since consistent random forest works. colsample_bytree can still optimized passing engine argument. colsample_bynode added xgboost parsnip package code written. (#495) xgboost, mtry colsample_bytree can passed integer counts proportions, subsample validation always proportions. xgb_train() now new option counts (TRUE FALSE) states scale mtry colsample_bytree used. (#461)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-0-1-6","dir":"Changelog","previous_headings":"","what":"Other Changes","title":"parsnip 0.1.6","text":"Re-licensed package GPL-2 MIT. See consent copyright holders . set_mode() now checks mode compatible model class, similar new_model_spec() (@jtlandis, #467). set_mode() set_engine() now error NULL missing arguments (#503). 
Re-organized model documentation: update methods moved model help files (#479). model/engine combination help page. model help page dynamic bulleted list engines links individual help pages. generics::required_pkgs() extended parsnip objects. Prediction functions now give consistent error user uses unavailable value type (#489) augment() method changed avoid failing model enable class probabilities. method now returns tibbles despite input data class (#487) (#478) xgboost engines now respect event_level option predictions (#460).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-015","dir":"Changelog","previous_headings":"","what":"parsnip 0.1.5","title":"parsnip 0.1.5","text":"CRAN release: 2021-01-19 RStudio add-available makes writing multiple parsnip model specifications source window. can accessed via IDE addin menus calling parsnip_addin(). xgboost models, users can now pass objective set_engine(\"xgboost\"). (#403) Changes test cases CRAN get xgboost work Solaris configuration. now augument() method fitted models. See augment.model_fit. (#401) Column names x now required fit_xy() used. (#398) now event_level argument xgboost engine. (#420) New mode “censored regression” new prediction types “linear_pred”, “time”, “survival”, “hazard”. (#396) Censored regression models use fit_xy() (use fit()). (#442)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-014","dir":"Changelog","previous_headings":"","what":"parsnip 0.1.4","title":"parsnip 0.1.4","text":"CRAN release: 2020-10-27 show_engines() provide information current set model. three models (glmnet, xgboost, ranger), enable sparse matrix use via fit_xy() (#373). added protections added function arguments dependent data dimensions (e.g., mtry, neighbors, min_n, etc). 
(#184) Infrastructure improved running parsnip models parallel using PSOCK clusters Windows.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-013","dir":"Changelog","previous_headings":"","what":"parsnip 0.1.3","title":"parsnip 0.1.3","text":"CRAN release: 2020-08-04 glance() method model_fit objects added (#325) Specific tidy() methods glmnet models fit via parsnip created coefficients specific fitted parsnip model returned.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"fixes-0-1-3","dir":"Changelog","previous_headings":"","what":"Fixes","title":"parsnip 0.1.3","text":"glmnet models fitting two intercepts (#349) various update() methods now work engine-specific parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-012","dir":"Changelog","previous_headings":"","what":"parsnip 0.1.2","title":"parsnip 0.1.2","text":"CRAN release: 2020-07-03","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"breaking-changes-0-1-2","dir":"Changelog","previous_headings":"","what":"Breaking Changes","title":"parsnip 0.1.2","text":"parsnip now options set specific types predictor encodings different models. example, ranger models run using parsnip workflows thing creating indicator variables. encodings can overridden using blueprint options workflows. consequence, possible get different model fit previous versions parsnip. details specific encoding changes . (#326)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-0-1-2","dir":"Changelog","previous_headings":"","what":"Other Changes","title":"parsnip 0.1.2","text":"tidyr >= 1.0.0 now required. SVM models produced kernlab now use formula method (see breaking change notice ). change due ksvm() made indicator variables factor predictors (one-hot encodings). Since ordinary formula method , data passed -ksvm() results closer one get ksmv() called directly. 
MARS models produced earth now use formula method. xgboost, one-hot encoding used indicator variables created. --hood changes made non-standard data arguments modeling packages can accommodated. (#315)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"new-features-0-1-2","dir":"Changelog","previous_headings":"","what":"New Features","title":"parsnip 0.1.2","text":"new main argument added boost_tree() called stop_iter early stopping. xgb_train() function gained arguments early stopping percentage data leave validation set. fit() used underlying model uses formula, actual formula pass model (instead placeholder). makes model call better. function named repair_call() added. can help change underlying models call object better reflect obtained model function used directly (instead via parsnip). useful user chooses formula interface model uses formula interface. also limited use recipes used construct feature set workflows tune. predict() function now checks see required modeling packages installed. packages loaded (attached). (#249) (#308) (tidymodels/workflows#45) function req_pkgs() user interface determining required packages. 
(#308)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-011","dir":"Changelog","previous_headings":"","what":"parsnip 0.1.1","title":"parsnip 0.1.1","text":"CRAN release: 2020-05-06","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"new-features-0-1-1","dir":"Changelog","previous_headings":"","what":"New Features","title":"parsnip 0.1.1","text":"liquidSVM added engine svm_rbf() (#300)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"fixes-0-1-1","dir":"Changelog","previous_headings":"","what":"Fixes","title":"parsnip 0.1.1","text":"error message missing packages fixed (#289 #292)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-0-1-1","dir":"Changelog","previous_headings":"","what":"Other Changes","title":"parsnip 0.1.1","text":"S3 dispatch tidy() broken R 4.0.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-005","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.5","title":"parsnip 0.0.5","text":"CRAN release: 2020-01-07","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"fixes-0-0-5","dir":"Changelog","previous_headings":"","what":"Fixes","title":"parsnip 0.0.5","text":"bug (#206 #234) fixed caused error predicting multinomial glmnet model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-0-0-5","dir":"Changelog","previous_headings":"","what":"Other Changes","title":"parsnip 0.0.5","text":"glmnet removed dependency since new version depends 3.6.0 greater. Keeping constrain parsnip requirement. glmnet tests run locally. set internal functions now exported. 
helpful creating new package registers new model specifications.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"new-features-0-0-5","dir":"Changelog","previous_headings":"","what":"New Features","title":"parsnip 0.0.5","text":"nnet added engine multinom_reg() #209","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"breaking-changes-0-0-5","dir":"Changelog","previous_headings":"","what":"Breaking Changes","title":"parsnip 0.0.5","text":"mis-mapped parameters (going parsnip underlying model function) spark boosted trees keras models. See 897c927.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-004","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.4","title":"parsnip 0.0.4","text":"CRAN release: 2019-11-02","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"new-features-0-0-4","dir":"Changelog","previous_headings":"","what":"New Features","title":"parsnip 0.0.4","text":"time elapsed model fitting stored $elapsed slot parsnip model object, printed model object printed. default parameter ranges updated SVM, KNN, MARS models. model udpate() methods gained parameters argument cases parameters contained tibble list. fit_control() soft-deprecated favor control_parsnip().","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"fixes-0-0-4","dir":"Changelog","previous_headings":"","what":"Fixes","title":"parsnip 0.0.4","text":"bug fixed standardizing output column types multi_predict predict multinom_reg. bug fixed related using data descriptors fit_xy(). bug fixed related column names generated multi_predict(). top-level tibble always column named .pred list column contains tibbles across sub-models. column names sub-model tibbles names consistent predict() (previously incorrect). See 43c15db. 
bug fixed standardizing column names nnet class probability predictions.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-0031","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.3.1","title":"parsnip 0.0.3.1","text":"CRAN release: 2019-08-06 Test case update due CRAN running extra tests (#202)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-003","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.3","title":"parsnip 0.0.3","text":"CRAN release: 2019-07-31 Unplanned release based CRAN requirements Solaris.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"breaking-changes-0-0-3","dir":"Changelog","previous_headings":"","what":"Breaking Changes","title":"parsnip 0.0.3","text":"method parsnip stores model information changed. custom models previous versions need use new method registering models. methods detailed ?get_model_env package vignette adding models. mode needs declared models can used one mode prior fitting /translation. surv_reg(), engine uses survival package now called survival instead survreg. glmnet models, full regularization path always fit regardless value given penalty. Previously, model fit passing penalty glmnet’s lambda argument model make predictions specific values. (#195)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"new-features-0-0-3","dir":"Changelog","previous_headings":"","what":"New Features","title":"parsnip 0.0.3","text":"add_rowindex() can create column called .row data frame. computational engine explicitly set, default used. default documented corresponding model page. warning issued fit time unless verbosity zero. nearest_neighbor() gained multi_predict method. multi_predict() documentation little better organized. suite internal functions added help upcoming model tuning features. 
parsnip object always saved name(s) outcome variable(s) proper naming predicted values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-002","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.2","title":"parsnip 0.0.2","text":"CRAN release: 2019-03-22 Small release driven changes sample() current r-devel.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"new-features-0-0-2","dir":"Changelog","previous_headings":"","what":"New Features","title":"parsnip 0.0.2","text":"“null model” now available fits predictor-free model (using mean outcome regression mode classification). fit_xy() can take single column data frame matrix y without error","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-0-0-2","dir":"Changelog","previous_headings":"","what":"Other Changes","title":"parsnip 0.0.2","text":"varying_args() now full argument control whether full set possible varying arguments returned (opposed arguments actually varying). fit_control() returns S3 method. classification models, error occurs outcome data encoded factors (#115). prediction modules (e.g. predict_class, predict_numeric, etc) de-exported. internal functions used users users using . event time data set (check_times) included time (seconds) run R CMD check using “r-devel-windows-ix86+x86_64` flavor. Packages errored censored.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"bug-fixes-0-0-2","dir":"Changelog","previous_headings":"","what":"Bug Fixes","title":"parsnip 0.0.2","text":"varying_args() now uses version generics package. means first argument, x, renamed object align generics. recipes step method varying_args(), now error checking catch user tries specify argument varying varying (example, id) (#132). find_varying(), internal function detecting varying arguments, now returns correct results size 0 argument provided. 
can also now detect varying arguments nested deeply call (#131, #134). multinomial regression, .pred_ prefix now added prediction column names (#107). multinomial regression using glmnet, multi_predict() now pulls correct default penalty (#108). Confidence prediction intervals logistic regression computed intervals single level. now computed. (#156)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-001","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.1","title":"parsnip 0.0.1","text":"CRAN release: 2018-11-12 First CRAN release","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-0009005","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.0.9005","title":"parsnip 0.0.0.9005","text":"engine, associated arguments, now specified using set_engine(). engine argument","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-0009004","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.0.9004","title":"parsnip 0.0.0.9004","text":"Arguments modeling functions now captured quosures. others replaced ... Data descriptor names changed now functions. descriptor definitions “cols” “preds” switched.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-0009003","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.0.9003","title":"parsnip 0.0.0.9003","text":"regularization changed penalty models consistent change. mode chosen model specification, assigned time fit. 51 underlying modeling packages now loaded namespace. exceptions noted documentation model. example, predict methods, earth package need attached fully operational.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-0009002","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.0.9002","title":"parsnip 0.0.0.9002","text":"consistent snake_case, newdata changed new_data. 
predict_raw method added.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-0009001","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.0.9001","title":"parsnip 0.0.0.9001","text":"package dependency suffered new change.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-0009000","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.0.9000","title":"parsnip 0.0.0.9000","text":"fit interface previously used cover x/y interface well formula interface. Now, fit() formula interface fit_xy() x/y interface. Added NEWS.md file track changes package. predict methods overhauled consistent. MARS added.","code":""}] +[{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"our-pledge","dir":"","previous_headings":"","what":"Our Pledge","title":"Contributor Covenant Code of Conduct","text":"members, contributors, leaders pledge make participation community harassment-free experience everyone, regardless age, body size, visible invisible disability, ethnicity, sex characteristics, gender identity expression, level experience, education, socio-economic status, nationality, personal appearance, race, caste, color, religion, sexual identity orientation. 
pledge act interact ways contribute open, welcoming, diverse, inclusive, healthy community.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"our-standards","dir":"","previous_headings":"","what":"Our Standards","title":"Contributor Covenant Code of Conduct","text":"Examples behavior contributes positive environment community include: Demonstrating empathy kindness toward people respectful differing opinions, viewpoints, experiences Giving gracefully accepting constructive feedback Accepting responsibility apologizing affected mistakes, learning experience Focusing best just us individuals, overall community Examples unacceptable behavior include: use sexualized language imagery, sexual attention advances kind Trolling, insulting derogatory comments, personal political attacks Public private harassment Publishing others’ private information, physical email address, without explicit permission conduct reasonably considered inappropriate professional setting","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"enforcement-responsibilities","dir":"","previous_headings":"","what":"Enforcement Responsibilities","title":"Contributor Covenant Code of Conduct","text":"Community leaders responsible clarifying enforcing standards acceptable behavior take appropriate fair corrective action response behavior deem inappropriate, threatening, offensive, harmful. Community leaders right responsibility remove, edit, reject comments, commits, code, wiki edits, issues, contributions aligned Code Conduct, communicate reasons moderation decisions appropriate.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"scope","dir":"","previous_headings":"","what":"Scope","title":"Contributor Covenant Code of Conduct","text":"Code Conduct applies within community spaces, also applies individual officially representing community public spaces. 
Examples representing community include using official e-mail address, posting via official social media account, acting appointed representative online offline event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"enforcement","dir":"","previous_headings":"","what":"Enforcement","title":"Contributor Covenant Code of Conduct","text":"Instances abusive, harassing, otherwise unacceptable behavior may reported community leaders responsible enforcement codeofconduct@posit.co. complaints reviewed investigated promptly fairly. community leaders obligated respect privacy security reporter incident.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"enforcement-guidelines","dir":"","previous_headings":"","what":"Enforcement Guidelines","title":"Contributor Covenant Code of Conduct","text":"Community leaders follow Community Impact Guidelines determining consequences action deem violation Code Conduct:","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"id_1-correction","dir":"","previous_headings":"Enforcement Guidelines","what":"1. Correction","title":"Contributor Covenant Code of Conduct","text":"Community Impact: Use inappropriate language behavior deemed unprofessional unwelcome community. Consequence: private, written warning community leaders, providing clarity around nature violation explanation behavior inappropriate. public apology may requested.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"id_2-warning","dir":"","previous_headings":"Enforcement Guidelines","what":"2. Warning","title":"Contributor Covenant Code of Conduct","text":"Community Impact: violation single incident series actions. Consequence: warning consequences continued behavior. interaction people involved, including unsolicited interaction enforcing Code Conduct, specified period time. 
includes avoiding interactions community spaces well external channels like social media. Violating terms may lead temporary permanent ban.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"id_3-temporary-ban","dir":"","previous_headings":"Enforcement Guidelines","what":"3. Temporary Ban","title":"Contributor Covenant Code of Conduct","text":"Community Impact: serious violation community standards, including sustained inappropriate behavior. Consequence: temporary ban sort interaction public communication community specified period time. public private interaction people involved, including unsolicited interaction enforcing Code Conduct, allowed period. Violating terms may lead permanent ban.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"id_4-permanent-ban","dir":"","previous_headings":"Enforcement Guidelines","what":"4. Permanent Ban","title":"Contributor Covenant Code of Conduct","text":"Community Impact: Demonstrating pattern violation community standards, including sustained inappropriate behavior, harassment individual, aggression toward disparagement classes individuals. Consequence: permanent ban sort public interaction within community.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CODE_OF_CONDUCT.html","id":"attribution","dir":"","previous_headings":"","what":"Attribution","title":"Contributor Covenant Code of Conduct","text":"Code Conduct adapted Contributor Covenant, version 2.1, available https://www.contributor-covenant.org/version/2/1/code_of_conduct.html. Community Impact Guidelines inspired [Mozilla’s code conduct enforcement ladder][https://github.com/mozilla/inclusion]. answers common questions code conduct, see FAQ https://www.contributor-covenant.org/faq. 
Translations available https://www.contributor-covenant.org/translations.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CONTRIBUTING.html","id":null,"dir":"","previous_headings":"","what":"Contributing to tidymodels","title":"Contributing to tidymodels","text":"detailed information contributing tidymodels packages, see development contributing guide.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CONTRIBUTING.html","id":"documentation","dir":"","previous_headings":"","what":"Documentation","title":"Contributing to tidymodels","text":"Typos grammatical errors documentation may edited directly using GitHub web interface, long changes made source file. YES ✅: edit roxygen comment .R file R/ directory. 🚫: edit .Rd file man/ directory. use roxygen2, Markdown syntax, documentation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CONTRIBUTING.html","id":"code","dir":"","previous_headings":"","what":"Code","title":"Contributing to tidymodels","text":"submit 🎯 pull request tidymodels package, always file issue confirm tidymodels team agrees idea happy basic proposal. tidymodels packages work together. package contains unit tests, integration tests tests using packages contained extratests. pull requests, recommend create fork repo usethis::create_from_github(), initiate new branch usethis::pr_init(). Look build status making changes. README contains badges continuous integration services used package. New code follow tidyverse style guide. can use styler package apply styles, please don’t restyle code nothing PR. user-facing changes, add bullet top NEWS.md current development version header describing changes made followed GitHub username, links relevant issue(s)/PR(s). use testthat. Contributions test cases included easier accept. contribution spans use one package, consider building extratests changes check breakages /adding new tests . 
Let us know PR ran extra tests.","code":""},{"path":"https://parsnip.tidymodels.org/dev/CONTRIBUTING.html","id":"code-of-conduct","dir":"","previous_headings":"Code","what":"Code of Conduct","title":"Contributing to tidymodels","text":"project released Contributor Code Conduct. contributing project, agree abide terms.","code":""},{"path":"https://parsnip.tidymodels.org/dev/LICENSE.html","id":null,"dir":"","previous_headings":"","what":"MIT License","title":"MIT License","text":"Copyright (c) 2021 parsnip authors Permission hereby granted, free charge, person obtaining copy software associated documentation files (“Software”), deal Software without restriction, including without limitation rights use, copy, modify, merge, publish, distribute, sublicense, /sell copies Software, permit persons Software furnished , subject following conditions: copyright notice permission notice shall included copies substantial portions Software. SOFTWARE PROVIDED “”, WITHOUT WARRANTY KIND, EXPRESS IMPLIED, INCLUDING LIMITED WARRANTIES MERCHANTABILITY, FITNESS PARTICULAR PURPOSE NONINFRINGEMENT. EVENT SHALL AUTHORS COPYRIGHT HOLDERS LIABLE CLAIM, DAMAGES LIABILITY, WHETHER ACTION CONTRACT, TORT OTHERWISE, ARISING , CONNECTION SOFTWARE USE DEALINGS SOFTWARE.","code":""},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"bart-models","dir":"Articles","previous_headings":"","what":"bart() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. 
’ll bind together one tibble:","code":"library(tidymodels) ## ── Attaching packages ─────────────────────────── tidymodels 1.1.1.9000 ── ## ✔ broom 1.0.5 ✔ rsample 1.2.0 ## ✔ dials 1.2.0 ✔ tibble 3.2.1 ## ✔ dplyr 1.1.4 ✔ tidyr 1.3.0 ## ✔ infer 1.0.5 ✔ tune 1.1.2 ## ✔ modeldata 1.3.0 ✔ workflows 1.1.3 ## ✔ parsnip 1.1.1.9007 ✔ workflowsets 1.0.1 ## ✔ purrr 1.0.2 ✔ yardstick 1.3.0 ## ✔ recipes 1.0.9 ## ── Conflicts ─────────────────────────────────── tidymodels_conflicts() ── ## ✖ purrr::discard() masks scales::discard() ## ✖ dplyr::filter() masks stats::filter() ## ✖ dplyr::lag() masks stats::lag() ## ✖ recipes::step() masks stats::step() ## • Learn how to get started at https://www.tidymodels.org/start/ tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] bt_reg_spec <- bart(trees = 15) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"dbarts\") bt_reg_spec ## BART Model Specification (regression) ## ## Main Arguments: ## trees = 15 ## ## Computational engine: dbarts set.seed(1) bt_reg_fit <- bt_reg_spec %>% fit(ridership ~ ., data = Chicago_train) bt_reg_fit ## parsnip model object ## ## ## Call: ## `NULL`() predict(bt_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.1 ## 2 20.3 ## 3 21.3 ## 4 20.2 ## 5 19.4 ## 6 7.51 ## 7 6.44 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] bt_cls_spec <- bart(trees = 15) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"dbarts\") bt_cls_spec ## ## Call: ## NULL set.seed(1) bt_cls_fit <- bt_cls_spec %>% fit(Class ~ ., data = data_train) bt_cls_fit ## parsnip model object ## ## ## Call: ## `NULL`() bind_cols( predict(bt_cls_fit, data_test), 
predict(bt_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.352 0.648 ## 2 Class1 0.823 0.177 ## 3 Class1 0.497 0.503 ## 4 Class2 0.509 0.491 ## 5 Class2 0.434 0.566 ## 6 Class2 0.185 0.815 ## 7 Class1 0.663 0.337 ## 8 Class2 0.392 0.608 ## 9 Class1 0.967 0.033 ## 10 Class2 0.095 0.905"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"boost_tree-models","dir":"Articles","previous_headings":"","what":"boost_tree() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. 
’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] bt_reg_spec <- boost_tree(trees = 15) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"xgboost\") bt_reg_spec ## Boosted Tree Model Specification (regression) ## ## Main Arguments: ## trees = 15 ## ## Computational engine: xgboost set.seed(1) bt_reg_fit <- bt_reg_spec %>% fit(ridership ~ ., data = Chicago_train) bt_reg_fit ## parsnip model object ## ## ##### xgb.Booster ## raw: 51.4 Kb ## call: ## xgboost::xgb.train(params = list(eta = 0.3, max_depth = 6, gamma = 0, ## colsample_bytree = 1, colsample_bynode = 1, min_child_weight = 1, ## subsample = 1), data = x$data, nrounds = 15, watchlist = x$watchlist, ## verbose = 0, nthread = 1, objective = \"reg:squarederror\") ## params (as set within xgb.train): ## eta = \"0.3\", max_depth = \"6\", gamma = \"0\", colsample_bytree = \"1\", colsample_bynode = \"1\", min_child_weight = \"1\", subsample = \"1\", nthread = \"1\", objective = \"reg:squarederror\", validate_parameters = \"TRUE\" ## xgb.attributes: ## niter ## callbacks: ## cb.evaluation.log() ## # of features: 2 ## niter: 15 ## nfeatures : 2 ## evaluation_log: ## iter training_rmse ## 1 10.481475 ## 2 7.620929 ## --- ## 14 2.551943 ## 15 2.531085 predict(bt_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.6 ## 2 20.6 ## 3 20.2 ## 4 20.6 ## 5 19.3 ## 6 7.26 ## 7 5.92 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] bt_cls_spec <- boost_tree(trees = 15) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"xgboost\") bt_cls_spec ## Boosted Tree Model Specification (classification) ## 
## Main Arguments: ## trees = 15 ## ## Computational engine: xgboost set.seed(1) bt_cls_fit <- bt_cls_spec %>% fit(Class ~ ., data = data_train) bt_cls_fit ## parsnip model object ## ## ##### xgb.Booster ## raw: 40.8 Kb ## call: ## xgboost::xgb.train(params = list(eta = 0.3, max_depth = 6, gamma = 0, ## colsample_bytree = 1, colsample_bynode = 1, min_child_weight = 1, ## subsample = 1), data = x$data, nrounds = 15, watchlist = x$watchlist, ## verbose = 0, nthread = 1, objective = \"binary:logistic\") ## params (as set within xgb.train): ## eta = \"0.3\", max_depth = \"6\", gamma = \"0\", colsample_bytree = \"1\", colsample_bynode = \"1\", min_child_weight = \"1\", subsample = \"1\", nthread = \"1\", objective = \"binary:logistic\", validate_parameters = \"TRUE\" ## xgb.attributes: ## niter ## callbacks: ## cb.evaluation.log() ## # of features: 2 ## niter: 15 ## nfeatures : 2 ## evaluation_log: ## iter training_logloss ## 1 0.5524619 ## 2 0.4730697 ## --- ## 14 0.2523133 ## 15 0.2490712 bind_cols( predict(bt_cls_fit, data_test), predict(bt_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.220 0.780 ## 2 Class1 0.931 0.0689 ## 3 Class1 0.638 0.362 ## 4 Class1 0.815 0.185 ## 5 Class2 0.292 0.708 ## 6 Class2 0.120 0.880 ## 7 Class1 0.796 0.204 ## 8 Class2 0.392 0.608 ## 9 Class1 0.879 0.121 ## 10 Class2 0.0389 0.961 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] bt_cls_spec <- boost_tree(trees = 15) %>% set_mode(\"classification\") %>% set_engine(\"C5.0\") bt_cls_spec ## Boosted Tree Model Specification (classification) ## ## Main Arguments: ## trees = 15 ## ## Computational engine: C5.0 set.seed(1) bt_cls_fit <- bt_cls_spec %>% fit(Class ~ ., data = data_train) bt_cls_fit ## parsnip model object ## ## ## Call: ## C5.0.default(x = x, y = y, trials = 15, control ## = C50::C5.0Control(minCases = 2, sample = 0)) ## 
## Classification Tree ## Number of samples: 781 ## Number of predictors: 2 ## ## Number of boosting iterations: 15 requested; 6 used due to early stopping ## Average tree size: 3.2 ## ## Non-standard options: attempt to group attributes bind_cols( predict(bt_cls_fit, data_test), predict(bt_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.311 0.689 ## 2 Class1 0.863 0.137 ## 3 Class1 0.535 0.465 ## 4 Class2 0.336 0.664 ## 5 Class2 0.336 0.664 ## 6 Class2 0.137 0.863 ## 7 Class2 0.496 0.504 ## 8 Class2 0.311 0.689 ## 9 Class1 1 0 ## 10 Class2 0 1"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"decision_tree-models","dir":"Articles","previous_headings":"","what":"decision_tree() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. 
’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] dt_reg_spec <- decision_tree(tree_depth = 30) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"rpart\") dt_reg_spec ## Decision Tree Model Specification (regression) ## ## Main Arguments: ## tree_depth = 30 ## ## Computational engine: rpart set.seed(1) dt_reg_fit <- dt_reg_spec %>% fit(ridership ~ ., data = Chicago_train) dt_reg_fit ## parsnip model object ## ## n= 5691 ## ## node), split, n, deviance, yval ## * denotes terminal node ## ## 1) root 5691 244958.800 13.615560 ## 2) Quincy_Wells< 2.737 1721 22973.630 5.194394 ## 4) Clark_Lake< 5.07 1116 13166.830 4.260215 * ## 5) Clark_Lake>=5.07 605 7036.349 6.917607 * ## 3) Quincy_Wells>=2.737 3970 47031.540 17.266140 ## 6) Clark_Lake< 17.6965 1940 16042.090 15.418210 * ## 7) Clark_Lake>=17.6965 2030 18033.560 19.032140 * predict(dt_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 19.0 ## 2 19.0 ## 3 19.0 ## 4 19.0 ## 5 19.0 ## 6 6.92 ## 7 6.92 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] dt_cls_spec <- decision_tree(tree_depth = 30) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"rpart\") dt_cls_spec ## Decision Tree Model Specification (classification) ## ## Main Arguments: ## tree_depth = 30 ## ## Computational engine: rpart set.seed(1) dt_cls_fit <- dt_cls_spec %>% fit(Class ~ ., data = data_train) dt_cls_fit ## parsnip model object ## ## n= 781 ## ## node), split, n, loss, yval, (yprob) ## * denotes terminal node ## ## 1) root 781 348 Class1 (0.5544174 0.4455826) ## 2) B< 1.495535 400 61 Class1 (0.8475000 0.1525000) * ## 3) 
B>=1.495535 381 94 Class2 (0.2467192 0.7532808) ## 6) B< 2.079458 191 70 Class2 (0.3664921 0.6335079) ## 12) A>=2.572663 48 13 Class1 (0.7291667 0.2708333) * ## 13) A< 2.572663 143 35 Class2 (0.2447552 0.7552448) * ## 7) B>=2.079458 190 24 Class2 (0.1263158 0.8736842) * bind_cols( predict(dt_cls_fit, data_test), predict(dt_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.245 0.755 ## 2 Class1 0.848 0.152 ## 3 Class1 0.848 0.152 ## 4 Class1 0.729 0.271 ## 5 Class1 0.729 0.271 ## 6 Class2 0.126 0.874 ## 7 Class2 0.245 0.755 ## 8 Class2 0.245 0.755 ## 9 Class1 0.848 0.152 ## 10 Class2 0.126 0.874 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] dt_cls_spec <- decision_tree(min_n = 2) %>% set_mode(\"classification\") %>% set_engine(\"C5.0\") dt_cls_spec ## Decision Tree Model Specification (classification) ## ## Main Arguments: ## min_n = 2 ## ## Computational engine: C5.0 set.seed(1) dt_cls_fit <- dt_cls_spec %>% fit(Class ~ ., data = data_train) dt_cls_fit ## parsnip model object ## ## ## Call: ## C5.0.default(x = x, y = y, trials = 1, control ## = C50::C5.0Control(minCases = 2, sample = 0)) ## ## Classification Tree ## Number of samples: 781 ## Number of predictors: 2 ## ## Tree size: 4 ## ## Non-standard options: attempt to group attributes bind_cols( predict(dt_cls_fit, data_test), predict(dt_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.233 0.767 ## 2 Class1 0.847 0.153 ## 3 Class1 0.847 0.153 ## 4 Class1 0.727 0.273 ## 5 Class1 0.727 0.273 ## 6 Class2 0.118 0.882 ## 7 Class2 0.233 0.767 ## 8 Class2 0.233 0.767 ## 9 Class1 0.847 0.153 ## 10 Class2 0.118 0.882"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"gen_additive_mod-models","dir":"Articles","previous_headings":"","what":"gen_additive_mod() 
models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] gam_reg_spec <- gen_additive_mod(select_features = FALSE, adjust_deg_free = 10) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"mgcv\") gam_reg_spec ## GAM Model Specification (regression) ## ## Main Arguments: ## select_features = FALSE ## adjust_deg_free = 10 ## ## Computational engine: mgcv set.seed(1) gam_reg_fit <- gam_reg_spec %>% fit(ridership ~ Clark_Lake + Quincy_Wells, data = Chicago_train) gam_reg_fit ## parsnip model object ## ## ## Family: gaussian ## Link function: identity ## ## Formula: ## ridership ~ Clark_Lake + Quincy_Wells ## Total model degrees of freedom 3 ## ## GCV score: 9.505245 predict(gam_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.3 ## 2 20.5 ## 3 20.8 ## 4 20.5 ## 5 18.8 ## 6 7.45 ## 7 7.02 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] gam_cls_spec <- gen_additive_mod(select_features = FALSE, adjust_deg_free = 10) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"mgcv\") gam_cls_spec ## 
GAM Model Specification (classification) ## ## Main Arguments: ## select_features = FALSE ## adjust_deg_free = 10 ## ## Computational engine: mgcv set.seed(1) gam_cls_fit <- gam_cls_spec %>% fit(Class ~ A + B, data = data_train) gam_cls_fit ## parsnip model object ## ## ## Family: binomial ## Link function: logit ## ## Formula: ## Class ~ A + B ## Total model degrees of freedom 3 ## ## UBRE score: -0.07548008 bind_cols( predict(gam_cls_fit, data_test), predict(gam_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class1 0.518 0.482 ## 2 Class1 0.909 0.0913 ## 3 Class1 0.648 0.352 ## 4 Class1 0.610 0.390 ## 5 Class2 0.443 0.557 ## 6 Class2 0.206 0.794 ## 7 Class1 0.708 0.292 ## 8 Class1 0.567 0.433 ## 9 Class1 0.994 0.00582 ## 10 Class2 0.108 0.892"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"linear_reg-models","dir":"Articles","previous_headings":"","what":"linear_reg() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: ’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: ’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. 
can define model specific parameters: Now create model fit object: holdout data can predicted: ’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: ’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] linreg_reg_spec <- linear_reg() %>% set_engine(\"lm\") linreg_reg_spec ## Linear Regression Model Specification (regression) ## ## Computational engine: lm set.seed(1) linreg_reg_fit <- linreg_reg_spec %>% fit(ridership ~ ., data = Chicago_train) linreg_reg_fit ## parsnip model object ## ## ## Call: ## stats::lm(formula = ridership ~ ., data = data) ## ## Coefficients: ## (Intercept) Clark_Lake Quincy_Wells ## 1.6624 0.7738 0.2557 predict(linreg_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.3 ## 2 20.5 ## 3 20.8 ## 4 20.5 ## 5 18.8 ## 6 7.45 ## 7 7.02 library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] linreg_reg_spec <- linear_reg() %>% set_engine(\"glm\") linreg_reg_spec ## Linear Regression Model Specification (regression) ## ## Computational engine: glm set.seed(1) linreg_reg_fit <- linreg_reg_spec %>% fit(ridership ~ ., data = Chicago_train) linreg_reg_fit ## parsnip 
model object ## ## ## Call: stats::glm(formula = ridership ~ ., family = stats::gaussian, ## data = data) ## ## Coefficients: ## (Intercept) Clark_Lake Quincy_Wells ## 1.6624 0.7738 0.2557 ## ## Degrees of Freedom: 5690 Total (i.e. Null); 5688 Residual ## Null Deviance: 245000 ## Residual Deviance: 53530 AIC: 28910 predict(linreg_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.3 ## 2 20.5 ## 3 20.8 ## 4 20.5 ## 5 18.8 ## 6 7.45 ## 7 7.02 library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] linreg_reg_spec <- linear_reg(penalty = 0.1) %>% set_engine(\"glmnet\") linreg_reg_spec ## Linear Regression Model Specification (regression) ## ## Main Arguments: ## penalty = 0.1 ## ## Computational engine: glmnet set.seed(1) linreg_reg_fit <- linreg_reg_spec %>% fit(ridership ~ ., data = Chicago_train) linreg_reg_fit ## parsnip model object ## ## ## Call: glmnet::glmnet(x = maybe_matrix(x), y = y, family = \"gaussian\") ## ## Df %Dev Lambda ## 1 0 0.00 5.7970 ## 2 1 13.25 5.2820 ## 3 1 24.26 4.8130 ## 4 1 33.40 4.3850 ## 5 1 40.98 3.9960 ## 6 1 47.28 3.6410 ## 7 1 52.51 3.3170 ## 8 1 56.85 3.0220 ## 9 1 60.45 2.7540 ## 10 1 63.44 2.5090 ## 11 1 65.92 2.2860 ## 12 1 67.99 2.0830 ## 13 1 69.70 1.8980 ## 14 1 71.12 1.7300 ## 15 1 72.30 1.5760 ## 16 2 73.29 1.4360 ## 17 2 74.11 1.3080 ## 18 2 74.80 1.1920 ## 19 2 75.37 1.0860 ## 20 2 75.84 0.9897 ## 21 2 76.23 0.9018 ## 22 2 76.56 0.8217 ## 23 2 76.83 0.7487 ## 24 2 77.05 0.6822 ## 25 2 77.24 0.6216 ## 26 2 77.39 0.5664 ## 27 2 77.52 0.5160 ## 28 2 77.63 0.4702 ## 29 2 77.72 0.4284 ## 30 2 77.79 0.3904 ## 31 2 77.85 0.3557 ## 32 2 77.90 0.3241 ## 33 2 77.94 0.2953 ## 34 2 77.98 0.2691 ## 35 2 78.01 0.2452 ## 36 2 78.03 0.2234 ## 37 2 78.05 0.2035 ## 38 2 78.07 0.1855 ## 39 2 78.08 0.1690 ## 40 2 78.09 0.1540 ## 41 2 78.10 0.1403 ## 42 2 78.11 0.1278 ## 43 
2 78.12 0.1165 ## 44 2 78.12 0.1061 ## 45 2 78.13 0.0967 ## 46 2 78.13 0.0881 ## 47 2 78.13 0.0803 ## 48 2 78.14 0.0732 ## 49 2 78.14 0.0666 ## 50 2 78.14 0.0607 ## 51 2 78.14 0.0553 ## 52 2 78.14 0.0504 ## 53 2 78.14 0.0459 ## 54 2 78.15 0.0419 ## 55 2 78.15 0.0381 predict(linreg_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.2 ## 2 20.4 ## 3 20.7 ## 4 20.4 ## 5 18.7 ## 6 7.57 ## 7 7.15 library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] linreg_reg_spec <- linear_reg(penalty = 0.1) %>% set_engine(\"keras\") linreg_reg_spec ## Linear Regression Model Specification (regression) ## ## Main Arguments: ## penalty = 0.1 ## ## Computational engine: keras set.seed(1) linreg_reg_fit <- linreg_reg_spec %>% fit(ridership ~ ., data = Chicago_train) ## Epoch 1/20 ## 1/178 [..............................] - ETA: 37s - loss: 4.0178 72/178 [===========>..................] - ETA: 0s - loss: 9.2526 141/178 [======================>.......] - ETA: 0s - loss: 9.9516 178/178 [==============================] - 0s 734us/step - loss: 10.0162 ## Epoch 2/20 ## 1/178 [..............................] - ETA: 0s - loss: 8.8066 68/178 [==========>...................] - ETA: 0s - loss: 9.5041 137/178 [======================>.......] - ETA: 0s - loss: 9.4180 178/178 [==============================] - 0s 832us/step - loss: 9.9176 ## Epoch 3/20 ## 1/178 [..............................] - ETA: 0s - loss: 13.9874 70/178 [==========>...................] - ETA: 0s - loss: 9.7820 139/178 [======================>.......] - ETA: 0s - loss: 10.1092 178/178 [==============================] - 0s 751us/step - loss: 9.8406 ## Epoch 4/20 ## 1/178 [..............................] - ETA: 0s - loss: 2.4863 70/178 [==========>...................] - ETA: 0s - loss: 9.8285 139/178 [======================>.......] 
- ETA: 0s - loss: 9.5838 178/178 [==============================] - 0s 737us/step - loss: 9.7912 ## Epoch 5/20 ## 1/178 [..............................] - ETA: 0s - loss: 3.7076 70/178 [==========>...................] - ETA: 0s - loss: 10.5940 139/178 [======================>.......] - ETA: 0s - loss: 9.9238 178/178 [==============================] - 0s 740us/step - loss: 9.7440 ## Epoch 6/20 ## 1/178 [..............................] - ETA: 0s - loss: 17.2106 70/178 [==========>...................] - ETA: 0s - loss: 9.4003 124/178 [===================>..........] - ETA: 0s - loss: 9.9788 178/178 [==============================] - 0s 803us/step - loss: 9.7044 ## Epoch 7/20 ## 1/178 [..............................] - ETA: 0s - loss: 8.6231 70/178 [==========>...................] - ETA: 0s - loss: 9.7931 139/178 [======================>.......] - ETA: 0s - loss: 9.8011 178/178 [==============================] - 0s 741us/step - loss: 9.6681 ## Epoch 8/20 ## 1/178 [..............................] - ETA: 0s - loss: 8.8271 70/178 [==========>...................] - ETA: 0s - loss: 9.4372 140/178 [======================>.......] - ETA: 0s - loss: 9.6402 178/178 [==============================] - 0s 733us/step - loss: 9.6429 ## Epoch 9/20 ## 1/178 [..............................] - ETA: 0s - loss: 9.8849 70/178 [==========>...................] - ETA: 0s - loss: 10.2799 140/178 [======================>.......] - ETA: 0s - loss: 9.7851 178/178 [==============================] - 0s 734us/step - loss: 9.6176 ## Epoch 10/20 ## 1/178 [..............................] - ETA: 0s - loss: 9.8390 70/178 [==========>...................] - ETA: 0s - loss: 9.2099 139/178 [======================>.......] - ETA: 0s - loss: 9.9742 178/178 [==============================] - 0s 739us/step - loss: 9.6048 ## Epoch 11/20 ## 1/178 [..............................] - ETA: 0s - loss: 12.1220 70/178 [==========>...................] - ETA: 0s - loss: 8.8306 137/178 [======================>.......] 
- ETA: 0s - loss: 9.4540 178/178 [==============================] - 0s 748us/step - loss: 9.5852 ## Epoch 12/20 ## 1/178 [..............................] - ETA: 0s - loss: 8.0364 71/178 [==========>...................] - ETA: 0s - loss: 9.9407 140/178 [======================>.......] - ETA: 0s - loss: 9.5596 178/178 [==============================] - 0s 734us/step - loss: 9.5785 ## Epoch 13/20 ## 1/178 [..............................] - ETA: 0s - loss: 18.1718 71/178 [==========>...................] - ETA: 0s - loss: 9.1830 140/178 [======================>.......] - ETA: 0s - loss: 9.3422 178/178 [==============================] - 0s 733us/step - loss: 9.5692 ## Epoch 14/20 ## 1/178 [..............................] - ETA: 0s - loss: 7.5960 70/178 [==========>...................] - ETA: 0s - loss: 9.4781 139/178 [======================>.......] - ETA: 0s - loss: 9.5747 178/178 [==============================] - 0s 735us/step - loss: 9.5480 ## Epoch 15/20 ## 1/178 [..............................] - ETA: 0s - loss: 1.3175 69/178 [==========>...................] - ETA: 0s - loss: 9.2704 139/178 [======================>.......] - ETA: 0s - loss: 9.5494 178/178 [==============================] - 0s 737us/step - loss: 9.5444 ## Epoch 16/20 ## 1/178 [..............................] - ETA: 0s - loss: 8.1835 71/178 [==========>...................] - ETA: 0s - loss: 10.3360 140/178 [======================>.......] - ETA: 0s - loss: 9.8695 178/178 [==============================] - 0s 730us/step - loss: 9.5391 ## Epoch 17/20 ## 1/178 [..............................] - ETA: 0s - loss: 6.5206 71/178 [==========>...................] - ETA: 0s - loss: 9.7326 140/178 [======================>.......] - ETA: 0s - loss: 9.3318 178/178 [==============================] - 0s 734us/step - loss: 9.5301 ## Epoch 18/20 ## 1/178 [..............................] - ETA: 0s - loss: 17.5520 70/178 [==========>...................] - ETA: 0s - loss: 9.0890 140/178 [======================>.......] 
- ETA: 0s - loss: 9.4896 178/178 [==============================] - 0s 734us/step - loss: 9.5319 ## Epoch 19/20 ## 1/178 [..............................] - ETA: 0s - loss: 6.6445 70/178 [==========>...................] - ETA: 0s - loss: 9.1370 137/178 [======================>.......] - ETA: 0s - loss: 9.4737 178/178 [==============================] - 0s 747us/step - loss: 9.5278 ## Epoch 20/20 ## 1/178 [..............................] - ETA: 0s - loss: 10.1576 70/178 [==========>...................] - ETA: 0s - loss: 10.9734 139/178 [======================>.......] - ETA: 0s - loss: 9.4202 178/178 [==============================] - 0s 738us/step - loss: 9.5204 linreg_reg_fit ## parsnip model object ## ## Model: \"sequential\" ## __________________________________________________________________________ ## Layer (type) Output Shape Param # ## ========================================================================== ## dense (Dense) (None, 1) 3 ## dense_1 (Dense) (None, 1) 2 ## ========================================================================== ## Total params: 5 ## Trainable params: 5 ## Non-trainable params: 0 ## __________________________________________________________________________ predict(linreg_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.4 ## 2 20.6 ## 3 20.9 ## 4 20.7 ## 5 19.0 ## 6 7.45 ## 7 7.09 library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] linreg_reg_spec <- linear_reg() %>% set_engine(\"stan\") linreg_reg_spec ## Linear Regression Model Specification (regression) ## ## Computational engine: stan set.seed(1) linreg_reg_fit <- linreg_reg_spec %>% fit(ridership ~ ., data = Chicago_train) linreg_reg_fit ## parsnip model object ## ## stan_glm ## family: gaussian [identity] ## formula: ridership ~ . 
## observations: 5691 ## predictors: 3 ## ------ ## Median MAD_SD ## (Intercept) 1.7 0.1 ## Clark_Lake 0.8 0.0 ## Quincy_Wells 0.3 0.1 ## ## Auxiliary parameter(s): ## Median MAD_SD ## sigma 3.1 0.0 ## ## ------ ## * For help interpreting the printed output see ?print.stanreg ## * For info on the priors used see ?prior_summary.stanreg predict(linreg_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.3 ## 2 20.5 ## 3 20.8 ## 4 20.5 ## 5 18.8 ## 6 7.45 ## 7 7.02"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"logistic_reg-models","dir":"Articles","previous_headings":"","what":"logistic_reg() models","title":"Fitting and predicting with parsnip","text":"example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. 
’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] logreg_cls_spec <- logistic_reg() %>% set_engine(\"glm\") logreg_cls_spec ## Logistic Regression Model Specification (classification) ## ## Computational engine: glm set.seed(1) logreg_cls_fit <- logreg_cls_spec %>% fit(Class ~ ., data = data_train) logreg_cls_fit ## parsnip model object ## ## ## Call: stats::glm(formula = Class ~ ., family = stats::binomial, data = data) ## ## Coefficients: ## (Intercept) A B ## -3.755 -1.259 3.855 ## ## Degrees of Freedom: 780 Total (i.e. Null); 778 Residual ## Null Deviance: 1073 ## Residual Deviance: 662.1 AIC: 668.1 bind_cols( predict(logreg_cls_fit, data_test), predict(logreg_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class1 0.518 0.482 ## 2 Class1 0.909 0.0913 ## 3 Class1 0.648 0.352 ## 4 Class1 0.610 0.390 ## 5 Class2 0.443 0.557 ## 6 Class2 0.206 0.794 ## 7 Class1 0.708 0.292 ## 8 Class1 0.567 0.433 ## 9 Class1 0.994 0.00582 ## 10 Class2 0.108 0.892 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] logreg_cls_spec <- logistic_reg(penalty = 0.1) %>% set_engine(\"glmnet\") logreg_cls_spec ## Logistic Regression Model Specification (classification) ## ## Main Arguments: ## penalty = 0.1 ## ## Computational engine: glmnet set.seed(1) logreg_cls_fit <- logreg_cls_spec %>% fit(Class ~ ., data = data_train) logreg_cls_fit ## parsnip model object ## ## ## Call: glmnet::glmnet(x = maybe_matrix(x), y = y, family = \"binomial\") ## ## Df %Dev Lambda ## 1 0 0.00 0.308500 ## 2 1 4.76 0.281100 ## 3 1 8.75 0.256100 ## 4 1 12.13 0.233300 ## 5 1 15.01 0.212600 ## 6 1 17.50 0.193700 ## 7 1 19.64 0.176500 ## 8 1 21.49 0.160800 ## 9 1 23.10 0.146500 ## 10 1 24.49 0.133500 ## 11 1 25.71 0.121700 ## 12 1 26.76 0.110900 ## 
13 1 27.67 0.101000 ## 14 1 28.46 0.092030 ## 15 1 29.15 0.083860 ## 16 1 29.74 0.076410 ## 17 1 30.25 0.069620 ## 18 1 30.70 0.063430 ## 19 1 31.08 0.057800 ## 20 1 31.40 0.052660 ## 21 1 31.68 0.047990 ## 22 1 31.92 0.043720 ## 23 1 32.13 0.039840 ## 24 2 32.70 0.036300 ## 25 2 33.50 0.033070 ## 26 2 34.18 0.030140 ## 27 2 34.78 0.027460 ## 28 2 35.29 0.025020 ## 29 2 35.72 0.022800 ## 30 2 36.11 0.020770 ## 31 2 36.43 0.018930 ## 32 2 36.71 0.017250 ## 33 2 36.96 0.015710 ## 34 2 37.16 0.014320 ## 35 2 37.34 0.013050 ## 36 2 37.49 0.011890 ## 37 2 37.62 0.010830 ## 38 2 37.73 0.009868 ## 39 2 37.82 0.008992 ## 40 2 37.90 0.008193 ## 41 2 37.97 0.007465 ## 42 2 38.02 0.006802 ## 43 2 38.07 0.006198 ## 44 2 38.11 0.005647 ## 45 2 38.15 0.005145 ## 46 2 38.18 0.004688 ## 47 2 38.20 0.004272 ## 48 2 38.22 0.003892 ## 49 2 38.24 0.003547 ## 50 2 38.25 0.003231 ## 51 2 38.26 0.002944 ## 52 2 38.27 0.002683 ## 53 2 38.28 0.002444 ## 54 2 38.29 0.002227 ## 55 2 38.29 0.002029 ## 56 2 38.30 0.001849 ## 57 2 38.30 0.001685 ## 58 2 38.31 0.001535 ## 59 2 38.31 0.001399 ## 60 2 38.31 0.001275 ## 61 2 38.31 0.001161 ## 62 2 38.32 0.001058 ## 63 2 38.32 0.000964 ## 64 2 38.32 0.000879 ## 65 2 38.32 0.000800 bind_cols( predict(logreg_cls_fit, data_test), predict(logreg_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class1 0.530 0.470 ## 2 Class1 0.713 0.287 ## 3 Class1 0.616 0.384 ## 4 Class2 0.416 0.584 ## 5 Class2 0.417 0.583 ## 6 Class2 0.288 0.712 ## 7 Class1 0.554 0.446 ## 8 Class1 0.557 0.443 ## 9 Class1 0.820 0.180 ## 10 Class2 0.206 0.794 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] logreg_cls_spec <- logistic_reg(penalty = 0.1) %>% set_engine(\"keras\") logreg_cls_spec ## Logistic Regression Model Specification (classification) ## ## Main Arguments: ## penalty = 0.1 ## ## Computational engine: keras set.seed(1) 
logreg_cls_fit <- logreg_cls_spec %>% fit(Class ~ ., data = data_train) ## Epoch 1/20 ## 1/25 [>.............................] - ETA: 7s - loss: 0.9612 25/25 [==============================] - 0s 839us/step - loss: 0.9000 ## Epoch 2/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.8651 25/25 [==============================] - 0s 824us/step - loss: 0.8891 ## Epoch 3/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.9680 25/25 [==============================] - 0s 821us/step - loss: 0.8785 ## Epoch 4/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.8629 25/25 [==============================] - 0s 819us/step - loss: 0.8680 ## Epoch 5/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7740 25/25 [==============================] - 0s 843us/step - loss: 0.8579 ## Epoch 6/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.8865 25/25 [==============================] - 0s 845us/step - loss: 0.8479 ## Epoch 7/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.8074 25/25 [==============================] - 0s 848us/step - loss: 0.8383 ## Epoch 8/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.8382 25/25 [==============================] - 0s 852us/step - loss: 0.8288 ## Epoch 9/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.8655 25/25 [==============================] - 0s 844us/step - loss: 0.8195 ## Epoch 10/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7916 25/25 [==============================] - 0s 843us/step - loss: 0.8105 ## Epoch 11/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.8338 25/25 [==============================] - 0s 840us/step - loss: 0.8017 ## Epoch 12/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7643 25/25 [==============================] - 0s 846us/step - loss: 0.7931 ## Epoch 13/20 ## 1/25 [>.............................] 
- ETA: 0s - loss: 0.7616 25/25 [==============================] - 0s 844us/step - loss: 0.7845 ## Epoch 14/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7430 25/25 [==============================] - 0s 846us/step - loss: 0.7762 ## Epoch 15/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7676 25/25 [==============================] - 0s 850us/step - loss: 0.7684 ## Epoch 16/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7518 25/25 [==============================] - 0s 851us/step - loss: 0.7603 ## Epoch 17/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7961 25/25 [==============================] - 0s 835us/step - loss: 0.7524 ## Epoch 18/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7796 25/25 [==============================] - 0s 838us/step - loss: 0.7448 ## Epoch 19/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7352 25/25 [==============================] - 0s 842us/step - loss: 0.7373 ## Epoch 20/20 ## 1/25 [>.............................] 
- ETA: 0s - loss: 0.7264 25/25 [==============================] - 0s 851us/step - loss: 0.7300 logreg_cls_fit ## parsnip model object ## ## Model: \"sequential_1\" ## __________________________________________________________________________ ## Layer (type) Output Shape Param # ## ========================================================================== ## dense_2 (Dense) (None, 1) 3 ## dense_3 (Dense) (None, 2) 4 ## ========================================================================== ## Total params: 7 ## Trainable params: 7 ## Non-trainable params: 0 ## __________________________________________________________________________ bind_cols( predict(logreg_cls_fit, data_test), predict(logreg_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class1 0.501 0.499 ## 2 Class1 0.834 0.166 ## 3 Class1 0.512 0.488 ## 4 Class1 0.829 0.171 ## 5 Class1 0.635 0.365 ## 6 Class1 0.531 0.469 ## 7 Class1 0.734 0.266 ## 8 Class1 0.516 0.484 ## 9 Class1 0.990 0.0103 ## 10 Class2 0.490 0.510 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] logreg_cls_spec <- logistic_reg(penalty = 0.1) %>% set_engine(\"LiblineaR\") logreg_cls_spec ## Logistic Regression Model Specification (classification) ## ## Main Arguments: ## penalty = 0.1 ## ## Computational engine: LiblineaR set.seed(1) logreg_cls_fit <- logreg_cls_spec %>% fit(Class ~ ., data = data_train) logreg_cls_fit ## parsnip model object ## ## $TypeDetail ## [1] \"L2-regularized logistic regression primal (L2R_LR)\" ## ## $Type ## [1] 0 ## ## $W ## A B Bias ## [1,] 1.219818 -3.759034 3.674861 ## ## $Bias ## [1] 1 ## ## $ClassNames ## [1] Class1 Class2 ## Levels: Class1 Class2 ## ## $NbClass ## [1] 2 ## ## attr(,\"class\") ## [1] \"LiblineaR\" bind_cols( predict(logreg_cls_fit, data_test), predict(logreg_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class 
.pred_Class1 .pred_Class2 ## ## 1 Class1 0.517 0.483 ## 2 Class1 0.904 0.0964 ## 3 Class1 0.645 0.355 ## 4 Class1 0.604 0.396 ## 5 Class2 0.442 0.558 ## 6 Class2 0.210 0.790 ## 7 Class1 0.702 0.298 ## 8 Class1 0.565 0.435 ## 9 Class1 0.993 0.00667 ## 10 Class2 0.112 0.888 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] logreg_cls_spec <- logistic_reg() %>% set_engine(\"stan\") logreg_cls_spec ## Logistic Regression Model Specification (classification) ## ## Computational engine: stan set.seed(1) logreg_cls_fit <- logreg_cls_spec %>% fit(Class ~ ., data = data_train) logreg_cls_fit ## parsnip model object ## ## stan_glm ## family: binomial [logit] ## formula: Class ~ . ## observations: 781 ## predictors: 3 ## ------ ## Median MAD_SD ## (Intercept) -3.8 0.3 ## A -1.3 0.2 ## B 3.9 0.3 ## ## ------ ## * For help interpreting the printed output see ?print.stanreg ## * For info on the priors used see ?prior_summary.stanreg bind_cols( predict(logreg_cls_fit, data_test), predict(logreg_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class1 0.518 0.482 ## 2 Class1 0.909 0.0909 ## 3 Class1 0.650 0.350 ## 4 Class1 0.609 0.391 ## 5 Class2 0.443 0.557 ## 6 Class2 0.206 0.794 ## 7 Class1 0.708 0.292 ## 8 Class1 0.568 0.432 ## 9 Class1 0.994 0.00580 ## 10 Class2 0.108 0.892"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"mars-models","dir":"Articles","previous_headings":"","what":"mars() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. 
predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] mars_reg_spec <- mars(prod_degree = 1, prune_method = \"backward\") %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"earth\") mars_reg_spec ## MARS Model Specification (regression) ## ## Main Arguments: ## prod_degree = 1 ## prune_method = backward ## ## Computational engine: earth set.seed(1) mars_reg_fit <- mars_reg_spec %>% fit(ridership ~ ., data = Chicago_train) ## ## Attaching package: 'plotrix' ## The following object is masked from 'package:scales': ## ## rescale mars_reg_fit ## parsnip model object ## ## Selected 5 of 6 terms, and 2 of 2 predictors ## Termination condition: RSq changed by less than 0.001 at 6 terms ## Importance: Clark_Lake, Quincy_Wells ## Number of terms at each degree of interaction: 1 4 (additive model) ## GCV 9.085818 RSS 51543.98 GRSq 0.7889881 RSq 0.789581 predict(mars_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.4 ## 2 20.7 ## 3 21.0 ## 4 20.7 ## 5 19.0 ## 6 7.99 ## 7 6.68 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] mars_cls_spec <- mars(prod_degree = 1, prune_method = \"backward\") %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"earth\") mars_cls_spec ## MARS Model Specification (classification) ## ## Main Arguments: ## prod_degree = 1 ## prune_method = backward ## ## Computational engine: earth set.seed(1) mars_cls_fit <- mars_cls_spec %>% fit(Class ~ ., data = data_train) mars_cls_fit 
## parsnip model object ## ## GLM (family binomial, link logit): ## nulldev df dev df devratio AIC iters converged ## 1073.43 780 632.723 775 0.411 644.7 5 1 ## ## Earth selected 6 of 13 terms, and 2 of 2 predictors ## Termination condition: Reached nk 21 ## Importance: B, A ## Number of terms at each degree of interaction: 1 5 (additive model) ## Earth GCV 0.1334948 RSS 101.3432 GRSq 0.461003 RSq 0.4747349 bind_cols( predict(mars_cls_fit, data_test), predict(mars_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.332 0.668 ## 2 Class1 0.845 0.155 ## 3 Class1 0.585 0.415 ## 4 Class1 0.690 0.310 ## 5 Class2 0.483 0.517 ## 6 Class2 0.318 0.682 ## 7 Class1 0.661 0.339 ## 8 Class2 0.398 0.602 ## 9 Class1 0.990 0.00972 ## 10 Class2 0.0625 0.938"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"mlp-models","dir":"Articles","previous_headings":"","what":"mlp() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: ’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. 
can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] mlp_reg_spec <- mlp(penalty = 0, epochs = 100) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"nnet\") mlp_reg_spec ## Single Layer Neural Network Model Specification (regression) ## ## Main Arguments: ## penalty = 0 ## epochs = 100 ## ## Computational engine: nnet set.seed(1) mlp_reg_fit <- mlp_reg_spec %>% fit(ridership ~ ., data = Chicago_train) mlp_reg_fit ## parsnip model object ## ## a 2-5-1 network with 21 weights ## inputs: Clark_Lake Quincy_Wells ## output(s): ridership ## options were - linear output units predict(mlp_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.5 ## 2 20.8 ## 3 21.1 ## 4 20.8 ## 5 18.8 ## 6 8.09 ## 7 6.22 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] mlp_cls_spec <- mlp(penalty = 0, epochs = 100) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"nnet\") mlp_cls_spec ## Single Layer Neural Network Model Specification (classification) ## ## Main Arguments: ## penalty = 0 ## epochs = 100 ## ## Computational engine: nnet set.seed(1) mlp_cls_fit <- mlp_cls_spec %>% fit(Class ~ ., data = data_train) mlp_cls_fit ## parsnip model object ## ## a 2-5-1 network with 21 weights ## inputs: A B ## output(s): Class ## options were - entropy fitting bind_cols( predict(mlp_cls_fit, data_test), predict(mlp_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.364 0.636 
## 2 Class1 0.691 0.309 ## 3 Class1 0.577 0.423 ## 4 Class1 0.686 0.314 ## 5 Class2 0.466 0.534 ## 6 Class2 0.339 0.661 ## 7 Class1 0.670 0.330 ## 8 Class2 0.384 0.616 ## 9 Class1 0.692 0.308 ## 10 Class2 0.330 0.670 library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] mlp_reg_spec <- mlp(penalty = 0, epochs = 20) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"keras\") mlp_reg_spec ## Single Layer Neural Network Model Specification (regression) ## ## Main Arguments: ## penalty = 0 ## epochs = 20 ## ## Computational engine: keras set.seed(1) mlp_reg_fit <- mlp_reg_spec %>% fit(ridership ~ ., data = Chicago_train) ## Epoch 1/20 ## 1/178 [..............................] - ETA: 33s - loss: 230.9207 69/178 [==========>...................] - ETA: 0s - loss: 216.2306 136/178 [=====================>........] - ETA: 0s - loss: 213.2218 178/178 [==============================] - 0s 759us/step - loss: 209.6853 ## Epoch 2/20 ## 1/178 [..............................] - ETA: 0s - loss: 226.3140 68/178 [==========>...................] - ETA: 0s - loss: 191.2608 135/178 [=====================>........] - ETA: 0s - loss: 191.1377 178/178 [==============================] - 0s 765us/step - loss: 190.0487 ## Epoch 3/20 ## 1/178 [..............................] - ETA: 0s - loss: 167.0655 67/178 [==========>...................] - ETA: 0s - loss: 183.1242 132/178 [=====================>........] - ETA: 0s - loss: 182.3996 178/178 [==============================] - 0s 773us/step - loss: 180.1502 ## Epoch 4/20 ## 1/178 [..............................] - ETA: 0s - loss: 202.8846 68/178 [==========>...................] - ETA: 0s - loss: 173.8117 135/178 [=====================>........] 
- ETA: 0s - loss: 171.4261 178/178 [==============================] - 0s 763us/step - loss: 171.5603 ## Epoch 5/20 ## 1/178 [..............................] - ETA: 0s - loss: 187.1993 68/178 [==========>...................] - ETA: 0s - loss: 164.3662 133/178 [=====================>........] - ETA: 0s - loss: 163.5835 178/178 [==============================] - 0s 772us/step - loss: 163.5888 ## Epoch 6/20 ## 1/178 [..............................] - ETA: 0s - loss: 187.5711 68/178 [==========>...................] - ETA: 0s - loss: 156.8455 134/178 [=====================>........] - ETA: 0s - loss: 156.9154 178/178 [==============================] - 0s 766us/step - loss: 156.0869 ## Epoch 7/20 ## 1/178 [..............................] - ETA: 0s - loss: 141.4862 68/178 [==========>...................] - ETA: 0s - loss: 149.5145 134/178 [=====================>........] - ETA: 0s - loss: 149.3982 178/178 [==============================] - 0s 768us/step - loss: 148.9828 ## Epoch 8/20 ## 1/178 [..............................] - ETA: 0s - loss: 114.8170 67/178 [==========>...................] - ETA: 0s - loss: 140.4720 134/178 [=====================>........] - ETA: 0s - loss: 142.2585 178/178 [==============================] - 0s 769us/step - loss: 142.2271 ## Epoch 9/20 ## 1/178 [..............................] - ETA: 0s - loss: 132.7867 68/178 [==========>...................] - ETA: 0s - loss: 136.8206 135/178 [=====================>........] - ETA: 0s - loss: 137.1821 178/178 [==============================] - 0s 759us/step - loss: 135.8030 ## Epoch 10/20 ## 1/178 [..............................] - ETA: 0s - loss: 138.3583 68/178 [==========>...................] - ETA: 0s - loss: 133.8351 135/178 [=====================>........] - ETA: 0s - loss: 130.3492 178/178 [==============================] - 0s 764us/step - loss: 129.6855 ## Epoch 11/20 ## 1/178 [..............................] - ETA: 0s - loss: 131.2519 67/178 [==========>...................] 
- ETA: 0s - loss: 126.0124 134/178 [=====================>........] - ETA: 0s - loss: 125.5755 178/178 [==============================] - 0s 764us/step - loss: 123.8528 ## Epoch 12/20 ## 1/178 [..............................] - ETA: 0s - loss: 152.5505 68/178 [==========>...................] - ETA: 0s - loss: 122.7017 135/178 [=====================>........] - ETA: 0s - loss: 118.2214 178/178 [==============================] - 0s 763us/step - loss: 118.2895 ## Epoch 13/20 ## 1/178 [..............................] - ETA: 0s - loss: 120.8791 65/178 [=========>....................] - ETA: 0s - loss: 112.7203 132/178 [=====================>........] - ETA: 0s - loss: 113.8766 178/178 [==============================] - 0s 774us/step - loss: 112.9745 ## Epoch 14/20 ## 1/178 [..............................] - ETA: 0s - loss: 117.3056 68/178 [==========>...................] - ETA: 0s - loss: 108.3989 135/178 [=====================>........] - ETA: 0s - loss: 108.3585 178/178 [==============================] - 0s 761us/step - loss: 107.8919 ## Epoch 15/20 ## 1/178 [..............................] - ETA: 0s - loss: 99.4304 67/178 [==========>...................] - ETA: 0s - loss: 103.0506 134/178 [=====================>........] - ETA: 0s - loss: 103.0960 178/178 [==============================] - 0s 767us/step - loss: 103.0151 ## Epoch 16/20 ## 1/178 [..............................] - ETA: 0s - loss: 89.3936 67/178 [==========>...................] - ETA: 0s - loss: 99.6111 132/178 [=====================>........] - ETA: 0s - loss: 98.9638 178/178 [==============================] - 0s 786us/step - loss: 98.3087 ## Epoch 17/20 ## 1/178 [..............................] - ETA: 0s - loss: 111.9582 66/178 [==========>...................] - ETA: 0s - loss: 96.8132 133/178 [=====================>........] - ETA: 0s - loss: 94.5995 178/178 [==============================] - 0s 768us/step - loss: 93.7212 ## Epoch 18/20 ## 1/178 [..............................] 
- ETA: 0s - loss: 101.5892 68/178 [==========>...................] - ETA: 0s - loss: 90.6691 135/178 [=====================>........] - ETA: 0s - loss: 88.8466 178/178 [==============================] - 0s 761us/step - loss: 89.2178 ## Epoch 19/20 ## 1/178 [..............................] - ETA: 0s - loss: 94.9479 68/178 [==========>...................] - ETA: 0s - loss: 86.5822 135/178 [=====================>........] - ETA: 0s - loss: 85.5444 178/178 [==============================] - 0s 759us/step - loss: 84.8844 ## Epoch 20/20 ## 1/178 [..............................] - ETA: 0s - loss: 77.3532 68/178 [==========>...................] - ETA: 0s - loss: 83.4353 132/178 [=====================>........] - ETA: 0s - loss: 81.5463 178/178 [==============================] - 0s 780us/step - loss: 80.6915 mlp_reg_fit ## parsnip model object ## ## Model: \"sequential_2\" ## __________________________________________________________________________ ## Layer (type) Output Shape Param # ## ========================================================================== ## dense_4 (Dense) (None, 5) 15 ## dense_5 (Dense) (None, 1) 6 ## ========================================================================== ## Total params: 21 ## Trainable params: 21 ## Non-trainable params: 0 ## __________________________________________________________________________ predict(mlp_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 7.50 ## 2 7.50 ## 3 7.50 ## 4 7.50 ## 5 7.50 ## 6 6.90 ## 7 6.74 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] mlp_cls_spec <- mlp(penalty = 0, epochs = 20) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"keras\") mlp_cls_spec ## Single Layer Neural Network Model Specification (classification) ## ## Main Arguments: ## penalty = 0 ## epochs = 20 ## ## Computational engine: keras set.seed(1) 
mlp_cls_fit <- mlp_cls_spec %>% fit(Class ~ ., data = data_train) ## Epoch 1/20 ## 1/25 [>.............................] - ETA: 5s - loss: 0.6948 25/25 [==============================] - 0s 855us/step - loss: 0.6990 ## Epoch 2/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.7013 25/25 [==============================] - 0s 835us/step - loss: 0.6929 ## Epoch 3/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6962 25/25 [==============================] - 0s 824us/step - loss: 0.6872 ## Epoch 4/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6825 25/25 [==============================] - 0s 849us/step - loss: 0.6818 ## Epoch 5/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6802 25/25 [==============================] - 0s 858us/step - loss: 0.6770 ## Epoch 6/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6805 25/25 [==============================] - 0s 859us/step - loss: 0.6723 ## Epoch 7/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6707 25/25 [==============================] - 0s 885us/step - loss: 0.6679 ## Epoch 8/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6636 25/25 [==============================] - 0s 985us/step - loss: 0.6636 ## Epoch 9/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6627 25/25 [==============================] - 0s 884us/step - loss: 0.6594 ## Epoch 10/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6575 25/25 [==============================] - 0s 858us/step - loss: 0.6553 ## Epoch 11/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6481 25/25 [==============================] - 0s 860us/step - loss: 0.6510 ## Epoch 12/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6327 25/25 [==============================] - 0s 860us/step - loss: 0.6470 ## Epoch 13/20 ## 1/25 [>.............................] 
- ETA: 0s - loss: 0.6626 25/25 [==============================] - 0s 866us/step - loss: 0.6429 ## Epoch 14/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6334 25/25 [==============================] - 0s 859us/step - loss: 0.6387 ## Epoch 15/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6530 25/25 [==============================] - 0s 868us/step - loss: 0.6345 ## Epoch 16/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6354 25/25 [==============================] - 0s 870us/step - loss: 0.6300 ## Epoch 17/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6479 25/25 [==============================] - 0s 858us/step - loss: 0.6257 ## Epoch 18/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6021 25/25 [==============================] - 0s 869us/step - loss: 0.6213 ## Epoch 19/20 ## 1/25 [>.............................] - ETA: 0s - loss: 0.6017 25/25 [==============================] - 0s 869us/step - loss: 0.6167 ## Epoch 20/20 ## 1/25 [>.............................] 
- ETA: 0s - loss: 0.5676 25/25 [==============================] - 0s 867us/step - loss: 0.6122 mlp_cls_fit ## parsnip model object ## ## Model: \"sequential_3\" ## __________________________________________________________________________ ## Layer (type) Output Shape Param # ## ========================================================================== ## dense_6 (Dense) (None, 5) 15 ## dense_7 (Dense) (None, 2) 12 ## ========================================================================== ## Total params: 27 ## Trainable params: 27 ## Non-trainable params: 0 ## __________________________________________________________________________ bind_cols( predict(mlp_cls_fit, data_test), predict(mlp_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class1 0.542 0.458 ## 2 Class1 0.667 0.333 ## 3 Class1 0.600 0.400 ## 4 Class2 0.481 0.519 ## 5 Class2 0.478 0.522 ## 6 Class2 0.413 0.587 ## 7 Class1 0.557 0.443 ## 8 Class1 0.559 0.441 ## 9 Class1 0.749 0.251 ## 10 Class2 0.376 0.624"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"multinom_reg-models","dir":"Articles","previous_headings":"","what":"multinom_reg() models","title":"Fitting and predicting with parsnip","text":"’ll predict island penguins observed two variables unit (mm): bill length bill depth. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: ’ll predict island penguins observed two variables unit (mm): bill length bill depth. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: ’ll predict island penguins observed two variables unit (mm): bill length bill depth. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. 
’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(penguins) penguins <- penguins %>% select(island, starts_with(\"bill_\")) penguins_train <- penguins[-c(21, 153, 31, 277, 1), ] penguins_test <- penguins[ c(21, 153, 31, 277, 1), ] mr_cls_spec <- multinom_reg(penalty = 0.1) %>% set_engine(\"glmnet\") mr_cls_spec ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = 0.1 ## ## Computational engine: glmnet set.seed(1) mr_cls_fit <- mr_cls_spec %>% fit(island ~ ., data = penguins_train) mr_cls_fit ## parsnip model object ## ## ## Call: glmnet::glmnet(x = maybe_matrix(x), y = y, family = \"multinomial\") ## ## Df %Dev Lambda ## 1 0 0.00 0.31730 ## 2 1 3.43 0.28910 ## 3 1 6.30 0.26340 ## 4 1 8.74 0.24000 ## 5 1 10.83 0.21870 ## 6 1 12.62 0.19930 ## 7 1 14.17 0.18160 ## 8 1 15.51 0.16540 ## 9 1 16.67 0.15070 ## 10 1 17.68 0.13740 ## 11 1 18.56 0.12520 ## 12 2 19.93 0.11400 ## 13 2 21.31 0.10390 ## 14 2 22.50 0.09467 ## 15 2 23.52 0.08626 ## 16 2 24.40 0.07860 ## 17 2 25.16 0.07162 ## 18 2 25.81 0.06526 ## 19 2 26.37 0.05946 ## 20 2 26.86 0.05418 ## 21 2 27.27 0.04936 ## 22 2 27.63 0.04498 ## 23 2 27.94 0.04098 ## 24 2 28.21 0.03734 ## 25 2 28.44 0.03402 ## 26 2 28.63 0.03100 ## 27 2 28.80 0.02825 ## 28 2 28.94 0.02574 ## 29 2 29.06 0.02345 ## 30 2 29.17 0.02137 ## 31 2 29.26 0.01947 ## 32 2 29.33 0.01774 ## 33 2 29.39 0.01616 ## 34 2 29.45 0.01473 ## 35 2 29.49 0.01342 ## 36 2 29.53 0.01223 ## 37 2 29.56 0.01114 ## 38 2 29.59 0.01015 ## 39 2 29.61 0.00925 ## 40 2 29.63 0.00843 ## 41 2 29.65 0.00768 ## 42 2 29.67 0.00700 ## 43 2 29.68 0.00638 ## 44 2 29.69 0.00581 ## 45 2 29.70 0.00529 ## 46 2 29.71 0.00482 ## 47 2 29.71 0.00439 ## 48 2 29.72 0.00400 ## 49 2 29.72 0.00365 ## 50 2 29.73 0.00332 ## 51 2 29.73 0.00303 ## 52 2 29.74 0.00276 ## 53 2 29.74 0.00251 ## 54 2 29.74 0.00229 ## 55 2 29.75 0.00209 ## 56 2 29.75 0.00190 ## 57 2 29.75 0.00173 ## 58 2 29.75 0.00158 ## 59 2 29.75 0.00144 ## 60 2 
29.75 0.00131 bind_cols( predict(mr_cls_fit, penguins_test), predict(mr_cls_fit, penguins_test, type = \"prob\") ) ## # A tibble: 5 × 4 ## .pred_class .pred_Biscoe .pred_Dream .pred_Torgersen ## ## 1 Dream 0.339 0.448 0.214 ## 2 Biscoe 0.879 0.0882 0.0331 ## 3 Biscoe 0.539 0.317 0.144 ## 4 Dream 0.403 0.435 0.162 ## 5 Dream 0.297 0.481 0.221 library(tidymodels) tidymodels_prefer() data(penguins) penguins <- penguins %>% select(island, starts_with(\"bill_\")) penguins_train <- penguins[-c(21, 153, 31, 277, 1), ] penguins_test <- penguins[ c(21, 153, 31, 277, 1), ] mr_cls_spec <- multinom_reg(penalty = 0.1) %>% set_engine(\"keras\") mr_cls_spec ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = 0.1 ## ## Computational engine: keras set.seed(1) mr_cls_fit <- mr_cls_spec %>% fit(island ~ ., data = penguins_train) ## Epoch 1/20 ## 1/11 [=>............................] - ETA: 2s - loss: 3.4629 11/11 [==============================] - 0s 943us/step - loss: 4.0391 ## Epoch 2/20 ## 1/11 [=>............................] - ETA: 0s - loss: 3.5155 11/11 [==============================] - 0s 892us/step - loss: 3.7981 ## Epoch 3/20 ## 1/11 [=>............................] - ETA: 0s - loss: 3.6371 11/11 [==============================] - 0s 880us/step - loss: 3.5654 ## Epoch 4/20 ## 1/11 [=>............................] - ETA: 0s - loss: 3.3243 11/11 [==============================] - 0s 897us/step - loss: 3.3467 ## Epoch 5/20 ## 1/11 [=>............................] - ETA: 0s - loss: 3.5013 11/11 [==============================] - 0s 886us/step - loss: 3.1436 ## Epoch 6/20 ## 1/11 [=>............................] - ETA: 0s - loss: 2.7714 11/11 [==============================] - 0s 908us/step - loss: 2.9455 ## Epoch 7/20 ## 1/11 [=>............................] - ETA: 0s - loss: 3.0290 11/11 [==============================] - 0s 902us/step - loss: 2.7662 ## Epoch 8/20 ## 1/11 [=>............................] 
- ETA: 0s - loss: 2.6760 11/11 [==============================] - 0s 881us/step - loss: 2.6074 ## Epoch 9/20 ## 1/11 [=>............................] - ETA: 0s - loss: 2.3547 11/11 [==============================] - 0s 885us/step - loss: 2.4582 ## Epoch 10/20 ## 1/11 [=>............................] - ETA: 0s - loss: 2.0500 11/11 [==============================] - 0s 896us/step - loss: 2.3305 ## Epoch 11/20 ## 1/11 [=>............................] - ETA: 0s - loss: 2.0776 11/11 [==============================] - 0s 924us/step - loss: 2.2132 ## Epoch 12/20 ## 1/11 [=>............................] - ETA: 0s - loss: 1.8689 11/11 [==============================] - 0s 918us/step - loss: 2.1143 ## Epoch 13/20 ## 1/11 [=>............................] - ETA: 0s - loss: 1.5654 11/11 [==============================] - 0s 936us/step - loss: 2.0278 ## Epoch 14/20 ## 1/11 [=>............................] - ETA: 0s - loss: 1.9938 11/11 [==============================] - 0s 912us/step - loss: 1.9540 ## Epoch 15/20 ## 1/11 [=>............................] - ETA: 0s - loss: 1.9354 11/11 [==============================] - 0s 909us/step - loss: 1.8917 ## Epoch 16/20 ## 1/11 [=>............................] - ETA: 0s - loss: 1.7670 11/11 [==============================] - 0s 920us/step - loss: 1.8379 ## Epoch 17/20 ## 1/11 [=>............................] - ETA: 0s - loss: 1.6599 11/11 [==============================] - 0s 933us/step - loss: 1.7922 ## Epoch 18/20 ## 1/11 [=>............................] - ETA: 0s - loss: 2.1965 11/11 [==============================] - 0s 914us/step - loss: 1.7527 ## Epoch 19/20 ## 1/11 [=>............................] - ETA: 0s - loss: 1.5248 11/11 [==============================] - 0s 933us/step - loss: 1.7162 ## Epoch 20/20 ## 1/11 [=>............................] 
- ETA: 0s - loss: 1.9486 11/11 [==============================] - 0s 927us/step - loss: 1.6877 mr_cls_fit ## parsnip model object ## ## Model: \"sequential_4\" ## __________________________________________________________________________ ## Layer (type) Output Shape Param # ## ========================================================================== ## dense_8 (Dense) (None, 1) 3 ## dense_9 (Dense) (None, 3) 6 ## ========================================================================== ## Total params: 9 ## Trainable params: 9 ## Non-trainable params: 0 ## __________________________________________________________________________ bind_cols( predict(mr_cls_fit, penguins_test), predict(mr_cls_fit, penguins_test, type = \"prob\") ) ## # A tibble: 5 × 4 ## .pred_class .pred_Biscoe .pred_Dream .pred_Torgersen ## ## 1 Torgersen 0.285 0.0171 0.698 ## 2 Dream 0.000113 1.00 0.00000204 ## 3 Dream 0.320 0.470 0.210 ## 4 Dream 0.0502 0.938 0.0115 ## 5 Torgersen 0.303 0.0230 0.674 library(tidymodels) tidymodels_prefer() data(penguins) penguins <- penguins %>% select(island, starts_with(\"bill_\")) penguins_train <- penguins[-c(21, 153, 31, 277, 1), ] penguins_test <- penguins[ c(21, 153, 31, 277, 1), ] mr_cls_spec <- multinom_reg(penalty = 0.1) %>% set_engine(\"nnet\") mr_cls_spec ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = 0.1 ## ## Computational engine: nnet set.seed(1) mr_cls_fit <- mr_cls_spec %>% fit(island ~ ., data = penguins_train) mr_cls_fit ## parsnip model object ## ## Call: ## nnet::multinom(formula = island ~ ., data = data, decay = ~0.1, ## trace = FALSE) ## ## Coefficients: ## (Intercept) bill_length_mm bill_depth_mm ## Dream -8.243575 -0.0580960 0.6168318 ## Torgersen -1.610588 -0.2789588 0.6978480 ## ## Residual Deviance: 502.5009 ## AIC: 514.5009 bind_cols( predict(mr_cls_fit, penguins_test), predict(mr_cls_fit, penguins_test, type = \"prob\") ) ## # A tibble: 5 × 4 ## .pred_class .pred_Biscoe .pred_Dream 
.pred_Torgersen ## ## 1 Dream 0.193 0.450 0.357 ## 2 Biscoe 0.937 0.0582 0.00487 ## 3 Biscoe 0.462 0.364 0.174 ## 4 Dream 0.450 0.495 0.0556 ## 5 Dream 0.183 0.506 0.311"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"nearest_neighbor-models","dir":"Articles","previous_headings":"","what":"nearest_neighbor() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. Since two classes, ’ll use odd number neighbors avoid ties: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] knn_reg_spec <- nearest_neighbor(neighbors = 5, weight_func = \"triangular\") %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"kknn\") knn_reg_spec ## K-Nearest Neighbor Model Specification (regression) ## ## Main Arguments: ## neighbors = 5 ## weight_func = triangular ## ## Computational engine: kknn knn_reg_fit <- knn_reg_spec %>% fit(ridership ~ ., data = Chicago_train) knn_reg_fit ## parsnip model object ## ## ## Call: ## kknn::train.kknn(formula = ridership ~ ., data = data, ks = min_rows(5, data, 5), kernel = ~\"triangular\") ## ## Type of response variable: continuous ## minimal mean absolute error: 1.79223 ## Minimal mean squared error: 11.21809 ## Best kernel: triangular ## Best k: 5 predict(knn_reg_fit, Chicago_test) ## # A 
tibble: 7 × 1 ## .pred ## ## 1 20.5 ## 2 21.1 ## 3 21.4 ## 4 21.8 ## 5 19.5 ## 6 7.83 ## 7 5.54 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] knn_cls_spec <- nearest_neighbor(neighbors = 11, weight_func = \"triangular\") %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"kknn\") knn_cls_spec ## K-Nearest Neighbor Model Specification (classification) ## ## Main Arguments: ## neighbors = 11 ## weight_func = triangular ## ## Computational engine: kknn knn_cls_fit <- knn_cls_spec %>% fit(Class ~ ., data = data_train) knn_cls_fit ## parsnip model object ## ## ## Call: ## kknn::train.kknn(formula = Class ~ ., data = data, ks = min_rows(11, data, 5), kernel = ~\"triangular\") ## ## Type of response variable: nominal ## Minimal misclassification: 0.1869398 ## Best kernel: triangular ## Best k: 11 bind_cols( predict(knn_cls_fit, data_test), predict(knn_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.177 0.823 ## 2 Class1 0.995 0.00515 ## 3 Class1 0.590 0.410 ## 4 Class1 0.770 0.230 ## 5 Class2 0.333 0.667 ## 6 Class2 0.182 0.818 ## 7 Class1 0.692 0.308 ## 8 Class2 0.400 0.600 ## 9 Class1 0.814 0.186 ## 10 Class2 0.0273 0.973"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"rand_forest-models","dir":"Articles","previous_headings":"","what":"rand_forest() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. 
can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble: ’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] rf_reg_spec <- rand_forest(trees = 200, min_n = 5) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"ranger\") rf_reg_spec ## Random Forest Model Specification (regression) ## ## Main Arguments: ## trees = 200 ## min_n = 5 ## ## Computational engine: ranger set.seed(1) rf_reg_fit <- rf_reg_spec %>% fit(ridership ~ ., data = Chicago_train) rf_reg_fit ## parsnip model object ## ## Ranger result ## ## Call: ## ranger::ranger(x = maybe_data_frame(x), y = y, num.trees = ~200, min.node.size = min_rows(~5, x), num.threads = 1, verbose = FALSE, seed = sample.int(10^5, 1)) ## ## Type: Regression ## Number of trees: 200 ## Sample size: 5691 ## Number of independent variables: 2 ## Mtry: 1 ## Target node size: 5 ## Variable importance mode: none ## Splitrule: variance ## OOB prediction error (MSE): 9.72953 ## R squared (OOB): 0.7739986 predict(rf_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.4 ## 2 21.5 ## 3 20.8 ## 4 21.6 ## 5 19.4 ## 6 7.32 ## 7 6.03 library(tidymodels) 
tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] rf_cls_spec <- rand_forest(trees = 200, min_n = 5) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"ranger\") rf_cls_spec ## Random Forest Model Specification (classification) ## ## Main Arguments: ## trees = 200 ## min_n = 5 ## ## Computational engine: ranger set.seed(1) rf_cls_fit <- rf_cls_spec %>% fit(Class ~ ., data = data_train) rf_cls_fit ## parsnip model object ## ## Ranger result ## ## Call: ## ranger::ranger(x = maybe_data_frame(x), y = y, num.trees = ~200, min.node.size = min_rows(~5, x), num.threads = 1, verbose = FALSE, seed = sample.int(10^5, 1), probability = TRUE) ## ## Type: Probability estimation ## Number of trees: 200 ## Sample size: 781 ## Number of independent variables: 2 ## Mtry: 1 ## Target node size: 5 ## Variable importance mode: none ## Splitrule: gini ## OOB prediction error (Brier s.): 0.1534794 bind_cols( predict(rf_cls_fit, data_test), predict(rf_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.274 0.725 ## 2 Class1 0.928 0.0716 ## 3 Class2 0.497 0.503 ## 4 Class1 0.703 0.297 ## 5 Class2 0.302 0.698 ## 6 Class2 0.151 0.849 ## 7 Class1 0.701 0.299 ## 8 Class1 0.592 0.409 ## 9 Class1 0.752 0.248 ## 10 Class2 0.00225 0.998 library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] rf_reg_spec <- rand_forest(trees = 200, min_n = 5) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"randomForest\") rf_reg_spec ## Random Forest Model Specification (regression) ## ## Main Arguments: ## trees = 200 ## min_n = 5 ## ## Computational engine: randomForest set.seed(1) 
rf_reg_fit <- rf_reg_spec %>% fit(ridership ~ ., data = Chicago_train) rf_reg_fit ## parsnip model object ## ## ## Call: ## randomForest(x = maybe_data_frame(x), y = y, ntree = ~200, nodesize = min_rows(~5, x)) ## Type of random forest: regression ## Number of trees: 200 ## No. of variables tried at each split: 1 ## ## Mean of squared residuals: 9.696736 ## % Var explained: 77.47 predict(rf_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.4 ## 2 21.6 ## 3 20.9 ## 4 21.6 ## 5 19.3 ## 6 7.33 ## 7 6.16 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] rf_cls_spec <- rand_forest(trees = 200, min_n = 5) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"randomForest\") rf_cls_spec ## Random Forest Model Specification (classification) ## ## Main Arguments: ## trees = 200 ## min_n = 5 ## ## Computational engine: randomForest set.seed(1) rf_cls_fit <- rf_cls_spec %>% fit(Class ~ ., data = data_train) rf_cls_fit ## parsnip model object ## ## ## Call: ## randomForest(x = maybe_data_frame(x), y = y, ntree = ~200, nodesize = min_rows(~5, x)) ## Type of random forest: classification ## Number of trees: 200 ## No. 
of variables tried at each split: 1 ## ## OOB estimate of error rate: 19.72% ## Confusion matrix: ## Class1 Class2 class.error ## Class1 363 70 0.1616628 ## Class2 84 264 0.2413793 bind_cols( predict(rf_cls_fit, data_test), predict(rf_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.23 0.77 ## 2 Class1 0.95 0.05 ## 3 Class1 0.59 0.41 ## 4 Class1 0.75 0.25 ## 5 Class2 0.305 0.695 ## 6 Class2 0.105 0.895 ## 7 Class1 0.685 0.315 ## 8 Class1 0.63 0.37 ## 9 Class1 0.79 0.21 ## 10 Class2 0.02 0.98"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"svm_linear-models","dir":"Articles","previous_headings":"","what":"svm_linear() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions. ’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. 
’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] svm_reg_spec <- svm_linear(cost = 1, margin = 0.1) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"LiblineaR\") svm_reg_spec ## Linear Support Vector Machine Model Specification (regression) ## ## Main Arguments: ## cost = 1 ## margin = 0.1 ## ## Computational engine: LiblineaR set.seed(1) svm_reg_fit <- svm_reg_spec %>% fit(ridership ~ ., data = Chicago_train) svm_reg_fit ## parsnip model object ## ## $TypeDetail ## [1] \"L2-regularized L2-loss support vector regression primal (L2R_L2LOSS_SVR)\" ## ## $Type ## [1] 11 ## ## $W ## Clark_Lake Quincy_Wells Bias ## [1,] 0.8277352 0.3430336 0.05042585 ## ## $Bias ## [1] 1 ## ## $NbClass ## [1] 2 ## ## attr(,\"class\") ## [1] \"LiblineaR\" predict(svm_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.6 ## 2 20.8 ## 3 21.1 ## 4 20.8 ## 5 18.9 ## 6 6.40 ## 7 5.90 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] svm_cls_spec <- svm_linear(cost = 1) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"LiblineaR\") svm_cls_spec ## Linear Support Vector Machine Model Specification (classification) ## ## Main Arguments: ## cost = 1 ## ## Computational engine: LiblineaR set.seed(1) svm_cls_fit <- svm_cls_spec %>% fit(Class ~ ., data = data_train) svm_cls_fit ## parsnip model object ## ## $TypeDetail ## [1] \"L2-regularized L2-loss support vector classification dual (L2R_L2LOSS_SVC_DUAL)\" ## ## $Type ## [1] 1 ## ## $W ## A B Bias ## [1,] 0.4067922 -1.314783 1.321851 ## ## $Bias ## [1] 1 ## ## $ClassNames ## [1] Class1 Class2 ## Levels: Class1 
Class2 ## ## $NbClass ## [1] 2 ## ## attr(,\"class\") ## [1] \"LiblineaR\" predict(svm_cls_fit, data_test) ## # A tibble: 10 × 1 ## .pred_class ## ## 1 Class1 ## 2 Class1 ## 3 Class1 ## 4 Class1 ## 5 Class2 ## 6 Class2 ## 7 Class1 ## 8 Class1 ## 9 Class1 ## 10 Class2 library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] svm_reg_spec <- svm_linear(cost = 1, margin = 0.1) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"kernlab\") svm_reg_spec ## Linear Support Vector Machine Model Specification (regression) ## ## Main Arguments: ## cost = 1 ## margin = 0.1 ## ## Computational engine: kernlab set.seed(1) svm_reg_fit <- svm_reg_spec %>% fit(ridership ~ ., data = Chicago_train) ## Setting default kernel parameters svm_reg_fit ## parsnip model object ## ## Support Vector Machine object of class \"ksvm\" ## ## SV type: eps-svr (regression) ## parameter : epsilon = 0.1 cost C = 1 ## ## Linear (vanilla) kernel function. 
## ## Number of Support Vectors : 2283 ## ## Objective Function Value : -825.1632 ## Training error : 0.226456 predict(svm_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 21.0 ## 2 21.2 ## 3 21.5 ## 4 21.2 ## 5 19.4 ## 6 6.87 ## 7 6.41 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] svm_cls_spec <- svm_linear(cost = 1) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"kernlab\") svm_cls_spec ## Linear Support Vector Machine Model Specification (classification) ## ## Main Arguments: ## cost = 1 ## ## Computational engine: kernlab set.seed(1) svm_cls_fit <- svm_cls_spec %>% fit(Class ~ ., data = data_train) ## Setting default kernel parameters svm_cls_fit ## parsnip model object ## ## Support Vector Machine object of class \"ksvm\" ## ## SV type: C-svc (classification) ## parameter : cost C = 1 ## ## Linear (vanilla) kernel function. ## ## Number of Support Vectors : 353 ## ## Objective Function Value : -349.425 ## Training error : 0.174136 ## Probability model included. bind_cols( predict(svm_cls_fit, data_test), predict(svm_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class1 0.517 0.483 ## 2 Class1 0.904 0.0956 ## 3 Class1 0.645 0.355 ## 4 Class1 0.610 0.390 ## 5 Class2 0.445 0.555 ## 6 Class2 0.212 0.788 ## 7 Class1 0.704 0.296 ## 8 Class1 0.565 0.435 ## 9 Class1 0.994 0.00646 ## 10 Class2 0.114 0.886"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"svm_poly-models","dir":"Articles","previous_headings":"","what":"svm_poly() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. 
can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] svm_reg_spec <- svm_poly(cost = 1, margin = 0.1) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"kernlab\") svm_reg_spec ## Polynomial Support Vector Machine Model Specification (regression) ## ## Main Arguments: ## cost = 1 ## margin = 0.1 ## ## Computational engine: kernlab set.seed(1) svm_reg_fit <- svm_reg_spec %>% fit(ridership ~ ., data = Chicago_train) ## Setting default kernel parameters svm_reg_fit ## parsnip model object ## ## Support Vector Machine object of class \"ksvm\" ## ## SV type: eps-svr (regression) ## parameter : epsilon = 0.1 cost C = 1 ## ## Polynomial kernel function. 
## Hyperparameters : degree = 1 scale = 1 offset = 1 ## ## Number of Support Vectors : 2283 ## ## Objective Function Value : -825.1628 ## Training error : 0.226471 predict(svm_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 21.0 ## 2 21.2 ## 3 21.5 ## 4 21.2 ## 5 19.4 ## 6 6.87 ## 7 6.41 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] svm_cls_spec <- svm_poly(cost = 1) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"kernlab\") svm_cls_spec ## Polynomial Support Vector Machine Model Specification (classification) ## ## Main Arguments: ## cost = 1 ## ## Computational engine: kernlab set.seed(1) svm_cls_fit <- svm_cls_spec %>% fit(Class ~ ., data = data_train) ## Setting default kernel parameters svm_cls_fit ## parsnip model object ## ## Support Vector Machine object of class \"ksvm\" ## ## SV type: C-svc (classification) ## parameter : cost C = 1 ## ## Polynomial kernel function. ## Hyperparameters : degree = 1 scale = 1 offset = 1 ## ## Number of Support Vectors : 353 ## ## Objective Function Value : -349.425 ## Training error : 0.174136 ## Probability model included. bind_cols( predict(svm_cls_fit, data_test), predict(svm_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class1 0.517 0.483 ## 2 Class1 0.904 0.0956 ## 3 Class1 0.645 0.355 ## 4 Class1 0.610 0.390 ## 5 Class2 0.445 0.555 ## 6 Class2 0.212 0.788 ## 7 Class1 0.704 0.296 ## 8 Class1 0.565 0.435 ## 9 Class1 0.994 0.00646 ## 10 Class2 0.114 0.886"},{"path":"https://parsnip.tidymodels.org/dev/articles/Examples.html","id":"svm_rbf-models","dir":"Articles","previous_headings":"","what":"svm_rbf() models","title":"Fitting and predicting with parsnip","text":"’ll model ridership Chicago elevated trains function 14 day lagged ridership two stations. 
two predictors units (rides per day/1000) need normalized. last week data used training. last week predicted model fit. can define model specific parameters: Now create model fit object: holdout data can predicted: example data two predictors outcome two classes. predictors units. can define model specific parameters: Now create model fit object: holdout data can predicted hard class predictions probabilities. ’ll bind together one tibble:","code":"library(tidymodels) tidymodels_prefer() data(Chicago) n <- nrow(Chicago) Chicago <- Chicago %>% select(ridership, Clark_Lake, Quincy_Wells) Chicago_train <- Chicago[1:(n - 7), ] Chicago_test <- Chicago[(n - 6):n, ] svm_reg_spec <- svm_rbf(cost = 1, margin = 0.1) %>% # This model can be used for classification or regression, so set mode set_mode(\"regression\") %>% set_engine(\"kernlab\") svm_reg_spec ## Radial Basis Function Support Vector Machine Model Specification (regression) ## ## Main Arguments: ## cost = 1 ## margin = 0.1 ## ## Computational engine: kernlab set.seed(1) svm_reg_fit <- svm_reg_spec %>% fit(ridership ~ ., data = Chicago_train) svm_reg_fit ## parsnip model object ## ## Support Vector Machine object of class \"ksvm\" ## ## SV type: eps-svr (regression) ## parameter : epsilon = 0.1 cost C = 1 ## ## Gaussian Radial Basis kernel function. 
## Hyperparameter : sigma = 10.8262370251485 ## ## Number of Support Vectors : 2233 ## ## Objective Function Value : -746.584 ## Training error : 0.205567 predict(svm_reg_fit, Chicago_test) ## # A tibble: 7 × 1 ## .pred ## ## 1 20.7 ## 2 21.2 ## 3 21.3 ## 4 21.1 ## 5 19.4 ## 6 6.77 ## 7 6.13 library(tidymodels) tidymodels_prefer() data(two_class_dat) data_train <- two_class_dat[-(1:10), ] data_test <- two_class_dat[ 1:10 , ] svm_cls_spec <- svm_rbf(cost = 1) %>% # This model can be used for classification or regression, so set mode set_mode(\"classification\") %>% set_engine(\"kernlab\") svm_cls_spec ## Radial Basis Function Support Vector Machine Model Specification (classification) ## ## Main Arguments: ## cost = 1 ## ## Computational engine: kernlab set.seed(1) svm_cls_fit <- svm_cls_spec %>% fit(Class ~ ., data = data_train) svm_cls_fit ## parsnip model object ## ## Support Vector Machine object of class \"ksvm\" ## ## SV type: C-svc (classification) ## parameter : cost C = 1 ## ## Gaussian Radial Basis kernel function. ## Hyperparameter : sigma = 1.63216688499952 ## ## Number of Support Vectors : 327 ## ## Objective Function Value : -294.4344 ## Training error : 0.169014 ## Probability model included. bind_cols( predict(svm_cls_fit, data_test), predict(svm_cls_fit, data_test, type = \"prob\") ) ## # A tibble: 10 × 3 ## .pred_class .pred_Class1 .pred_Class2 ## ## 1 Class2 0.238 0.762 ## 2 Class1 0.905 0.0950 ## 3 Class1 0.619 0.381 ## 4 Class1 0.879 0.121 ## 5 Class1 0.641 0.359 ## 6 Class2 0.153 0.847 ## 7 Class1 0.745 0.255 ## 8 Class2 0.313 0.687 ## 9 Class1 0.878 0.122 ## 10 Class2 0.137 0.863"},{"path":"https://parsnip.tidymodels.org/dev/articles/parsnip.html","id":"motivation","dir":"Articles","previous_headings":"","what":"Motivation","title":"Introduction to parsnip","text":"Modeling functions across different R packages can different interfaces. like try different approaches, lot syntactical minutiae remember. problem worsens move -platforms (e.g. 
logistic regression R’s glm versus Spark’s implementation). parsnip tries solve providing similar interfaces models. example, fitting random forest model like adjust number trees forest different argument names remember: randomForest::randomForest uses ntree, ranger::ranger uses num.trees, Spark’s sparklyr::ml_random_forest uses num_trees. Rather remembering values, common interface models can used package makes translation trees real names implementations. terminology: model type differentiates models. Example types : random forests, logistic regression, linear support vector machines, etc. mode model denotes used. Two common modes classification regression. Others include “censored regression” “risk regression” (parametric Cox PH models censored data, respectively), well unsupervised models (e.g. “clustering”). computational engine indicates actual model might fit. often R packages (randomForest ranger) might also methods outside R (e.g. Stan, Spark, others). parsnip, similar ggplot2, dplyr recipes, separates specification want actual . allows us create broader functionality modeling.","code":"library(parsnip) rf_mod <- rand_forest(trees = 2000)"},{"path":"https://parsnip.tidymodels.org/dev/articles/parsnip.html","id":"placeholders-for-parameters","dir":"Articles","previous_headings":"","what":"Placeholders for Parameters","title":"Introduction to parsnip","text":"times like change parameter default sure final value . basis model tuning use tune package. Since model executing created, types parameters can changed using tune() function. provides simple placeholder value. 
come handy later fit model different values mtry.","code":"tune_mtry <- rand_forest(trees = 2000, mtry = tune()) tune_mtry #> Random Forest Model Specification (unknown mode) #> #> Main Arguments: #> mtry = tune() #> trees = 2000 #> #> Computational engine: ranger"},{"path":"https://parsnip.tidymodels.org/dev/articles/parsnip.html","id":"specifying-arguments","dir":"Articles","previous_headings":"","what":"Specifying Arguments","title":"Introduction to parsnip","text":"Commonly used arguments modeling functions parameters exposed function. example, rand_forest arguments : mtry: number predictors randomly sampled split creating tree models. trees: number trees contained ensemble. min_n: minimum number data points node required node split . arguments default function : However, might arguments like change allow vary. accessible using set_engine. example, ranger option set internal random number seed. set specific value:","code":"args(rand_forest) #> function (mode = \"unknown\", engine = \"ranger\", mtry = NULL, trees = NULL, #> min_n = NULL) #> NULL rf_with_seed <- rand_forest(trees = 2000, mtry = tune(), mode = \"regression\") %>% set_engine(\"ranger\", seed = 63233) rf_with_seed #> Random Forest Model Specification (regression) #> #> Main Arguments: #> mtry = tune() #> trees = 2000 #> #> Engine-Specific Arguments: #> seed = 63233 #> #> Computational engine: ranger"},{"path":"https://parsnip.tidymodels.org/dev/articles/parsnip.html","id":"process","dir":"Articles","previous_headings":"","what":"Process","title":"Introduction to parsnip","text":"fit model, must: defined model, including mode, tune() parameters, specify computational engine. example, rf_with_seed ready fitting due tune() parameter. can set parameter’s value create model fit: , using randomForest package: Note call objects show num.trees = ~2000. tilde consequence parsnip using quosures process model specification’s arguments. Normally, function executed, function’s arguments immediately evaluated. 
case parsnip, model specification’s arguments ; expression captured along environment evaluated. quosure . parsnip uses expressions make model fit call evaluated. tilde call reflects argument captured using quosure.","code":"rf_with_seed %>% set_args(mtry = 4) %>% set_engine(\"ranger\") %>% fit(mpg ~ ., data = mtcars) #> parsnip model object #> #> Ranger result #> #> Call: #> ranger::ranger(x = maybe_data_frame(x), y = y, mtry = min_cols(~4, x), num.trees = ~2000, num.threads = 1, verbose = FALSE, seed = sample.int(10^5, 1)) #> #> Type: Regression #> Number of trees: 2000 #> Sample size: 32 #> Number of independent variables: 10 #> Mtry: 4 #> Target node size: 5 #> Variable importance mode: none #> Splitrule: variance #> OOB prediction error (MSE): 5.57 #> R squared (OOB): 0.847 set.seed(56982) rf_with_seed %>% set_args(mtry = 4) %>% set_engine(\"randomForest\") %>% fit(mpg ~ ., data = mtcars) #> parsnip model object #> #> #> Call: #> randomForest(x = maybe_data_frame(x), y = y, ntree = ~2000, mtry = min_cols(~4, x)) #> Type of random forest: regression #> Number of trees: 2000 #> No. of variables tried at each split: 4 #> #> Mean of squared residuals: 5.52 #> % Var explained: 84.3"},{"path":"https://parsnip.tidymodels.org/dev/authors.html","id":null,"dir":"","previous_headings":"","what":"Authors","title":"Authors and Citation","text":"Max Kuhn. Author, maintainer. Davis Vaughan. Author. Emil Hvitfeldt. Contributor. . Copyright holder, funder.","code":""},{"path":"https://parsnip.tidymodels.org/dev/authors.html","id":"citation","dir":"","previous_headings":"","what":"Citation","title":"Authors and Citation","text":"Kuhn M, Vaughan D (2024). parsnip: Common API Modeling Analysis Functions. 
R package version 1.1.1.9007, https://parsnip.tidymodels.org/, https://github.com/tidymodels/parsnip.","code":"@Manual{, title = {parsnip: A Common API to Modeling and Analysis Functions}, author = {Max Kuhn and Davis Vaughan}, year = {2024}, note = {R package version 1.1.1.9007, https://parsnip.tidymodels.org/}, url = {https://github.com/tidymodels/parsnip}, }"},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/index.html","id":"introduction","dir":"","previous_headings":"","what":"Introduction","title":"A Common API to Modeling and Analysis Functions","text":"goal parsnip provide tidy, unified interface models can used try range models without getting bogged syntactical minutiae underlying packages.","code":""},{"path":"https://parsnip.tidymodels.org/dev/index.html","id":"installation","dir":"","previous_headings":"","what":"Installation","title":"A Common API to Modeling and Analysis Functions","text":"","code":"# The easiest way to get parsnip is to install all of tidymodels: install.packages(\"tidymodels\") # Alternatively, install just parsnip: install.packages(\"parsnip\") # Or the development version from GitHub: # install.packages(\"pak\") pak::pak(\"tidymodels/parsnip\")"},{"path":"https://parsnip.tidymodels.org/dev/index.html","id":"getting-started","dir":"","previous_headings":"","what":"Getting started","title":"A Common API to Modeling and Analysis Functions","text":"One challenge different modeling functions available R thing can different interfaces arguments. example, fit random forest regression model, might : Note model syntax can different argument names (formats) also different. pain switch implementations. example: type model “random forest”, mode model “regression” (opposed classification, etc), computational engine name R package. goals parsnip : Separate definition model evaluation. Decouple model specification implementation (whether implementation R, spark, something else). 
example, user call rand_forest instead ranger::ranger specific packages. Harmonize argument names (e.g. n.trees, ntrees, trees) users need remember single name. help across model types trees argument across random forest well boosting bagging. Using example , parsnip approach : engine can easily changed. use Spark, change straightforward: Either one model specifications can fit way: list parsnip models across different CRAN packages can found https://www.tidymodels.org/find/parsnip.","code":"# From randomForest rf_1 <- randomForest( y ~ ., data = dat, mtry = 10, ntree = 2000, importance = TRUE ) # From ranger rf_2 <- ranger( y ~ ., data = dat, mtry = 10, num.trees = 2000, importance = \"impurity\" ) # From sparklyr rf_3 <- ml_random_forest( dat, intercept = FALSE, response = \"y\", features = names(dat)[names(dat) != \"y\"], col.sample.rate = 10, num.trees = 2000 ) library(parsnip) rand_forest(mtry = 10, trees = 2000) %>% set_engine(\"ranger\", importance = \"impurity\") %>% set_mode(\"regression\") #> Random Forest Model Specification (regression) #> #> Main Arguments: #> mtry = 10 #> trees = 2000 #> #> Engine-Specific Arguments: #> importance = impurity #> #> Computational engine: ranger rand_forest(mtry = 10, trees = 2000) %>% set_engine(\"spark\") %>% set_mode(\"regression\") #> Random Forest Model Specification (regression) #> #> Main Arguments: #> mtry = 10 #> trees = 2000 #> #> Computational engine: spark set.seed(192) rand_forest(mtry = 10, trees = 2000) %>% set_engine(\"ranger\", importance = \"impurity\") %>% set_mode(\"regression\") %>% fit(mpg ~ ., data = mtcars) #> parsnip model object #> #> Ranger result #> #> Call: #> ranger::ranger(x = maybe_data_frame(x), y = y, mtry = min_cols(~10, x), num.trees = ~2000, importance = ~\"impurity\", num.threads = 1, verbose = FALSE, seed = sample.int(10^5, 1)) #> #> Type: Regression #> Number of trees: 2000 #> Sample size: 32 #> Number of independent variables: 10 #> Mtry: 10 #> Target node size: 5 #> Variable 
importance mode: impurity #> Splitrule: variance #> OOB prediction error (MSE): 5.976917 #> R squared (OOB): 0.8354559"},{"path":"https://parsnip.tidymodels.org/dev/index.html","id":"contributing","dir":"","previous_headings":"","what":"Contributing","title":"A Common API to Modeling and Analysis Functions","text":"project released Contributor Code Conduct. contributing project, agree abide terms. questions discussions tidymodels packages, modeling, machine learning, please post RStudio Community. think encountered bug, please submit issue. Either way, learn create share reprex (minimal, reproducible example), clearly communicate code. Check details contributing guidelines tidymodels packages get help.","code":""},{"path":"https://parsnip.tidymodels.org/dev/issue_template.html","id":null,"dir":"","previous_headings":"","what":"PLEASE READ: Making a new issue for parsnip","title":"PLEASE READ: Making a new issue for parsnip","text":"Please follow template . question related specific data analysis, please include minimal reprex (reproducible example). ’ve never heard reprex , start reading “reprex”, follow advice page. Tips: good example issue: #139 Issues without reprex lower priority others. don’t want use confidential data; can blind data simulate data demonstrate issue. functions caret::twoClassSim() caret::SLC14_1() might good tools simulate data . Unless problem explicitly parallel processing, please run sequentially. Even parallel processing, please make sure runs sequentially first. Please use set.seed() ensure randomness code reproducible. Please check https://stackoverflow.com/ https://community.rstudio.com/ see someone already asked question (see: Yihui’s Rule). 
might need install : ready file issue, please delete parts line: < – ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ –>","code":"install.packages(c(\"reprex\", \"sessioninfo\"), repos = \"http://cran.r-project.org\")"},{"path":"https://parsnip.tidymodels.org/dev/issue_template.html","id":"the-problem","dir":"","previous_headings":"","what":"The problem","title":"PLEASE READ: Making a new issue for parsnip","text":"’m trouble … considered …","code":""},{"path":"https://parsnip.tidymodels.org/dev/issue_template.html","id":"reproducible-example","dir":"","previous_headings":"","what":"Reproducible example","title":"PLEASE READ: Making a new issue for parsnip","text":"Copy code clipboard run:","code":"reprex::reprex(si = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/C5.0_train.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees via C5.0 — C5.0_train","title":"Boosted trees via C5.0 — C5.0_train","text":"C5.0_train wrapper C5.0() function C50 package fits tree-based models model arguments main function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/C5.0_train.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Boosted trees via C5.0 — C5.0_train","text":"","code":"C5.0_train(x, y, weights = NULL, trials = 15, minCases = 2, sample = 0, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/C5.0_train.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Boosted trees via C5.0 — C5.0_train","text":"x data frame matrix predictors. y factor vector 2 levels weights optional numeric vector case weights. Note data used case weights used splitting variable model (see https://www.rulequest.com/see5-info.html Quinlan's notes case weights). trials integer specifying number boosting iterations. value one indicates single model used. minCases integer smallest number samples must put least two splits. 
sample value (0, .999) specifies random proportion data used train model. default, samples used model training. Samples used training used evaluate accuracy model printed output. value zero means training data used. ... arguments pass.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/C5.0_train.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Boosted trees via C5.0 — C5.0_train","text":"fitted C5.0 model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/C5_rules.html","id":null,"dir":"Reference","previous_headings":"","what":"C5.0 rule-based classification models — C5_rules","title":"C5.0 rule-based classification models — C5_rules","text":"C5_rules() defines model derives feature rules tree prediction. single tree boosted ensemble can used. function can fit classification models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . C5.0¹² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/C5_rules.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"C5.0 rule-based classification models — C5_rules","text":"","code":"C5_rules(mode = \"classification\", trees = NULL, min_n = NULL, engine = \"C5.0\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/C5_rules.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"C5.0 rule-based classification models — C5_rules","text":"mode single character string type model. possible value model \"classification\". trees non-negative integer (greater 100) number members ensemble. min_n integer greater zero nine minimum number data points node required node split . 
engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/C5_rules.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"C5.0 rule-based classification models — C5_rules","text":"C5.0 classification model extension C4.5 model Quinlan (1993). tree- rule-based versions also include boosting capabilities. C5_rules() enables version model uses series rules (see examples ). make set rules, initial C5.0 tree created flattened rules. rules pruned, simplified, ordered. Rule sets created within iteration boosting. function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 C5_rules(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/C5_rules.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"C5.0 rule-based classification models — C5_rules","text":"Quinlan R (1993). C4.5: Programs Machine Learning. Morgan Kaufmann Publishers. https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/C5_rules.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"C5.0 rule-based classification models — C5_rules","text":"","code":"show_engines(\"C5_rules\") #> # A tibble: 0 × 2 #> # ℹ 2 variables: engine , mode C5_rules() #> ! parsnip could not locate an implementation for `C5_rules` model #> specifications. #> ℹ The parsnip extension package rules implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. 
#> C5.0 Model Specification (classification) #> #> Computational engine: C5.0 #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/add_on_exports.html","id":null,"dir":"Reference","previous_headings":"","what":"Functions required for parsnip-adjacent packages — null_value","title":"Functions required for parsnip-adjacent packages — null_value","text":"functions helpful creating new packages register new model specifications.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/add_on_exports.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Functions required for parsnip-adjacent packages — null_value","text":"","code":"null_value(x) show_fit(model, eng) check_args(object) update_dot_check(...) new_model_spec( cls, args, eng_args, mode, user_specified_mode = TRUE, method, engine, user_specified_engine = TRUE ) check_final_param(x) update_main_parameters(args, param) update_engine_parameters(eng_args, fresh, ...) print_model_spec(x, cls = class(x)[1], desc = get_model_desc(cls), ...) update_spec(object, parameters, args_enquo_list, fresh, cls, ...) 
is_varying(x)"},{"path":"https://parsnip.tidymodels.org/dev/reference/add_rowindex.html","id":null,"dir":"Reference","previous_headings":"","what":"Add a column of row numbers to a data frame — add_rowindex","title":"Add a column of row numbers to a data frame — add_rowindex","text":"Add column row numbers data frame","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/add_rowindex.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Add a column of row numbers to a data frame — add_rowindex","text":"","code":"add_rowindex(x)"},{"path":"https://parsnip.tidymodels.org/dev/reference/add_rowindex.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Add a column of row numbers to a data frame — add_rowindex","text":"x data frame","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/add_rowindex.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Add a column of row numbers to a data frame — add_rowindex","text":"data frame column 1-based integers named .row.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/add_rowindex.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Add a column of row numbers to a data frame — add_rowindex","text":"","code":"mtcars %>% add_rowindex() #> mpg cyl disp hp drat wt qsec vs am gear carb #> Mazda RX4 21.0 6 160.0 110 3.90 2.620 16.46 0 1 4 4 #> Mazda RX4 Wag 21.0 6 160.0 110 3.90 2.875 17.02 0 1 4 4 #> Datsun 710 22.8 4 108.0 93 3.85 2.320 18.61 1 1 4 1 #> Hornet 4 Drive 21.4 6 258.0 110 3.08 3.215 19.44 1 0 3 1 #> Hornet Sportabout 18.7 8 360.0 175 3.15 3.440 17.02 0 0 3 2 #> Valiant 18.1 6 225.0 105 2.76 3.460 20.22 1 0 3 1 #> Duster 360 14.3 8 360.0 245 3.21 3.570 15.84 0 0 3 4 #> Merc 240D 24.4 4 146.7 62 3.69 3.190 20.00 1 0 4 2 #> Merc 230 22.8 4 140.8 95 3.92 3.150 22.90 1 0 4 2 #> Merc 280 19.2 6 167.6 123 3.92 3.440 18.30 1 0 4 4 #> Merc 280C 
17.8 6 167.6 123 3.92 3.440 18.90 1 0 4 4 #> Merc 450SE 16.4 8 275.8 180 3.07 4.070 17.40 0 0 3 3 #> Merc 450SL 17.3 8 275.8 180 3.07 3.730 17.60 0 0 3 3 #> Merc 450SLC 15.2 8 275.8 180 3.07 3.780 18.00 0 0 3 3 #> Cadillac Fleetwood 10.4 8 472.0 205 2.93 5.250 17.98 0 0 3 4 #> Lincoln Continental 10.4 8 460.0 215 3.00 5.424 17.82 0 0 3 4 #> Chrysler Imperial 14.7 8 440.0 230 3.23 5.345 17.42 0 0 3 4 #> Fiat 128 32.4 4 78.7 66 4.08 2.200 19.47 1 1 4 1 #> Honda Civic 30.4 4 75.7 52 4.93 1.615 18.52 1 1 4 2 #> Toyota Corolla 33.9 4 71.1 65 4.22 1.835 19.90 1 1 4 1 #> Toyota Corona 21.5 4 120.1 97 3.70 2.465 20.01 1 0 3 1 #> Dodge Challenger 15.5 8 318.0 150 2.76 3.520 16.87 0 0 3 2 #> AMC Javelin 15.2 8 304.0 150 3.15 3.435 17.30 0 0 3 2 #> Camaro Z28 13.3 8 350.0 245 3.73 3.840 15.41 0 0 3 4 #> Pontiac Firebird 19.2 8 400.0 175 3.08 3.845 17.05 0 0 3 2 #> Fiat X1-9 27.3 4 79.0 66 4.08 1.935 18.90 1 1 4 1 #> Porsche 914-2 26.0 4 120.3 91 4.43 2.140 16.70 0 1 5 2 #> Lotus Europa 30.4 4 95.1 113 3.77 1.513 16.90 1 1 5 2 #> Ford Pantera L 15.8 8 351.0 264 4.22 3.170 14.50 0 1 5 4 #> Ferrari Dino 19.7 6 145.0 175 3.62 2.770 15.50 0 1 5 6 #> Maserati Bora 15.0 8 301.0 335 3.54 3.570 14.60 0 1 5 8 #> Volvo 142E 21.4 4 121.0 109 4.11 2.780 18.60 1 1 4 2 #> .row #> Mazda RX4 1 #> Mazda RX4 Wag 2 #> Datsun 710 3 #> Hornet 4 Drive 4 #> Hornet Sportabout 5 #> Valiant 6 #> Duster 360 7 #> Merc 240D 8 #> Merc 230 9 #> Merc 280 10 #> Merc 280C 11 #> Merc 450SE 12 #> Merc 450SL 13 #> Merc 450SLC 14 #> Cadillac Fleetwood 15 #> Lincoln Continental 16 #> Chrysler Imperial 17 #> Fiat 128 18 #> Honda Civic 19 #> Toyota Corolla 20 #> Toyota Corona 21 #> Dodge Challenger 22 #> AMC Javelin 23 #> Camaro Z28 24 #> Pontiac Firebird 25 #> Fiat X1-9 26 #> Porsche 914-2 27 #> Lotus Europa 28 #> Ford Pantera L 29 #> Ferrari Dino 30 #> Maserati Bora 31 #> Volvo 142E 
32"},{"path":"https://parsnip.tidymodels.org/dev/reference/augment.html","id":null,"dir":"Reference","previous_headings":"","what":"Augment data with predictions — augment.model_fit","title":"Augment data with predictions — augment.model_fit","text":"augment() add column(s) predictions given data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/augment.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Augment data with predictions — augment.model_fit","text":"","code":"# S3 method for model_fit augment(x, new_data, eval_time = NULL, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/augment.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Augment data with predictions — augment.model_fit","text":"x model_fit object produced fit.model_spec() fit_xy.model_spec(). new_data data frame matrix. eval_time censored regression models, vector time points survival probability estimated. ... currently used.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/augment.html","id":"regression","dir":"Reference","previous_headings":"","what":"Regression","title":"Augment data with predictions — augment.model_fit","text":"regression models, .pred column added. x created using fit.model_spec() new_data contains regression outcome column, .resid column also added.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/augment.html","id":"classification","dir":"Reference","previous_headings":"","what":"Classification","title":"Augment data with predictions — augment.model_fit","text":"classification models, results can include column called .pred_class well class probability columns named .pred_{level}. 
depends type prediction types available model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/augment.html","id":"censored-regression","dir":"Reference","previous_headings":"","what":"Censored Regression","title":"Augment data with predictions — augment.model_fit","text":"models, predictions expected time survival probability created (model engine supports ). model supports survival prediction, eval_time argument required. survival predictions created new_data contains survival::Surv() object, additional columns added inverse probability censoring weights (IPCW) also created (see tidymodels.org page references ). enables user compute performance metrics yardstick package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/augment.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Augment data with predictions — augment.model_fit","text":"https://www.tidymodels.org/learn/statistics/survival-metrics/","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/augment.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Augment data with predictions — augment.model_fit","text":"","code":"car_trn <- mtcars[11:32,] car_tst <- mtcars[ 1:10,] reg_form <- linear_reg() %>% set_engine(\"lm\") %>% fit(mpg ~ ., data = car_trn) reg_xy <- linear_reg() %>% set_engine(\"lm\") %>% fit_xy(car_trn[, -1], car_trn$mpg) augment(reg_form, car_tst) #> # A tibble: 10 × 13 #> .pred .resid mpg cyl disp hp drat wt qsec vs am #> #> 1 23.4 -2.43 21 6 160 110 3.9 2.62 16.5 0 1 #> 2 23.3 -2.30 21 6 160 110 3.9 2.88 17.0 0 1 #> 3 27.6 -4.83 22.8 4 108 93 3.85 2.32 18.6 1 1 #> 4 21.5 -0.147 21.4 6 258 110 3.08 3.22 19.4 1 0 #> 5 17.6 1.13 18.7 8 360 175 3.15 3.44 17.0 0 0 #> 6 21.6 -3.48 18.1 6 225 105 2.76 3.46 20.2 1 0 #> 7 13.9 0.393 14.3 8 360 245 3.21 3.57 15.8 0 0 #> 8 21.7 2.70 24.4 4 147. 62 3.69 3.19 20 1 0 #> 9 25.6 -2.81 22.8 4 141. 
95 3.92 3.15 22.9 1 0 #> 10 17.1 2.09 19.2 6 168. 123 3.92 3.44 18.3 1 0 #> # ℹ 2 more variables: gear , carb augment(reg_form, car_tst[, -1]) #> # A tibble: 10 × 11 #> .pred cyl disp hp drat wt qsec vs am gear carb #> #> 1 23.4 6 160 110 3.9 2.62 16.5 0 1 4 4 #> 2 23.3 6 160 110 3.9 2.88 17.0 0 1 4 4 #> 3 27.6 4 108 93 3.85 2.32 18.6 1 1 4 1 #> 4 21.5 6 258 110 3.08 3.22 19.4 1 0 3 1 #> 5 17.6 8 360 175 3.15 3.44 17.0 0 0 3 2 #> 6 21.6 6 225 105 2.76 3.46 20.2 1 0 3 1 #> 7 13.9 8 360 245 3.21 3.57 15.8 0 0 3 4 #> 8 21.7 4 147. 62 3.69 3.19 20 1 0 4 2 #> 9 25.6 4 141. 95 3.92 3.15 22.9 1 0 4 2 #> 10 17.1 6 168. 123 3.92 3.44 18.3 1 0 4 4 augment(reg_xy, car_tst) #> # A tibble: 10 × 12 #> .pred mpg cyl disp hp drat wt qsec vs am gear carb #> #> 1 23.4 21 6 160 110 3.9 2.62 16.5 0 1 4 4 #> 2 23.3 21 6 160 110 3.9 2.88 17.0 0 1 4 4 #> 3 27.6 22.8 4 108 93 3.85 2.32 18.6 1 1 4 1 #> 4 21.5 21.4 6 258 110 3.08 3.22 19.4 1 0 3 1 #> 5 17.6 18.7 8 360 175 3.15 3.44 17.0 0 0 3 2 #> 6 21.6 18.1 6 225 105 2.76 3.46 20.2 1 0 3 1 #> 7 13.9 14.3 8 360 245 3.21 3.57 15.8 0 0 3 4 #> 8 21.7 24.4 4 147. 62 3.69 3.19 20 1 0 4 2 #> 9 25.6 22.8 4 141. 95 3.92 3.15 22.9 1 0 4 2 #> 10 17.1 19.2 6 168. 123 3.92 3.44 18.3 1 0 4 4 augment(reg_xy, car_tst[, -1]) #> # A tibble: 10 × 11 #> .pred cyl disp hp drat wt qsec vs am gear carb #> #> 1 23.4 6 160 110 3.9 2.62 16.5 0 1 4 4 #> 2 23.3 6 160 110 3.9 2.88 17.0 0 1 4 4 #> 3 27.6 4 108 93 3.85 2.32 18.6 1 1 4 1 #> 4 21.5 6 258 110 3.08 3.22 19.4 1 0 3 1 #> 5 17.6 8 360 175 3.15 3.44 17.0 0 0 3 2 #> 6 21.6 6 225 105 2.76 3.46 20.2 1 0 3 1 #> 7 13.9 8 360 245 3.21 3.57 15.8 0 0 3 4 #> 8 21.7 4 147. 62 3.69 3.19 20 1 0 4 2 #> 9 25.6 4 141. 95 3.92 3.15 22.9 1 0 4 2 #> 10 17.1 6 168. 
123 3.92 3.44 18.3 1 0 4 4 # ------------------------------------------------------------------------------ data(two_class_dat, package = \"modeldata\") cls_trn <- two_class_dat[-(1:10), ] cls_tst <- two_class_dat[ 1:10 , ] cls_form <- logistic_reg() %>% set_engine(\"glm\") %>% fit(Class ~ ., data = cls_trn) cls_xy <- logistic_reg() %>% set_engine(\"glm\") %>% fit_xy(cls_trn[, -3], cls_trn$Class) augment(cls_form, cls_tst) #> # A tibble: 10 × 6 #> .pred_class .pred_Class1 .pred_Class2 A B Class #> #> 1 Class1 0.518 0.482 2.07 1.63 Class1 #> 2 Class1 0.909 0.0913 2.02 1.04 Class1 #> 3 Class1 0.648 0.352 1.69 1.37 Class2 #> 4 Class1 0.610 0.390 3.43 1.98 Class2 #> 5 Class2 0.443 0.557 2.88 1.98 Class1 #> 6 Class2 0.206 0.794 3.31 2.41 Class2 #> 7 Class1 0.708 0.292 2.50 1.56 Class2 #> 8 Class1 0.567 0.433 1.98 1.55 Class2 #> 9 Class1 0.994 0.00582 2.88 0.580 Class1 #> 10 Class2 0.108 0.892 3.74 2.74 Class2 augment(cls_form, cls_tst[, -3]) #> # A tibble: 10 × 5 #> .pred_class .pred_Class1 .pred_Class2 A B #> #> 1 Class1 0.518 0.482 2.07 1.63 #> 2 Class1 0.909 0.0913 2.02 1.04 #> 3 Class1 0.648 0.352 1.69 1.37 #> 4 Class1 0.610 0.390 3.43 1.98 #> 5 Class2 0.443 0.557 2.88 1.98 #> 6 Class2 0.206 0.794 3.31 2.41 #> 7 Class1 0.708 0.292 2.50 1.56 #> 8 Class1 0.567 0.433 1.98 1.55 #> 9 Class1 0.994 0.00582 2.88 0.580 #> 10 Class2 0.108 0.892 3.74 2.74 augment(cls_xy, cls_tst) #> # A tibble: 10 × 6 #> .pred_class .pred_Class1 .pred_Class2 A B Class #> #> 1 Class1 0.518 0.482 2.07 1.63 Class1 #> 2 Class1 0.909 0.0913 2.02 1.04 Class1 #> 3 Class1 0.648 0.352 1.69 1.37 Class2 #> 4 Class1 0.610 0.390 3.43 1.98 Class2 #> 5 Class2 0.443 0.557 2.88 1.98 Class1 #> 6 Class2 0.206 0.794 3.31 2.41 Class2 #> 7 Class1 0.708 0.292 2.50 1.56 Class2 #> 8 Class1 0.567 0.433 1.98 1.55 Class2 #> 9 Class1 0.994 0.00582 2.88 0.580 Class1 #> 10 Class2 0.108 0.892 3.74 2.74 Class2 augment(cls_xy, cls_tst[, -3]) #> # A tibble: 10 × 5 #> .pred_class .pred_Class1 .pred_Class2 A B #> #> 1 Class1 
0.518 0.482 2.07 1.63 #> 2 Class1 0.909 0.0913 2.02 1.04 #> 3 Class1 0.648 0.352 1.69 1.37 #> 4 Class1 0.610 0.390 3.43 1.98 #> 5 Class2 0.443 0.557 2.88 1.98 #> 6 Class2 0.206 0.794 3.31 2.41 #> 7 Class1 0.708 0.292 2.50 1.56 #> 8 Class1 0.567 0.433 1.98 1.55 #> 9 Class1 0.994 0.00582 2.88 0.580 #> 10 Class2 0.108 0.892 3.74 2.74"},{"path":"https://parsnip.tidymodels.org/dev/reference/auto_ml.html","id":null,"dir":"Reference","previous_headings":"","what":"Automatic Machine Learning — auto_ml","title":"Automatic Machine Learning — auto_ml","text":"auto_ml() defines automated searching tuning process many models different families trained ranked given performance training data. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . h2o¹² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/auto_ml.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Automatic Machine Learning — auto_ml","text":"","code":"auto_ml(mode = \"unknown\", engine = \"h2o\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/auto_ml.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Automatic Machine Learning — auto_ml","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/auto_ml.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Automatic Machine Learning — auto_ml","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 auto_ml(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/auto_ml.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Automatic Machine Learning — auto_ml","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/autoplot.model_fit.html","id":null,"dir":"Reference","previous_headings":"","what":"Create a ggplot for a model object — autoplot.model_fit","title":"Create a ggplot for a model object — autoplot.model_fit","text":"method provides good visualization method model results. Currently, methods glmnet models implemented.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/autoplot.model_fit.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Create a ggplot for a model object — autoplot.model_fit","text":"","code":"# S3 method for model_fit autoplot(object, ...) # S3 method for glmnet autoplot(object, ..., min_penalty = 0, best_penalty = NULL, top_n = 3L)"},{"path":"https://parsnip.tidymodels.org/dev/reference/autoplot.model_fit.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Create a ggplot for a model object — autoplot.model_fit","text":"object model fit object. ... autoplot.glmnet(), options pass ggrepel::geom_label_repel(). Otherwise, argument ignored. min_penalty single, non-negative number smallest penalty value shown plot. left NULL, whole data range used. best_penalty single, non-negative number show vertical line marker. left NULL, line shown. argument used, ggrepl package required. top_n non-negative integer many model predictors label. top predictors ranked absolute coefficient value. 
multinomial multivariate models, top_n terms selected within class response, respectively.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/autoplot.model_fit.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Create a ggplot for a model object — autoplot.model_fit","text":"ggplot object penalty x-axis coefficients y-axis. multinomial multivariate models, plot faceted.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/autoplot.model_fit.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Create a ggplot for a model object — autoplot.model_fit","text":"glmnet package need attached loaded autoplot() method work correctly.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mars.html","id":null,"dir":"Reference","previous_headings":"","what":"Ensembles of MARS models — bag_mars","title":"Ensembles of MARS models — bag_mars","text":"bag_mars() defines ensemble generalized linear models use artificial features predictors. features resemble hinge functions result model segmented regression small dimensions. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . earth¹² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mars.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Ensembles of MARS models — bag_mars","text":"","code":"bag_mars( mode = \"unknown\", num_terms = NULL, prod_degree = NULL, prune_method = NULL, engine = \"earth\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mars.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Ensembles of MARS models — bag_mars","text":"mode single character string prediction outcome mode. 
Possible values model \"unknown\", \"regression\", \"classification\". num_terms number features retained final model, including intercept. prod_degree highest possible interaction degree. prune_method pruning method. engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mars.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Ensembles of MARS models — bag_mars","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 bag_mars(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mars.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Ensembles of MARS models — bag_mars","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mlp.html","id":null,"dir":"Reference","previous_headings":"","what":"Ensembles of neural networks — bag_mlp","title":"Ensembles of neural networks — bag_mlp","text":"bag_mlp() defines ensemble single layer, feed-forward neural networks. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
nnet¹² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mlp.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Ensembles of neural networks — bag_mlp","text":"","code":"bag_mlp( mode = \"unknown\", hidden_units = NULL, penalty = NULL, epochs = NULL, engine = \"nnet\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mlp.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Ensembles of neural networks — bag_mlp","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". hidden_units integer number units hidden model. penalty non-negative numeric value amount weight decay. epochs integer number training iterations. engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mlp.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Ensembles of neural networks — bag_mlp","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 bag_mlp(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_mlp.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Ensembles of neural networks — bag_mlp","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_tree.html","id":null,"dir":"Reference","previous_headings":"","what":"Ensembles of decision trees — bag_tree","title":"Ensembles of decision trees — bag_tree","text":"bag_tree() defines ensemble decision trees. function can fit classification, regression, censored regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . rpart¹² C5.0² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_tree.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Ensembles of decision trees — bag_tree","text":"","code":"bag_tree( mode = \"unknown\", cost_complexity = 0, tree_depth = NULL, min_n = 2, class_cost = NULL, engine = \"rpart\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_tree.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Ensembles of decision trees — bag_tree","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\", \"censored regression\". cost_complexity positive number cost/complexity parameter (.k.. Cp) used CART models (specific engines ). tree_depth integer maximum depth tree (.e. number splits) (specific engines ). min_n integer minimum number data points node required node split . class_cost non-negative scalar class cost (cost 1 means extra cost). 
useful first level outcome factor minority class. case, values zero one can used bias second level factor. engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_tree.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Ensembles of decision trees — bag_tree","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 bag_tree(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/bag_tree.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Ensembles of decision trees — bag_tree","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/bart-internal.html","id":null,"dir":"Reference","previous_headings":"","what":"Developer functions for predictions via BART models — bart-internal","title":"Developer functions for predictions via BART models — bart-internal","text":"Developer functions predictions via BART models","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bart-internal.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Developer functions for predictions via BART models — bart-internal","text":"","code":"bartMachine_interval_calc(new_data, obj, ci = TRUE, level = 0.95) dbart_predict_calc(obj, new_data, type, level = 0.95, std_err = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/bart-internal.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Developer functions for predictions via BART models — 
bart-internal","text":"new_data rectangular data object, data frame. obj parsnip object. ci Confidence (TRUE) prediction interval (FALSE) level Confidence level. type single character value NULL. Possible values \"numeric\", \"class\", \"prob\", \"conf_int\", \"pred_int\", \"quantile\", \"time\", \"hazard\", \"survival\", \"raw\". NULL, predict() choose appropriate value based model's mode. std_err Attach column standard error prediction .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bart.html","id":null,"dir":"Reference","previous_headings":"","what":"Bayesian additive regression trees (BART) — bart","title":"Bayesian additive regression trees (BART) — bart","text":"bart() defines tree ensemble model uses Bayesian analysis assemble ensemble. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . dbarts¹ information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bart.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Bayesian additive regression trees (BART) — bart","text":"","code":"bart( mode = \"unknown\", engine = \"dbarts\", trees = NULL, prior_terminal_node_coef = NULL, prior_terminal_node_expo = NULL, prior_outcome_range = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/bart.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Bayesian additive regression trees (BART) — bart","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". engine single character string specifying computational engine use fitting. trees integer number trees contained ensemble. prior_terminal_node_coef coefficient prior probability node terminal node. Values usually 0 one default 0.95. 
affects baseline probability; smaller numbers make probabilities larger overall. See Details . prior_terminal_node_expo exponent prior probability node terminal node. Values usually non-negative default 2 affects rate prior probability decreases depth tree increases. Larger values make deeper trees less likely. prior_outcome_range positive value defines width prior predicted outcome within certain range. regression related observed range data; prior number standard deviations Gaussian distribution defined observed range data. classification, defined range +/-3 (assumed logit scale). default value 2.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/bart.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Bayesian additive regression trees (BART) — bart","text":"prior terminal node probability expressed prior = * (1 + d)^(-b) d depth node, prior_terminal_node_coef b prior_terminal_node_expo. See Examples section example graph prior probability terminal node different values parameters. function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 bart(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/bart.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Bayesian additive regression trees (BART) — bart","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/bart.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Bayesian additive regression trees (BART) — bart","text":"","code":"show_engines(\"bart\") #> # A tibble: 2 × 2 #> engine mode #> #> 1 dbarts classification #> 2 dbarts regression bart(mode = \"regression\", trees = 5) #> BART Model Specification (regression) #> #> Main Arguments: #> trees = 5 #> #> Computational engine: dbarts #> # ------------------------------------------------------------------------------ # Examples for terminal node prior library(ggplot2) library(dplyr) #> #> Attaching package: ‘dplyr’ #> The following objects are masked from ‘package:stats’: #> #> filter, lag #> The following objects are masked from ‘package:base’: #> #> intersect, setdiff, setequal, union prior_test <- function(coef = 0.95, expo = 2, depths = 1:10) { tidyr::crossing(coef = coef, expo = expo, depth = depths) %>% mutate( `terminial node prior` = coef * (1 + depth)^(-expo), coef = format(coef), expo = format(expo)) } prior_test(coef = c(0.05, 0.5, .95), expo = c(1/2, 1, 2)) %>% ggplot(aes(depth, `terminial node prior`, col = coef)) + geom_line() + geom_point() + facet_wrap(~ expo)"},{"path":"https://parsnip.tidymodels.org/dev/reference/boost_tree.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees — boost_tree","title":"Boosted trees — boost_tree","text":"boost_tree() defines model creates series decision trees forming ensemble. tree depends results previous trees. 
trees ensemble combined produce final prediction. function can fit classification, regression, censored regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . xgboost¹ C5.0 h2o² lightgbm² mboost² spark information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/boost_tree.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Boosted trees — boost_tree","text":"","code":"boost_tree( mode = \"unknown\", engine = \"xgboost\", mtry = NULL, trees = NULL, min_n = NULL, tree_depth = NULL, learn_rate = NULL, loss_reduction = NULL, sample_size = NULL, stop_iter = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/boost_tree.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Boosted trees — boost_tree","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\", \"censored regression\". engine single character string specifying computational engine use fitting. mtry number number (proportion) predictors randomly sampled split creating tree models (specific engines ). trees integer number trees contained ensemble. min_n integer minimum number data points node required node split . tree_depth integer maximum depth tree (.e. number splits) (specific engines ). learn_rate number rate boosting algorithm adapts iteration--iteration (specific engines ). sometimes referred shrinkage parameter. loss_reduction number reduction loss function required split (specific engines ). sample_size number number (proportion) data exposed fitting routine. xgboost, sampling done iteration C5.0 samples training. 
stop_iter number iterations without improvement stopping (specific engines ).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/boost_tree.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Boosted trees — boost_tree","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 boost_tree(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/boost_tree.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Boosted trees — boost_tree","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/boost_tree.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Boosted trees — boost_tree","text":"","code":"show_engines(\"boost_tree\") #> # A tibble: 5 × 2 #> engine mode #> #> 1 xgboost classification #> 2 xgboost regression #> 3 C5.0 classification #> 4 spark classification #> 5 spark regression boost_tree(mode = \"classification\", trees = 20) #> Boosted Tree Model Specification (classification) #> #> Main Arguments: #> trees = 20 #> #> Computational engine: xgboost #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/case_weights.html","id":null,"dir":"Reference","previous_headings":"","what":"Using case weights with parsnip — case_weights","title":"Using case weights with parsnip — case_weights","text":"Case weights positive numeric values influence much data point model fitting process. 
variety situations case weights can used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/case_weights.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Using case weights with parsnip — case_weights","text":"tidymodels packages differentiate different types case weights used entire data analysis process, including preprocessing data, model fitting, performance calculations, etc. tidymodels packages require users convert numeric vectors vector class reflects used. example, situations weights affect operations centering scaling preprocessing operations. types weights allowed tidymodels : Frequency weights via hardhat::frequency_weights() Importance weights via hardhat::importance_weights() types can added request. parsnip, fit() fit_xy functions contain case_weight argument takes data. Spark models, argument value character value.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/case_weights_allowed.html","id":null,"dir":"Reference","previous_headings":"","what":"Determine if case weights are used — case_weights_allowed","title":"Determine if case weights are used — case_weights_allowed","text":"modeling engines can incorporate case weights calculations. 
function can determine whether can used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/case_weights_allowed.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Determine if case weights are used — case_weights_allowed","text":"","code":"case_weights_allowed(spec)"},{"path":"https://parsnip.tidymodels.org/dev/reference/case_weights_allowed.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Determine if case weights are used — case_weights_allowed","text":"spec parsnip model specification.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/case_weights_allowed.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Determine if case weights are used — case_weights_allowed","text":"single logical.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/case_weights_allowed.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Determine if case weights are used — case_weights_allowed","text":"","code":"case_weights_allowed(linear_reg()) #> [1] TRUE case_weights_allowed(linear_reg(engine = \"keras\")) #> [1] FALSE"},{"path":"https://parsnip.tidymodels.org/dev/reference/censoring_weights.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculations for inverse probability of censoring weights (IPCW) — censoring_weights","title":"Calculations for inverse probability of censoring weights (IPCW) — censoring_weights","text":"method Graf et al (1999) used compute weights specific evaluation times can used help measure model's time-dependent performance (e.g. time-dependent Brier score area ROC curve). 
internal function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/censoring_weights.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculations for inverse probability of censoring weights (IPCW) — censoring_weights","text":"","code":".censoring_weights_graf(object, ...) # S3 method for default .censoring_weights_graf(object, ...) # S3 method for model_fit .censoring_weights_graf( object, predictions, cens_predictors = NULL, trunc = 0.05, eps = 10^-10, ... )"},{"path":"https://parsnip.tidymodels.org/dev/reference/censoring_weights.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculations for inverse probability of censoring weights (IPCW) — censoring_weights","text":"object fitted parsnip model object fitted workflow mode \"censored regression\". predictions data frame column containing survival::Surv() object well list column called .pred contains data structure produced predict.model_fit(). cens_predictors currently used. potential future slot models informative censoring based columns predictions. trunc potential lower bound probability censoring avoid large weight values. eps small value subtracted evaluation time computing censoring probabilities. See Details .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/censoring_weights.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Calculations for inverse probability of censoring weights (IPCW) — censoring_weights","text":"data returned pred tibbles containing several new columns: .weight_time: time inverse censoring probability weights computed. function observed time time analysis (.e., eval_time). See Details information. .pred_censored: probability censored .weight_time. 
.weight_censored: inverse censoring probability.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/censoring_weights.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Calculations for inverse probability of censoring weights (IPCW) — censoring_weights","text":"probability data censored immediately prior specific time computed. , must determine time make prediction. two time values row data set: observed time (either censored ) time model evaluated (e.g. survival function prediction time point), constant across rows. . Graf et al (1999) three cases: observed time censoring time evaluation time, data point make contribution performance metric (\"category 3\"). values missing value probability estimate (also weight column). observed time corresponds actual event, time prior evaluation time (category 1), probability censored predicted observed time (minus epsilon). observed time evaluation time (category 2), regardless status, probability censored predicted evaluation time (minus epsilon). epsilon used since, actual information time t data point predicted time t (data prior time t available). censoring probability computed, trunc option used avoid using numbers pathologically close zero. , weight computed inverting censoring probability. eps argument used avoid information leakage computing censoring probability. Subtracting small number avoids using data known time prediction. example, making survival probability predictions eval_time = 3.0, know probability censored exact time (since occurred yet). creating weights inverting probabilities, risk cases severe outliers due probabilities close zero. mitigate , trunc argument can used put cap weights. smallest probability greater trunc, probabilities values less trunc given value. Otherwise, trunc adjusted half smallest probability value used lower bound.. Note n rows data t time points, resulting data, unnested, n * t rows. 
Computations easily scale well t becomes large.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/censoring_weights.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Calculations for inverse probability of censoring weights (IPCW) — censoring_weights","text":"Graf, E., Schmoor, C., Sauerbrei, W. Schumacher, M. (1999), Assessment comparison prognostic classification schemes survival data. Statist. Med., 18: 2529-2545.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/check_empty_ellipse.html","id":null,"dir":"Reference","previous_headings":"","what":"Check to ensure that ellipses are empty — check_empty_ellipse","title":"Check to ensure that ellipses are empty — check_empty_ellipse","text":"Check ensure ellipses empty","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/check_empty_ellipse.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Check to ensure that ellipses are empty — check_empty_ellipse","text":"","code":"check_empty_ellipse(...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/check_empty_ellipse.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Check to ensure that ellipses are empty — check_empty_ellipse","text":"... 
Extra arguments.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/check_empty_ellipse.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Check to ensure that ellipses are empty — check_empty_ellipse","text":"error thrown (non-empty ellipses), NULL list.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/condense_control.html","id":null,"dir":"Reference","previous_headings":"","what":"Condense control object into strictly smaller control object — condense_control","title":"Condense control object into strictly smaller control object — condense_control","text":"function used help hierarchy control functions used throughout tidymodels packages. now assumed control function either subset superset another control function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/condense_control.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Condense control object into strictly smaller control object — condense_control","text":"","code":"condense_control(x, ref)"},{"path":"https://parsnip.tidymodels.org/dev/reference/condense_control.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Condense control object into strictly smaller control object — condense_control","text":"x control object condensed. 
ref control object used determine element kept.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/condense_control.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Condense control object into strictly smaller control object — condense_control","text":"control object elements classes ref, values x.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/condense_control.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Condense control object into strictly smaller control object — condense_control","text":"","code":"ctrl <- control_parsnip(catch = TRUE) ctrl$allow_par <- TRUE str(ctrl) #> List of 3 #> $ verbosity: int 1 #> $ catch : logi TRUE #> $ allow_par: logi TRUE #> - attr(*, \"class\")= chr \"control_parsnip\" ctrl <- condense_control(ctrl, control_parsnip()) str(ctrl) #> List of 2 #> $ verbosity: int 1 #> $ catch : logi TRUE #> - attr(*, \"class\")= chr \"control_parsnip\""},{"path":"https://parsnip.tidymodels.org/dev/reference/contr_one_hot.html","id":null,"dir":"Reference","previous_headings":"","what":"Contrast function for one-hot encodings — contr_one_hot","title":"Contrast function for one-hot encodings — contr_one_hot","text":"contrast function produces model matrix indicator columns level factor.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/contr_one_hot.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Contrast function for one-hot encodings — contr_one_hot","text":"","code":"contr_one_hot(n, contrasts = TRUE, sparse = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/contr_one_hot.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Contrast function for one-hot encodings — contr_one_hot","text":"n vector character factor levels number unique levels. contrasts argument backwards compatibility default TRUE supported. 
sparse argument backwards compatibility default FALSE supported.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/contr_one_hot.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Contrast function for one-hot encodings — contr_one_hot","text":"diagonal matrix n--n.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/contr_one_hot.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Contrast function for one-hot encodings — contr_one_hot","text":"default, model.matrix() generates binary indicator variables factor predictors. formula remove intercept, incomplete set indicators created; indicator made first level factor. example, species island three levels model.matrix() creates two indicator variables : formula intercept, first factor expanded indicators factor levels factors expanded one (): inference, hybrid encoding can problematic. generate indicators, use contrast: Removing intercept affect factor encodings.","code":"library(dplyr) library(modeldata) data(penguins) levels(penguins$species) ## [1] \"Adelie\" \"Chinstrap\" \"Gentoo\" levels(penguins$island) ## [1] \"Biscoe\" \"Dream\" \"Torgersen\" model.matrix(~ species + island, data = penguins) %>% colnames() ## [1] \"(Intercept)\" \"speciesChinstrap\" \"speciesGentoo\" \"islandDream\" ## [5] \"islandTorgersen\" model.matrix(~ 0 + species + island, data = penguins) %>% colnames() ## [1] \"speciesAdelie\" \"speciesChinstrap\" \"speciesGentoo\" \"islandDream\" ## [5] \"islandTorgersen\" # Switch out the contrast method old_contr <- options(\"contrasts\")$contrasts new_contr <- old_contr new_contr[\"unordered\"] <- \"contr_one_hot\" options(contrasts = new_contr) model.matrix(~ species + island, data = penguins) %>% colnames() ## [1] \"(Intercept)\" \"speciesAdelie\" \"speciesChinstrap\" \"speciesGentoo\" ## [5] \"islandBiscoe\" \"islandDream\" \"islandTorgersen\" options(contrasts = 
old_contr)"},{"path":"https://parsnip.tidymodels.org/dev/reference/control_parsnip.html","id":null,"dir":"Reference","previous_headings":"","what":"Control the fit function — control_parsnip","title":"Control the fit function — control_parsnip","text":"Pass options fit.model_spec() function control output computations","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/control_parsnip.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Control the fit function — control_parsnip","text":"","code":"control_parsnip(verbosity = 1L, catch = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/control_parsnip.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Control the fit function — control_parsnip","text":"verbosity integer control verbose output . value zero, messages output shown packages loaded model fit. value 1, package loading quiet model fits can produce output screen (depending contain verbose-type argument). value 2 , output displayed execution time fit recorded printed. catch logical value TRUE evaluate model inside try(, silent = TRUE). 
model fails, object still returned (without error) inherits class \"try-error\".","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/control_parsnip.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Control the fit function — control_parsnip","text":"S3 object class \"control_parsnip\" named list results function call","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/control_parsnip.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Control the fit function — control_parsnip","text":"","code":"control_parsnip(verbosity = 2L) #> parsnip control object #> - verbose level 2"},{"path":"https://parsnip.tidymodels.org/dev/reference/convert_helpers.html","id":null,"dir":"Reference","previous_headings":"","what":"Helper functions to convert between formula and matrix interface — .convert_form_to_xy_fit","title":"Helper functions to convert between formula and matrix interface — .convert_form_to_xy_fit","text":"Functions take formula interface get resulting objects (y, x, weights, etc) back way around. functions intended developer use. part, emulates internals lm() (also see notes https://developer.r-project.org/model-fitting-functions.html). .convert_form_to_xy_fit() .convert_xy_to_form_fit() data created modeling. .convert_form_to_xy_fit() saves data objects well objects needed new data predicted (e.g. terms, etc.). 
.convert_form_to_xy_new() .convert_xy_to_form_new() used new samples predicted require predictors available.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/convert_helpers.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Helper functions to convert between formula and matrix interface — .convert_form_to_xy_fit","text":"","code":".convert_form_to_xy_fit( formula, data, ..., na.action = na.omit, indicators = \"traditional\", composition = \"data.frame\", remove_intercept = TRUE ) .convert_form_to_xy_new( object, new_data, na.action = na.pass, composition = \"data.frame\" ) .convert_xy_to_form_fit( x, y, weights = NULL, y_name = \"..y\", remove_intercept = TRUE ) .convert_xy_to_form_new(object, new_data)"},{"path":"https://parsnip.tidymodels.org/dev/reference/convert_helpers.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Helper functions to convert between formula and matrix interface — .convert_form_to_xy_fit","text":"formula object class formula (one can coerced class): symbolic description model fitted. data data frame containing relevant variables (e.g. outcome(s), predictors, case weights, etc). ... Additional arguments passed stats::model.frame(). na.action function indicates happen data contain NAs. indicators string describing whether create indicator/dummy variables factor predictors. Possible options \"none\", \"traditional\", \"one_hot\". composition string describing whether resulting x y returned \"matrix\" \"data.frame\". remove_intercept logical indicating whether remove intercept column model.matrix() finished. object object class model_fit. new_data rectangular data object, data frame. x matrix, sparse matrix, data frame predictors. models support sparse matrix input. See parsnip::get_encoding() details. x column names. y vector, matrix data frame outcome data. weights numeric vector containing weights. 
y_name string specifying name outcome.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/convert_stan_interval.html","id":null,"dir":"Reference","previous_headings":"","what":"Convenience function for intervals — convert_stan_interval","title":"Convenience function for intervals — convert_stan_interval","text":"Convenience function intervals","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/convert_stan_interval.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Convenience function for intervals — convert_stan_interval","text":"","code":"convert_stan_interval(x, level = 0.95, lower = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/convert_stan_interval.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Convenience function for intervals — convert_stan_interval","text":"x fitted model object level Level uncertainty intervals lower level lower level?","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/ctree_train.html","id":null,"dir":"Reference","previous_headings":"","what":"A wrapper function for conditional inference tree models — ctree_train","title":"A wrapper function for conditional inference tree models — ctree_train","text":"functions slightly different APIs partykit::ctree() partykit::cforest() several important arguments top-level arguments (opposed specified partykit::ctree_control()).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/ctree_train.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"A wrapper function for conditional inference tree models — ctree_train","text":"","code":"ctree_train( formula, data, weights = NULL, minsplit = 20L, maxdepth = Inf, teststat = \"quadratic\", testtype = \"Bonferroni\", mincriterion = 0.95, ... 
) cforest_train( formula, data, weights = NULL, minsplit = 20L, maxdepth = Inf, teststat = \"quadratic\", testtype = \"Univariate\", mincriterion = 0, mtry = ceiling(sqrt(ncol(data) - 1)), ntree = 500L, ... )"},{"path":"https://parsnip.tidymodels.org/dev/reference/ctree_train.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"A wrapper function for conditional inference tree models — ctree_train","text":"formula symbolic description model fit. data data frame containing variables model. weights vector weights whose length nrow(data). partykit::ctree() models, required non-negative integers partykit::cforest() can non-negative integers doubles. minsplit minimum sum weights node order considered splitting. maxdepth maximum depth tree. default maxdepth = Inf means restrictions applied tree sizes. teststat character specifying type test statistic applied. testtype character specifying compute distribution test statistic. mincriterion value test statistic (testtype == \"Teststatistic\"), 1 - p-value (values testtype) must exceeded order implement split. ... options pass partykit::ctree() partykit::cforest(). mtry Number input variables randomly sampled candidates node random forest like algorithms. default mtry = Inf means random selection takes place. 
ntree Number trees grow forest.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/ctree_train.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"A wrapper function for conditional inference tree models — ctree_train","text":"object class party (ctree) cforest.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/ctree_train.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"A wrapper function for conditional inference tree models — ctree_train","text":"","code":"if (rlang::is_installed(c(\"modeldata\", \"partykit\"))) { data(bivariate, package = \"modeldata\") ctree_train(Class ~ ., data = bivariate_train) ctree_train(Class ~ ., data = bivariate_train, maxdepth = 1) } #> #> Model formula: #> Class ~ A + B #> #> Fitted party: #> [1] root #> | [2] B <= 56.77622: Two (n = 100, err = 34.0%) #> | [3] B > 56.77622: One (n = 909, err = 33.8%) #> #> Number of inner nodes: 1 #> Number of terminal nodes: 2"},{"path":"https://parsnip.tidymodels.org/dev/reference/cubist_rules.html","id":null,"dir":"Reference","previous_headings":"","what":"Cubist rule-based regression models — cubist_rules","title":"Cubist rule-based regression models — cubist_rules","text":"cubist_rules() defines model derives simple feature rules tree ensemble creates regression models within rule. function can fit regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
Cubist¹² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/cubist_rules.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Cubist rule-based regression models — cubist_rules","text":"","code":"cubist_rules( mode = \"regression\", committees = NULL, neighbors = NULL, max_rules = NULL, engine = \"Cubist\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/cubist_rules.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Cubist rule-based regression models — cubist_rules","text":"mode single character string type model. possible value model \"regression\". committees non-negative integer (greater 100) number members ensemble. neighbors integer zero nine number training set instances used adjust model-based prediction. max_rules largest number rules. engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/cubist_rules.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Cubist rule-based regression models — cubist_rules","text":"Cubist rule-based ensemble regression model. basic model tree (Quinlan, 1992) created separate linear regression model corresponding terminal node. paths along model tree flattened rules rules simplified pruned. parameter min_n primary method controlling size tree max_rules controls number rules. Cubist ensembles created using committees, similar boosting. first model committee created, second model uses modified version outcome data based whether previous model - -predicted outcome. iteration m, new outcome y* computed using sample -predicted previous iteration, outcome adjusted next time likely -predicted compensate. adjustment continues ensemble iteration. See Kuhn Johnson (2013) details. 
model created, also option post-hoc adjustment uses training set (Quinlan, 1993). new sample predicted model, can modified nearest neighbors original training set. K neighbors, model-based predicted value adjusted neighbor using: t training set prediction w weight inverse distance neighbor. function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 cubist_rules(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/cubist_rules.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Cubist rule-based regression models — cubist_rules","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models Quinlan R (1992). \"Learning Continuous Classes.\" Proceedings 5th Australian Joint Conference Artificial Intelligence, pp. 343-348. Quinlan R (1993).\"Combining Instance-Based Model-Based Learning.\" Proceedings Tenth International Conference Machine Learning, pp. 236-243. Kuhn M Johnson K (2013). Applied Predictive Modeling. Springer.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/decision_tree.html","id":null,"dir":"Reference","previous_headings":"","what":"Decision trees — decision_tree","title":"Decision trees — decision_tree","text":"decision_tree() defines model set /statements creates tree-based structure. function can fit classification, regression, censored regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
rpart¹² C5.0 partykit² spark information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/decision_tree.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Decision trees — decision_tree","text":"","code":"decision_tree( mode = \"unknown\", engine = \"rpart\", cost_complexity = NULL, tree_depth = NULL, min_n = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/decision_tree.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Decision trees — decision_tree","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\", \"censored regression\". engine single character string specifying computational engine use fitting. cost_complexity positive number cost/complexity parameter (.k.. Cp) used CART models (specific engines ). tree_depth integer maximum depth tree. min_n integer minimum number data points node required node split .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/decision_tree.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Decision trees — decision_tree","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 decision_tree(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/decision_tree.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Decision trees — decision_tree","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/decision_tree.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Decision trees — decision_tree","text":"","code":"show_engines(\"decision_tree\") #> # A tibble: 5 × 2 #> engine mode #> #> 1 rpart classification #> 2 rpart regression #> 3 C5.0 classification #> 4 spark classification #> 5 spark regression decision_tree(mode = \"classification\", tree_depth = 5) #> Decision Tree Model Specification (classification) #> #> Main Arguments: #> tree_depth = 5 #> #> Computational engine: rpart #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/descriptors.html","id":null,"dir":"Reference","previous_headings":"","what":"Data Set Characteristics Available when Fitting Models — descriptors","title":"Data Set Characteristics Available when Fitting Models — descriptors","text":"using fit() functions variables available use arguments. example, user like choose argument value based current number rows data set, .obs() function can used. 
See Details .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/descriptors.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Data Set Characteristics Available when Fitting Models — descriptors","text":"","code":".cols() .preds() .obs() .lvls() .facts() .x() .y() .dat()"},{"path":"https://parsnip.tidymodels.org/dev/reference/descriptors.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Data Set Characteristics Available when Fitting Models — descriptors","text":"Existing functions: .obs(): current number rows data set. .preds(): number columns data set associated predictors prior dummy variable creation. .cols(): number predictor columns available dummy variables created (). .facts(): number factor predictors data set. .lvls(): outcome factor, table counts level (NA otherwise). .x(): predictors returned format given. Either data frame matrix. .y(): known outcomes returned format given. Either vector, matrix, data frame. .dat(): data frame containing predictors outcomes. fit_xy() used, outcomes attached column, ..y. example, use model formula circumference ~ . built-Orange data, values formula Tree ~ . used: use model fit, pass model specification. evaluation delayed time model run via fit() (variables listed available). 
example: descriptors found, computation descriptor values executed.","code":".preds() = 2 (the 2 remaining columns in `Orange`) .cols() = 5 (1 numeric column + 4 from Tree dummy variables) .obs() = 35 .lvls() = NA (no factor outcome) .facts() = 1 (the Tree predictor) .y() = (circumference as a vector) .x() = (The other 2 columns as a data frame) .dat() = (The full data set) .preds() = 2 (the 2 numeric columns in `Orange`) .cols() = 2 (same) .obs() = 35 .lvls() = c(\"1\" = 7, \"2\" = 7, \"3\" = 7, \"4\" = 7, \"5\" = 7) .facts() = 0 .y() = (Tree as a vector) .x() = (The other 2 columns as a data frame) .dat() = (The full data set) library(modeldata) data(\"lending_club\") rand_forest(mode = \"classification\", mtry = .cols() - 2)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_C5_rules_C5.0.html","id":null,"dir":"Reference","previous_headings":"","what":"C5.0 rule-based classification models — details_C5_rules_C5.0","title":"C5.0 rule-based classification models — details_C5_rules_C5.0","text":"C50::C5.0() fits model derives feature rules tree prediction. single tree boosted ensemble can used. rules::c5_fit() wrapper around function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_C5_rules_C5.0.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"C5.0 rule-based classification models — details_C5_rules_C5.0","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_C5_rules_C5.0.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"C5.0 rule-based classification models — details_C5_rules_C5.0","text":"model 2 tuning parameters: trees: # Trees (type: integer, default: 1L) min_n: Minimal Node Size (type: integer, default: 2L) Note C5.0 tool early stopping boosting less iterations boosting performed number requested. 
C5_rules() turns feature (although can re-enabled using C50::C5.0Control()).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_C5_rules_C5.0.html","id":"translation-from-parsnip-to-the-underlying-model-call-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (classification)","title":"C5.0 rule-based classification models — details_C5_rules_C5.0","text":"rules extension package required fit model.","code":"library(rules) C5_rules( trees = integer(1), min_n = integer(1) ) %>% set_engine(\"C5.0\") %>% set_mode(\"classification\") %>% translate() ## C5.0 Model Specification (classification) ## ## Main Arguments: ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: C5.0 ## ## Model fit template: ## rules::c5_fit(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## trials = integer(1), minCases = integer(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_C5_rules_C5.0.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"C5.0 rule-based classification models — details_C5_rules_C5.0","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_C5_rules_C5.0.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"C5.0 rule-based classification models — details_C5_rules_C5.0","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_C5_rules_C5.0.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"C5.0 rule-based classification models — details_C5_rules_C5.0","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_C5_rules_C5.0.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"C5.0 rule-based classification models — details_C5_rules_C5.0","text":"Quinlan R (1992). “Learning Continuous Classes.” Proceedings 5th Australian Joint Conference Artificial Intelligence, pp. 343-348. Quinlan R (1993).”Combining Instance-Based Model-Based Learning.” Proceedings Tenth International Conference Machine Learning, pp. 236-243. Kuhn M Johnson K (2013). Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_auto_ml_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Automatic machine learning via h2o — details_auto_ml_h2o","title":"Automatic machine learning via h2o — details_auto_ml_h2o","text":"h2o::h2o.automl defines automated model training process returns leaderboard models best performances.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_auto_ml_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Automatic machine learning via h2o — details_auto_ml_h2o","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_auto_ml_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Automatic machine learning via h2o — details_auto_ml_h2o","text":"model tuning parameters. Engine arguments interest max_runtime_secs max_models: controls maximum running time number models build automatic process. exclude_algos include_algos: character vector indicating excluded included algorithms model building. see full list supported models, see details section h2o::h2o.automl(). validation: integer 0 1 specifying proportion training data reserved validation set. 
used h2o performance assessment potential early stopping.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_auto_ml_h2o.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Automatic machine learning via h2o — details_auto_ml_h2o","text":"agua::h2o_train_auto() wrapper around h2o::h2o.automl().","code":"auto_ml() %>% set_engine(\"h2o\") %>% set_mode(\"regression\") %>% translate() ## Automatic Machine Learning Model Specification (regression) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_auto(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), verbosity = NULL)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_auto_ml_h2o.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Automatic machine learning via h2o — details_auto_ml_h2o","text":"","code":"auto_ml() %>% set_engine(\"h2o\") %>% set_mode(\"classification\") %>% translate() ## Automatic Machine Learning Model Specification (classification) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_auto(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), verbosity = NULL)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_auto_ml_h2o.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Automatic machine learning via h2o — details_auto_ml_h2o","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. 
using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_auto_ml_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Automatic machine learning via h2o — details_auto_ml_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_auto_ml_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Automatic machine learning via h2o — details_auto_ml_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mars_earth.html","id":null,"dir":"Reference","previous_headings":"","what":"Bagged MARS via earth — details_bag_mars_earth","title":"Bagged MARS via earth — details_bag_mars_earth","text":"baguette::bagger() creates collection MARS models forming ensemble. 
models ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mars_earth.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Bagged MARS via earth — details_bag_mars_earth","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mars_earth.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Bagged MARS via earth — details_bag_mars_earth","text":"model 3 tuning parameters: prod_degree: Degree Interaction (type: integer, default: 1L) prune_method: Pruning Method (type: character, default: ‘backward’) num_terms: # Model Terms (type: integer, default: see ) default value num_terms depends number predictor columns. data frame x, default min(200, max(20, 2 * ncol(x))) + 1 (see earth::earth() reference ).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mars_earth.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Bagged MARS via earth — details_bag_mars_earth","text":"baguette extension package required fit model.","code":"bag_mars(num_terms = integer(1), prod_degree = integer(1), prune_method = character(1)) %>% set_engine(\"earth\") %>% set_mode(\"regression\") %>% translate() ## Bagged MARS Model Specification (regression) ## ## Main Arguments: ## num_terms = integer(1) ## prod_degree = integer(1) ## prune_method = character(1) ## ## Computational engine: earth ## ## Model fit template: ## baguette::bagger(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), nprune = integer(1), degree = integer(1), ## pmethod = character(1), base_model = 
\"MARS\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mars_earth.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Bagged MARS via earth — details_bag_mars_earth","text":"baguette extension package required fit model.","code":"library(baguette) bag_mars( num_terms = integer(1), prod_degree = integer(1), prune_method = character(1) ) %>% set_engine(\"earth\") %>% set_mode(\"classification\") %>% translate() ## Bagged MARS Model Specification (classification) ## ## Main Arguments: ## num_terms = integer(1) ## prod_degree = integer(1) ## prune_method = character(1) ## ## Computational engine: earth ## ## Model fit template: ## baguette::bagger(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), nprune = integer(1), degree = integer(1), ## pmethod = character(1), base_model = \"MARS\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mars_earth.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Bagged MARS via earth — details_bag_mars_earth","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mars_earth.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Bagged MARS via earth — details_bag_mars_earth","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. 
Note earth package documentation : “current implementation, building models weights can slow.”","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mars_earth.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Bagged MARS via earth — details_bag_mars_earth","text":"Breiman, L. 1996. “Bagging predictors”. Machine Learning. 24 (2): 123-140 Friedman, J. 1991. “Multivariate Adaptive Regression Splines.” Annals Statistics, vol. 19, . 1, pp. 1-67. Milborrow, S. “Notes earth package.” Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mlp_nnet.html","id":null,"dir":"Reference","previous_headings":"","what":"Bagged neural networks via nnet — details_bag_mlp_nnet","title":"Bagged neural networks via nnet — details_bag_mlp_nnet","text":"baguette::bagger() creates collection neural networks forming ensemble. trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mlp_nnet.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Bagged neural networks via nnet — details_bag_mlp_nnet","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mlp_nnet.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Bagged neural networks via nnet — details_bag_mlp_nnet","text":"model 3 tuning parameters: hidden_units: # Hidden Units (type: integer, default: 10L) penalty: Amount Regularization (type: double, default: 0.0) epochs: # Epochs (type: integer, default: 1000L) defaults set baguette package different 
nnet::nnet().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mlp_nnet.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Bagged neural networks via nnet — details_bag_mlp_nnet","text":"baguette extension package required fit model.","code":"library(baguette) bag_mlp(penalty = double(1), hidden_units = integer(1)) %>% set_engine(\"nnet\") %>% set_mode(\"classification\") %>% translate() ## Bagged Neural Network Model Specification (classification) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## ## Computational engine: nnet ## ## Model fit template: ## baguette::bagger(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), size = integer(1), decay = double(1), ## base_model = \"nnet\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mlp_nnet.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Bagged neural networks via nnet — details_bag_mlp_nnet","text":"baguette extension package required fit model.","code":"library(baguette) bag_mlp(penalty = double(1), hidden_units = integer(1)) %>% set_engine(\"nnet\") %>% set_mode(\"regression\") %>% translate() ## Bagged Neural Network Model Specification (regression) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## ## Computational engine: nnet ## ## Model fit template: ## baguette::bagger(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), size = integer(1), decay = double(1), ## base_model = \"nnet\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mlp_nnet.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing 
requirements","title":"Bagged neural networks via nnet — details_bag_mlp_nnet","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mlp_nnet.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Bagged neural networks via nnet — details_bag_mlp_nnet","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_mlp_nnet.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Bagged neural networks via nnet — details_bag_mlp_nnet","text":"Breiman L. 1996. “Bagging predictors”. Machine Learning. 24 (2): 123-140 Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_C5.0.html","id":null,"dir":"Reference","previous_headings":"","what":"Bagged trees via C5.0 — details_bag_tree_C5.0","title":"Bagged trees via C5.0 — details_bag_tree_C5.0","text":"baguette::bagger() creates collection decision trees forming ensemble. 
trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_C5.0.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Bagged trees via C5.0 — details_bag_tree_C5.0","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_C5.0.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Bagged trees via C5.0 — details_bag_tree_C5.0","text":"model 1 tuning parameters: min_n: Minimal Node Size (type: integer, default: 2L)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_C5.0.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Bagged trees via C5.0 — details_bag_tree_C5.0","text":"baguette extension package required fit model.","code":"library(baguette) bag_tree(min_n = integer()) %>% set_engine(\"C5.0\") %>% set_mode(\"classification\") %>% translate() ## Bagged Decision Tree Model Specification (classification) ## ## Main Arguments: ## cost_complexity = 0 ## min_n = integer() ## ## Computational engine: C5.0 ## ## Model fit template: ## baguette::bagger(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## minCases = integer(), base_model = \"C5.0\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_C5.0.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Bagged trees via C5.0 — details_bag_tree_C5.0","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. 
Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_C5.0.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Bagged trees via C5.0 — details_bag_tree_C5.0","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_C5.0.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Bagged trees via C5.0 — details_bag_tree_C5.0","text":"Breiman, L. 1996. “Bagging predictors”. Machine Learning. 24 (2): 123-140 Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":null,"dir":"Reference","previous_headings":"","what":"Bagged trees via rpart — details_bag_tree_rpart","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"baguette::bagger() ipred::bagging() create collections decision trees forming ensemble. 
trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"engine, multiple modes: classification, regression, censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"model 4 tuning parameters: class_cost: Class Cost (type: double, default: (see )) tree_depth: Tree Depth (type: integer, default: 30L) min_n: Minimal Node Size (type: integer, default: 2L) cost_complexity: Cost-Complexity Parameter (type: double, default: 0.01) class_cost parameter, value can non-negative scalar class cost (cost 1 means extra cost). useful first level outcome factor minority class. 
case, values zero one can used bias second level factor.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"baguette extension package required fit model.","code":"library(baguette) bag_tree(tree_depth = integer(1), min_n = integer(1), cost_complexity = double(1)) %>% set_engine(\"rpart\") %>% set_mode(\"classification\") %>% translate() ## Bagged Decision Tree Model Specification (classification) ## ## Main Arguments: ## cost_complexity = double(1) ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: rpart ## ## Model fit template: ## baguette::bagger(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), cp = double(1), maxdepth = integer(1), ## minsplit = integer(1), base_model = \"CART\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"baguette extension package required fit model.","code":"library(baguette) bag_tree(tree_depth = integer(1), min_n = integer(1), cost_complexity = double(1)) %>% set_engine(\"rpart\") %>% set_mode(\"regression\") %>% translate() ## Bagged Decision Tree Model Specification (regression) ## ## Main Arguments: ## cost_complexity = double(1) ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: rpart ## ## Model fit template: ## baguette::bagger(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), cp = double(1), maxdepth = integer(1), ## minsplit = integer(1), base_model = 
\"CART\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"translation-from-parsnip-to-the-original-package-censored-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (censored regression)","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"censored extension package required fit model.","code":"library(censored) bag_tree(tree_depth = integer(1), min_n = integer(1), cost_complexity = double(1)) %>% set_engine(\"rpart\") %>% set_mode(\"censored regression\") %>% translate() ## Bagged Decision Tree Model Specification (censored regression) ## ## Main Arguments: ## cost_complexity = double(1) ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: rpart ## ## Model fit template: ## ipred::bagging(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), cp = double(1), maxdepth = integer(1), ## minsplit = integer(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"Predictions type \"time\" predictions median survival time.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bag_tree_rpart.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Bagged trees via rpart — details_bag_tree_rpart","text":"Breiman L. 1996. “Bagging predictors”. Machine Learning. 24 (2): 123-140 Hothorn T, Lausen B, Benner , Radespiel-Troeger M. 2004. Bagging Survival Trees. Statistics Medicine, 23(1), 77–91. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bart_dbarts.html","id":null,"dir":"Reference","previous_headings":"","what":"Bayesian additive regression trees via dbarts — details_bart_dbarts","title":"Bayesian additive regression trees via dbarts — details_bart_dbarts","text":"dbarts::bart() creates ensemble tree-based model whose training assembly determined using Bayesian analysis.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bart_dbarts.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Bayesian additive regression trees via dbarts — details_bart_dbarts","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bart_dbarts.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Bayesian additive regression trees via dbarts — details_bart_dbarts","text":"model 4 tuning parameters: trees: # Trees (type: integer, default: 200L) prior_terminal_node_coef: Terminal Node Prior 
Coefficient (type: double, default: 0.95) prior_terminal_node_expo: Terminal Node Prior Exponent (type: double, default: 2.00) prior_outcome_range: Prior Outcome Range (type: double, default: 2.00)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bart_dbarts.html","id":"important-engine-specific-options","dir":"Reference","previous_headings":"","what":"Important engine-specific options","title":"Bayesian additive regression trees via dbarts — details_bart_dbarts","text":"relevant arguments can passed set_engine(): keepevery, n.thin: Every keepevery draw kept returned user. Useful “thinning” samples. ntree, n.trees: number trees sum--trees formulation. ndpost, n.samples: number posterior draws burn , ndpost / keepevery actually returned. nskip, n.burn: Number MCMC iterations treated burn . nchain, n.chains: Integer specifying many independent tree sets fits calculated. nthread, n.threads: Integer specifying many threads use. Depending CPU architecture, using number chains can degrade performance small/medium data sets. calculations may executed single threaded regardless. 
combinechains, combineChains: Logical; TRUE, samples returned arrays dimensions equal nchain times ndpost times number observations.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bart_dbarts.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Bayesian additive regression trees via dbarts — details_bart_dbarts","text":"","code":"bart( trees = integer(1), prior_terminal_node_coef = double(1), prior_terminal_node_expo = double(1), prior_outcome_range = double(1) ) %>% set_engine(\"dbarts\") %>% set_mode(\"classification\") %>% translate() ## BART Model Specification (classification) ## ## Main Arguments: ## trees = integer(1) ## prior_terminal_node_coef = double(1) ## prior_terminal_node_expo = double(1) ## prior_outcome_range = double(1) ## ## Computational engine: dbarts ## ## Model fit template: ## dbarts::bart(x = missing_arg(), y = missing_arg(), ntree = integer(1), ## base = double(1), power = double(1), k = double(1), verbose = FALSE, ## keeptrees = TRUE, keepcall = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bart_dbarts.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Bayesian additive regression trees via dbarts — details_bart_dbarts","text":"","code":"bart( trees = integer(1), prior_terminal_node_coef = double(1), prior_terminal_node_expo = double(1), prior_outcome_range = double(1) ) %>% set_engine(\"dbarts\") %>% set_mode(\"regression\") %>% translate() ## BART Model Specification (regression) ## ## Main Arguments: ## trees = integer(1) ## prior_terminal_node_coef = double(1) ## prior_terminal_node_expo = double(1) ## prior_outcome_range = double(1) ## ## Computational engine: dbarts ## ## Model fit template: 
## dbarts::bart(x = missing_arg(), y = missing_arg(), ntree = integer(1), ## base = double(1), power = double(1), k = double(1), verbose = FALSE, ## keeptrees = TRUE, keepcall = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bart_dbarts.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Bayesian additive regression trees via dbarts — details_bart_dbarts","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. dbarts::bart() also convert factors indicators user create first.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_bart_dbarts.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Bayesian additive regression trees via dbarts — details_bart_dbarts","text":"Chipman, George, McCulloch. “BART: Bayesian additive regression trees.” Ann. Appl. Stat. 4 (1) 266 - 298, March 2010.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees via C5.0 — details_boost_tree_C5.0","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"C50::C5.0() creates series classification trees forming ensemble. tree depends results previous trees. 
trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"model 3 tuning parameters: trees: # Trees (type: integer, default: 15L) min_n: Minimal Node Size (type: integer, default: 2L) sample_size: Proportion Observations Sampled (type: double, default: 1.0) implementation C5.0 limits number trees 1 100.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"C5.0_train() wrapper around C50::C5.0() makes easier run model.","code":"boost_tree(trees = integer(), min_n = integer(), sample_size = numeric()) %>% set_engine(\"C5.0\") %>% set_mode(\"classification\") %>% translate() ## Boosted Tree Model Specification (classification) ## ## Main Arguments: ## trees = integer() ## min_n = integer() ## sample_size = numeric() ## ## Computational engine: C5.0 ## ## Model fit template: ## parsnip::C5.0_train(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## trials = integer(), minCases = integer(), sample = numeric())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Boosted trees via C5.0 — 
details_boost_tree_C5.0","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"early-stopping","dir":"Reference","previous_headings":"","what":"Early stopping","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"default, early stopping used. use complete set boosting iterations, pass earlyStopping = FALSE set_engine(). 
Also, unlikely early stopping occur sample_size = 1.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"“Fitting Predicting parsnip” article contains examples boost_tree() \"C5.0\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_C5.0.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Boosted trees via C5.0 — details_boost_tree_C5.0","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees via h2o — details_boost_tree_h2o","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"h2o::h2o.xgboost() creates series decision trees forming ensemble. tree depends results previous trees. 
trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"model 8 tuning parameters: trees: # Trees (type: integer, default: 50) tree_depth: Tree Depth (type: integer, default: 6) min_n: Minimal Node Size (type: integer, default: 1) learn_rate: Learning Rate (type: double, default: 0.3) sample_size: # Observations Sampled (type: integer, default: 1) mtry: # Randomly Selected Predictors (type: integer, default: 1) loss_reduction: Minimum Loss Reduction (type: double, default: 0) stop_iter: # Iterations Stopping (type: integer, default: 0) min_n represents fewest allowed observations terminal node, h2o::h2o.xgboost() allows one row leaf default. stop_iter controls early stopping rounds based convergence engine parameter stopping_metric. default, h2o::h2o.xgboost() use early stopping. stop_iter 0, h2o::h2o.xgboost() uses logloss classification, deviance regression anonomaly score Isolation Forest. mostly useful used alongside engine parameter validation, proportion train-validation split, parsnip split pass two data frames h2o. 
h2o::h2o.xgboost() evaluate metric early stopping criteria validation set.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"agua::h2o_train_xgboost() wrapper around h2o::h2o.xgboost(). agua extension package required fit model.","code":"boost_tree( mtry = integer(), trees = integer(), tree_depth = integer(), learn_rate = numeric(), min_n = integer(), loss_reduction = numeric(), stop_iter = integer() ) %>% set_engine(\"h2o\") %>% set_mode(\"regression\") %>% translate() ## Boosted Tree Model Specification (regression) ## ## Main Arguments: ## mtry = integer() ## trees = integer() ## min_n = integer() ## tree_depth = integer() ## learn_rate = numeric() ## loss_reduction = numeric() ## stop_iter = integer() ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_xgboost(x = missing_arg(), y = missing_arg(), ## weights = missing_arg(), validation_frame = missing_arg(), ## col_sample_rate = integer(), ntrees = integer(), min_rows = integer(), ## max_depth = integer(), learn_rate = numeric(), min_split_improvement = numeric(), ## stopping_rounds = integer())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"agua extension package required fit model.","code":"boost_tree( mtry = integer(), trees = integer(), tree_depth = integer(), learn_rate = numeric(), min_n = integer(), loss_reduction = numeric(), stop_iter = integer() ) %>% set_engine(\"h2o\") %>% set_mode(\"classification\") %>% 
translate() ## Boosted Tree Model Specification (classification) ## ## Main Arguments: ## mtry = integer() ## trees = integer() ## min_n = integer() ## tree_depth = integer() ## learn_rate = numeric() ## loss_reduction = numeric() ## stop_iter = integer() ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_xgboost(x = missing_arg(), y = missing_arg(), ## weights = missing_arg(), validation_frame = missing_arg(), ## col_sample_rate = integer(), ntrees = integer(), min_rows = integer(), ## max_depth = integer(), learn_rate = numeric(), min_split_improvement = numeric(), ## stopping_rounds = integer())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":"preprocessing","dir":"Reference","previous_headings":"","what":"Preprocessing","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model. Non-numeric predictors (.e., factors) internally converted numeric. classification context, non-numeric outcomes (.e., factors) also internally converted numeric.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":"interpreting-mtry","dir":"Reference","previous_headings":"","what":"Interpreting mtry","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"mtry argument denotes number predictors randomly sampled split creating tree models. engines, \"xgboost\", \"xrf\", \"lightgbm\", interpret analogue mtry argument proportion predictors randomly sampled split rather count. settings, tuning preprocessors influence number predictors, parameterization quite helpful—interpreting mtry proportion means [0, 1] always valid range parameter, regardless input data. 
parsnip extensions accommodate parameterization using counts argument: logical indicating whether mtry interpreted number predictors randomly sampled split. TRUE indicates mtry interpreted sense count, FALSE indicates argument interpreted sense proportion. mtry main model argument boost_tree() rand_forest(), thus engine-specific interface. , regardless engine, counts defaults TRUE. engines support proportion interpretation (currently \"xgboost\" \"xrf\", via rules package, \"lightgbm\" via bonsai package) user can pass counts = FALSE argument set_engine() supply mtry values within [0, 1].","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Boosted trees via h2o — details_boost_tree_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. 
learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees via lightgbm — details_boost_tree_lightgbm","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"lightgbm::lgb.train() creates series decision trees forming ensemble. tree depends results previous trees. trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"engine, multiple modes: regression classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"model 6 tuning parameters: tree_depth: Tree Depth (type: integer, default: -1) trees: # Trees (type: integer, default: 100) learn_rate: Learning Rate (type: double, default: 0.1) mtry: # Randomly Selected Predictors (type: integer, default: see ) min_n: Minimal Node Size (type: integer, default: 20) loss_reduction: Minimum Loss Reduction (type: double, default: 0) mtry parameter gives number predictors randomly sampled split. default use predictors. Rather number, lightgbm::lgb.train()’s feature_fraction argument encodes mtry proportion predictors randomly sampled split. parsnip translates mtry, supplied number predictors, proportion hood. , user still supply argument mtry boost_tree(), sense number rather proportion; passing mtry lightgbm::lgb.train(), parsnip convert mtry value proportion. Note parsnip’s translation can overridden via counts argument, supplied set_engine(). 
default, counts set TRUE, supplying argument counts = FALSE allows user supply mtry proportion rather number.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"bonsai extension package required fit model.","code":"boost_tree( mtry = integer(), trees = integer(), tree_depth = integer(), learn_rate = numeric(), min_n = integer(), loss_reduction = numeric() ) %>% set_engine(\"lightgbm\") %>% set_mode(\"regression\") %>% translate() ## Boosted Tree Model Specification (regression) ## ## Main Arguments: ## mtry = integer() ## trees = integer() ## min_n = integer() ## tree_depth = integer() ## learn_rate = numeric() ## loss_reduction = numeric() ## ## Computational engine: lightgbm ## ## Model fit template: ## bonsai::train_lightgbm(x = missing_arg(), y = missing_arg(), ## feature_fraction_bynode = integer(), num_iterations = integer(), ## min_data_in_leaf = integer(), max_depth = integer(), learning_rate = numeric(), ## min_gain_to_split = numeric(), verbose = -1, num_threads = 0, ## seed = sample.int(10^5, 1), deterministic = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"bonsai extension package required fit model. 
bonsai::train_lightgbm() wrapper around lightgbm::lgb.train() (functions) make easier run model.","code":"boost_tree( mtry = integer(), trees = integer(), tree_depth = integer(), learn_rate = numeric(), min_n = integer(), loss_reduction = numeric() ) %>% set_engine(\"lightgbm\") %>% set_mode(\"classification\") %>% translate() ## Boosted Tree Model Specification (classification) ## ## Main Arguments: ## mtry = integer() ## trees = integer() ## min_n = integer() ## tree_depth = integer() ## learn_rate = numeric() ## loss_reduction = numeric() ## ## Computational engine: lightgbm ## ## Model fit template: ## bonsai::train_lightgbm(x = missing_arg(), y = missing_arg(), ## feature_fraction_bynode = integer(), num_iterations = integer(), ## min_data_in_leaf = integer(), max_depth = integer(), learning_rate = numeric(), ## min_gain_to_split = numeric(), verbose = -1, num_threads = 0, ## seed = sample.int(10^5, 1), deterministic = TRUE)"},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"preprocessing","dir":"Reference","previous_headings":"","what":"Preprocessing","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model. Non-numeric predictors (.e., factors) internally converted numeric. classification context, non-numeric outcomes (.e., factors) also internally converted numeric.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"interpreting-mtry","dir":"Reference","previous_headings":"","what":"Interpreting mtry","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"mtry argument denotes number predictors randomly sampled split creating tree models. 
engines, \"xgboost\", \"xrf\", \"lightgbm\", interpret analogue mtry argument proportion predictors randomly sampled split rather count. settings, tuning preprocessors influence number predictors, parameterization quite helpful—interpreting mtry proportion means [0, 1] always valid range parameter, regardless input data. parsnip extensions accommodate parameterization using counts argument: logical indicating whether mtry interpreted number predictors randomly sampled split. TRUE indicates mtry interpreted sense count, FALSE indicates argument interpreted sense proportion. mtry main model argument boost_tree() rand_forest(), thus engine-specific interface. , regardless engine, counts defaults TRUE. engines support proportion interpretation (currently \"xgboost\" \"xrf\", via rules package, \"lightgbm\" via bonsai package) user can pass counts = FALSE argument set_engine() supply mtry values within [0, 1].","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"bagging","dir":"Reference","previous_headings":"","what":"Bagging","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"sample_size argument translated bagging_fraction parameter param argument lgb.train. argument interpreted lightgbm proportion rather count, bonsai internally reparameterizes sample_size argument dials::sample_prop() tuning. effectively enable bagging, user also need set bagging_freq argument lightgbm. 
bagging_freq defaults 0, means bagging disabled, bagging_freq argument k means booster perform bagging every kth boosting iteration. Thus, default, sample_size argument ignored without setting argument manually. boosting libraries, like xgboost, analogous argument bagging_freq use k = 1 analogue bagging_fraction $(0, 1)$. bonsai thus automatically set bagging_freq = 1 set_engine(\"lightgbm\", ...) sample_size (.e. bagging_fraction) equal 1 bagging_freq value supplied. default can overridden setting bagging_freq argument set_engine() manually.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"verbosity","dir":"Reference","previous_headings":"","what":"Verbosity","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"bonsai quiets much logging output lightgbm::lgb.train() default. default settings, logged warnings errors still passed user. print logs training, set quiet = TRUE.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"“Introduction bonsai” article contains examples boost_tree() \"lightgbm\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_lightgbm.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Boosted trees via lightgbm — details_boost_tree_lightgbm","text":"LightGBM: Highly Efficient Gradient Boosting Decision Tree Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_mboost.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees — details_boost_tree_mboost","title":"Boosted trees — details_boost_tree_mboost","text":"mboost::blackboost() fits series decision trees forming ensemble. tree depends results previous trees. trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_mboost.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Boosted trees — details_boost_tree_mboost","text":"engine, single mode: censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_mboost.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Boosted trees — details_boost_tree_mboost","text":"model 5 tuning parameters: mtry: # Randomly Selected Predictors (type: integer, default: see ) trees: # Trees (type: integer, default: 100L) tree_depth: Tree Depth (type: integer, default: 2L) min_n: Minimal Node Size (type: integer, default: 10L) loss_reduction: Minimum Loss Reduction (type: double, default: 0) mtry parameter related number predictors. default use predictors.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_mboost.html","id":"translation-from-parsnip-to-the-original-package-censored-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (censored regression)","title":"Boosted trees — details_boost_tree_mboost","text":"censored extension package required fit model. 
censored::blackboost_train() wrapper around mboost::blackboost() (functions) makes easier run model.","code":"library(censored) boost_tree() %>% set_engine(\"mboost\") %>% set_mode(\"censored regression\") %>% translate() ## Boosted Tree Model Specification (censored regression) ## ## Computational engine: mboost ## ## Model fit template: ## censored::blackboost_train(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), family = mboost::CoxPH())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_mboost.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Boosted trees — details_boost_tree_mboost","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_mboost.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Boosted trees — details_boost_tree_mboost","text":"Predictions type \"time\" predictions mean survival time.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_mboost.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Boosted trees — details_boost_tree_mboost","text":"Buehlmann P, Hothorn T. 2007. Boosting algorithms: regularization, prediction model fitting. Statistical Science, 22(4), 477–505. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees via Spark — details_boost_tree_spark","title":"Boosted trees via Spark — details_boost_tree_spark","text":"sparklyr::ml_gradient_boosted_trees() creates series decision trees forming ensemble. tree depends results previous trees. trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Boosted trees via Spark — details_boost_tree_spark","text":"engine, multiple modes: classification regression. However, multiclass classification supported yet.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Boosted trees via Spark — details_boost_tree_spark","text":"model 7 tuning parameters: tree_depth: Tree Depth (type: integer, default: 5L) trees: # Trees (type: integer, default: 20L) learn_rate: Learning Rate (type: double, default: 0.1) mtry: # Randomly Selected Predictors (type: integer, default: see ) min_n: Minimal Node Size (type: integer, default: 1L) loss_reduction: Minimum Loss Reduction (type: double, default: 0.0) sample_size: # Observations Sampled (type: integer, default: 1.0) mtry parameter related number predictors. default depends model mode. 
classification, square root number predictors used regression, one third predictors sampled.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Boosted trees via Spark — details_boost_tree_spark","text":"","code":"boost_tree( mtry = integer(), trees = integer(), min_n = integer(), tree_depth = integer(), learn_rate = numeric(), loss_reduction = numeric(), sample_size = numeric() ) %>% set_engine(\"spark\") %>% set_mode(\"regression\") %>% translate() ## Boosted Tree Model Specification (regression) ## ## Main Arguments: ## mtry = integer() ## trees = integer() ## min_n = integer() ## tree_depth = integer() ## learn_rate = numeric() ## loss_reduction = numeric() ## sample_size = numeric() ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_gradient_boosted_trees(x = missing_arg(), formula = missing_arg(), ## type = \"regression\", feature_subset_strategy = integer(), ## max_iter = integer(), min_instances_per_node = min_rows(integer(0), ## x), max_depth = integer(), step_size = numeric(), min_info_gain = numeric(), ## subsampling_rate = numeric(), seed = sample.int(10^5, 1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Boosted trees via Spark — details_boost_tree_spark","text":"","code":"boost_tree( mtry = integer(), trees = integer(), min_n = integer(), tree_depth = integer(), learn_rate = numeric(), loss_reduction = numeric(), sample_size = numeric() ) %>% set_engine(\"spark\") %>% set_mode(\"classification\") %>% translate() ## Boosted Tree Model Specification 
(classification) ## ## Main Arguments: ## mtry = integer() ## trees = integer() ## min_n = integer() ## tree_depth = integer() ## learn_rate = numeric() ## loss_reduction = numeric() ## sample_size = numeric() ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_gradient_boosted_trees(x = missing_arg(), formula = missing_arg(), ## type = \"classification\", feature_subset_strategy = integer(), ## max_iter = integer(), min_instances_per_node = min_rows(integer(0), ## x), max_depth = integer(), step_size = numeric(), min_info_gain = numeric(), ## subsampling_rate = numeric(), seed = sample.int(10^5, 1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Boosted trees via Spark — details_boost_tree_spark","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Boosted trees via Spark — details_boost_tree_spark","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. Note , spark engines, case_weight argument value character string specify column numeric case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Boosted trees via Spark — details_boost_tree_spark","text":"models created using \"spark\" engine, several things consider. 
formula interface via fit() available; using fit_xy() generate error. predictions always Spark table format. names documented without dots. equivalent factor columns Spark tables class predictions returned character columns. retain model object new R session (via save()), model$fit element parsnip object serialized via ml_save(object$fit) separately saved disk. new session, object can reloaded reattached parsnip object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_spark.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Boosted trees via Spark — details_boost_tree_spark","text":"Luraschi, J, K Kuo, E Ruiz. 2019. Mastering Spark R. O’Reilly Media Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees via xgboost — details_boost_tree_xgboost","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"xgboost::xgb.train() creates series decision trees forming ensemble. tree depends results previous trees. 
trees ensemble combined produce final prediction.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"model 8 tuning parameters: tree_depth: Tree Depth (type: integer, default: 6L) trees: # Trees (type: integer, default: 15L) learn_rate: Learning Rate (type: double, default: 0.3) mtry: # Randomly Selected Predictors (type: integer, default: see ) min_n: Minimal Node Size (type: integer, default: 1L) loss_reduction: Minimum Loss Reduction (type: double, default: 0.0) sample_size: Proportion Observations Sampled (type: double, default: 1.0) stop_iter: # Iterations Stopping (type: integer, default: Inf) mtry, default value NULL translates using available columns.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"","code":"boost_tree( mtry = integer(), trees = integer(), min_n = integer(), tree_depth = integer(), learn_rate = numeric(), loss_reduction = numeric(), sample_size = numeric(), stop_iter = integer() ) %>% set_engine(\"xgboost\") %>% set_mode(\"regression\") %>% translate() ## Boosted Tree Model Specification (regression) ## ## Main Arguments: ## mtry = integer() ## trees = integer() ## min_n = integer() ## tree_depth = integer() ## learn_rate = numeric() ## 
loss_reduction = numeric() ## sample_size = numeric() ## stop_iter = integer() ## ## Computational engine: xgboost ## ## Model fit template: ## parsnip::xgb_train(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## colsample_bynode = integer(), nrounds = integer(), min_child_weight = integer(), ## max_depth = integer(), eta = numeric(), gamma = numeric(), ## subsample = numeric(), early_stop = integer(), nthread = 1, ## verbose = 0)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"xgb_train() wrapper around xgboost::xgb.train() (functions) makes easier run model.","code":"boost_tree( mtry = integer(), trees = integer(), min_n = integer(), tree_depth = integer(), learn_rate = numeric(), loss_reduction = numeric(), sample_size = numeric(), stop_iter = integer() ) %>% set_engine(\"xgboost\") %>% set_mode(\"classification\") %>% translate() ## Boosted Tree Model Specification (classification) ## ## Main Arguments: ## mtry = integer() ## trees = integer() ## min_n = integer() ## tree_depth = integer() ## learn_rate = numeric() ## loss_reduction = numeric() ## sample_size = numeric() ## stop_iter = integer() ## ## Computational engine: xgboost ## ## Model fit template: ## parsnip::xgb_train(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## colsample_bynode = integer(), nrounds = integer(), min_child_weight = integer(), ## max_depth = integer(), eta = numeric(), gamma = numeric(), ## subsample = numeric(), early_stop = integer(), nthread = 1, ## verbose = 0)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing 
requirements","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"xgboost means translate factor predictors grouped splits. Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit.model_spec(), parsnip convert factor columns indicators using one-hot encoding. classification, non-numeric outcomes (.e., factors) internally converted numeric. binary classification, event_level argument set_engine() can set either \"first\" \"second\" specify level used event. can helpful watchlist used monitor performance xgboost training process.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"interfacing-with-the-params-argument","dir":"Reference","previous_headings":"","what":"Interfacing with the params argument","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"xgboost function parsnip indirectly wraps, xgboost::xgb.train(), takes arguments via params list argument. supply engine-specific arguments documented xgboost::xgb.train() arguments passed via params, supply list elements directly named arguments set_engine() rather elements params. example, pass non-default evaluation metric like : …rather : parsnip route arguments needed. case arguments passed params via set_engine(), parsnip warn re-route arguments needed. 
Note, though, arguments passed params tuned.","code":"# good boost_tree() %>% set_engine(\"xgboost\", eval_metric = \"mae\") ## Boosted Tree Model Specification (unknown mode) ## ## Engine-Specific Arguments: ## eval_metric = mae ## ## Computational engine: xgboost # bad boost_tree() %>% set_engine(\"xgboost\", params = list(eval_metric = \"mae\")) ## Boosted Tree Model Specification (unknown mode) ## ## Engine-Specific Arguments: ## params = list(eval_metric = \"mae\") ## ## Computational engine: xgboost"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"sparse-matrices","dir":"Reference","previous_headings":"","what":"Sparse matrices","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"xgboost requires data sparse format. predictor data already format, use fit_xy.model_spec() pass model function. Otherwise, parsnip converts data format.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"parallel-processing","dir":"Reference","previous_headings":"","what":"Parallel processing","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"default, model trained without parallel processing. can change passing nthread parameter set_engine(). However, unwise combine external parallel processing using package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"interpreting-mtry","dir":"Reference","previous_headings":"","what":"Interpreting mtry","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"mtry argument denotes number predictors randomly sampled split creating tree models. engines, \"xgboost\", \"xrf\", \"lightgbm\", interpret analogue mtry argument proportion predictors randomly sampled split rather count. 
settings, tuning preprocessors influence number predictors, parameterization quite helpful—interpreting mtry proportion means [0, 1] always valid range parameter, regardless input data. parsnip extensions accommodate parameterization using counts argument: logical indicating whether mtry interpreted number predictors randomly sampled split. TRUE indicates mtry interpreted sense count, FALSE indicates argument interpreted sense proportion. mtry main model argument boost_tree() rand_forest(), thus engine-specific interface. , regardless engine, counts defaults TRUE. engines support proportion interpretation (currently \"xgboost\" \"xrf\", via rules package, \"lightgbm\" via bonsai package) user can pass counts = FALSE argument set_engine() supply mtry values within [0, 1].","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"early-stopping","dir":"Reference","previous_headings":"","what":"Early stopping","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"stop_iter() argument allows model prematurely stop training objective function improve within early_stop iterations. best way use feature conjunction internal validation set. , pass validation parameter xgb_train() via parsnip set_engine() function. proportion training set reserved measuring performance (stopping early). model specification early_stop >= trees, early_stop converted trees - 1 warning issued. Note , since validation argument provides alternative interface watchlist, watchlist argument guarded parsnip ignored (warning) passed.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"objective-function","dir":"Reference","previous_headings":"","what":"Objective function","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"parsnip chooses objective function based characteristics outcome. 
use different loss, pass objective argument set_engine() directly.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package. Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"“Fitting Predicting parsnip” article contains examples boost_tree() \"xgboost\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_boost_tree_xgboost.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Boosted trees via xgboost — details_boost_tree_xgboost","text":"XGBoost: Scalable Tree Boosting System Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_cubist_rules_Cubist.html","id":null,"dir":"Reference","previous_headings":"","what":"Cubist rule-based regression models — details_cubist_rules_Cubist","title":"Cubist rule-based regression models — details_cubist_rules_Cubist","text":"Cubist::cubist() fits model derives simple feature rules tree ensemble uses creates regression models within rule. 
rules::cubist_fit() wrapper around function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_cubist_rules_Cubist.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Cubist rule-based regression models — details_cubist_rules_Cubist","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_cubist_rules_Cubist.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Cubist rule-based regression models — details_cubist_rules_Cubist","text":"model 3 tuning parameters: committees: # Committees (type: integer, default: 1L) neighbors: # Nearest Neighbors (type: integer, default: 0L) max_rules: Max. Rules (type: integer, default: NA_integer)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_cubist_rules_Cubist.html","id":"translation-from-parsnip-to-the-underlying-model-call-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (regression)","title":"Cubist rule-based regression models — details_cubist_rules_Cubist","text":"rules extension package required fit model.","code":"library(rules) cubist_rules( committees = integer(1), neighbors = integer(1), max_rules = integer(1) ) %>% set_engine(\"Cubist\") %>% set_mode(\"regression\") %>% translate() ## Cubist Model Specification (regression) ## ## Main Arguments: ## committees = integer(1) ## neighbors = integer(1) ## max_rules = integer(1) ## ## Computational engine: Cubist ## ## Model fit template: ## rules::cubist_fit(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## committees = integer(1), neighbors = integer(1), max_rules = integer(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_cubist_rules_Cubist.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Cubist 
rule-based regression models — details_cubist_rules_Cubist","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_cubist_rules_Cubist.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Cubist rule-based regression models — details_cubist_rules_Cubist","text":"Quinlan R (1992). “Learning Continuous Classes.” Proceedings 5th Australian Joint Conference Artificial Intelligence, pp. 343-348. Quinlan R (1993).”Combining Instance-Based Model-Based Learning.” Proceedings Tenth International Conference Machine Learning, pp. 236-243. Kuhn M Johnson K (2013). Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":null,"dir":"Reference","previous_headings":"","what":"Decision trees via C5.0 — details_decision_tree_C5.0","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"C50::C5.0() fits model set /statements creates tree-based structure.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"model 1 tuning parameters: min_n: Minimal Node Size (type: integer, default: 
2L)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"C5.0_train() wrapper around C50::C5.0() makes easier run model.","code":"decision_tree(min_n = integer()) %>% set_engine(\"C5.0\") %>% set_mode(\"classification\") %>% translate() ## Decision Tree Model Specification (classification) ## ## Main Arguments: ## min_n = integer() ## ## Computational engine: C5.0 ## ## Model fit template: ## parsnip::C5.0_train(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## minCases = integer(), trials = 1)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"“Fitting Predicting parsnip” article contains examples decision_tree() \"C5.0\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_C5.0.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Decision trees via C5.0 — details_decision_tree_C5.0","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":null,"dir":"Reference","previous_headings":"","what":"Decision trees via partykit — details_decision_tree_partykit","title":"Decision trees via partykit — details_decision_tree_partykit","text":"partykit::ctree() fits model set /statements creates tree-based structure using hypothesis testing methods.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Decision trees via partykit — details_decision_tree_partykit","text":"engine, multiple modes: censored regression, regression, classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Decision trees via partykit — details_decision_tree_partykit","text":"model 2 tuning parameters: tree_depth: Tree Depth (type: integer, default: see ) min_n: Minimal Node Size (type: integer, default: 20L) tree_depth parameter defaults 0 means restrictions applied tree depth. engine-specific parameter model : mtry: number predictors, selected random, evaluated splitting. 
default use predictors.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Decision trees via partykit — details_decision_tree_partykit","text":"bonsai extension package required fit model.","code":"library(bonsai) decision_tree(tree_depth = integer(1), min_n = integer(1)) %>% set_engine(\"partykit\") %>% set_mode(\"regression\") %>% translate() ## Decision Tree Model Specification (regression) ## ## Main Arguments: ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: partykit ## ## Model fit template: ## parsnip::ctree_train(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), maxdepth = integer(1), minsplit = min_rows(0L, ## data))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Decision trees via partykit — details_decision_tree_partykit","text":"bonsai extension package required fit model. 
parsnip::ctree_train() wrapper around partykit::ctree() (functions) makes easier run model.","code":"library(bonsai) decision_tree(tree_depth = integer(1), min_n = integer(1)) %>% set_engine(\"partykit\") %>% set_mode(\"classification\") %>% translate() ## Decision Tree Model Specification (classification) ## ## Main Arguments: ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: partykit ## ## Model fit template: ## parsnip::ctree_train(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), maxdepth = integer(1), minsplit = min_rows(0L, ## data))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":"translation-from-parsnip-to-the-original-package-censored-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (censored regression)","title":"Decision trees via partykit — details_decision_tree_partykit","text":"censored extension package required fit model. censored::cond_inference_surv_ctree() wrapper around partykit::ctree() (functions) makes easier run model.","code":"library(censored) decision_tree(tree_depth = integer(1), min_n = integer(1)) %>% set_engine(\"partykit\") %>% set_mode(\"censored regression\") %>% translate() ## Decision Tree Model Specification (censored regression) ## ## Main Arguments: ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: partykit ## ## Model fit template: ## parsnip::ctree_train(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), maxdepth = integer(1), minsplit = min_rows(0L, ## data))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Decision trees via partykit — details_decision_tree_partykit","text":"engine require special encoding predictors. 
Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Decision trees via partykit — details_decision_tree_partykit","text":"Predictions type \"time\" predictions median survival time.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_partykit.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Decision trees via partykit — details_decision_tree_partykit","text":"partykit: Modular Toolkit Recursive Partytioning R Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":null,"dir":"Reference","previous_headings":"","what":"Decision trees via CART — details_decision_tree_rpart","title":"Decision trees via CART — details_decision_tree_rpart","text":"rpart::rpart() fits model set /statements creates tree-based structure.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Decision trees via CART — details_decision_tree_rpart","text":"engine, multiple modes: classification, regression, censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Decision trees via CART — details_decision_tree_rpart","text":"model 3 tuning parameters: tree_depth: Tree Depth (type: integer, default: 30L) min_n: Minimal Node Size (type: integer, default: 2L) cost_complexity: Cost-Complexity Parameter (type: double, default: 
0.01)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Decision trees via CART — details_decision_tree_rpart","text":"","code":"decision_tree(tree_depth = integer(1), min_n = integer(1), cost_complexity = double(1)) %>% set_engine(\"rpart\") %>% set_mode(\"classification\") %>% translate() ## Decision Tree Model Specification (classification) ## ## Main Arguments: ## cost_complexity = double(1) ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: rpart ## ## Model fit template: ## rpart::rpart(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## cp = double(1), maxdepth = integer(1), minsplit = min_rows(0L, ## data))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Decision trees via CART — details_decision_tree_rpart","text":"","code":"decision_tree(tree_depth = integer(1), min_n = integer(1), cost_complexity = double(1)) %>% set_engine(\"rpart\") %>% set_mode(\"regression\") %>% translate() ## Decision Tree Model Specification (regression) ## ## Main Arguments: ## cost_complexity = double(1) ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: rpart ## ## Model fit template: ## rpart::rpart(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## cp = double(1), maxdepth = integer(1), minsplit = min_rows(0L, ## 
data))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"translation-from-parsnip-to-the-original-package-censored-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (censored regression)","title":"Decision trees via CART — details_decision_tree_rpart","text":"censored extension package required fit model.","code":"library(censored) decision_tree( tree_depth = integer(1), min_n = integer(1), cost_complexity = double(1) ) %>% set_engine(\"rpart\") %>% set_mode(\"censored regression\") %>% translate() ## Decision Tree Model Specification (censored regression) ## ## Main Arguments: ## cost_complexity = double(1) ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: rpart ## ## Model fit template: ## pec::pecRpart(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), cp = double(1), maxdepth = integer(1), ## minsplit = min_rows(0L, data))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Decision trees via CART — details_decision_tree_rpart","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Decision trees via CART — details_decision_tree_rpart","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Decision trees via CART — details_decision_tree_rpart","text":"Predictions type \"time\" predictions mean survival time.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Decision trees via CART — details_decision_tree_rpart","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Decision trees via CART — details_decision_tree_rpart","text":"“Fitting Predicting parsnip” article contains examples decision_tree() \"rpart\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_rpart.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Decision trees via CART — details_decision_tree_rpart","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":null,"dir":"Reference","previous_headings":"","what":"Decision trees via Spark — details_decision_tree_spark","title":"Decision trees via Spark — details_decision_tree_spark","text":"sparklyr::ml_decision_tree() fits model set /statements creates tree-based structure.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Decision trees via Spark — details_decision_tree_spark","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Decision trees via Spark — details_decision_tree_spark","text":"model 2 tuning parameters: tree_depth: Tree Depth (type: integer, default: 5L) min_n: Minimal Node Size (type: integer, default: 1L)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Decision trees via Spark — details_decision_tree_spark","text":"","code":"decision_tree(tree_depth = integer(1), min_n = integer(1)) %>% set_engine(\"spark\") %>% set_mode(\"classification\") %>% translate() ## Decision Tree Model Specification (classification) ## ## Main Arguments: ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_decision_tree_classifier(x = missing_arg(), formula = missing_arg(), ## max_depth = integer(1), min_instances_per_node = min_rows(0L, ## x), seed = sample.int(10^5, 
1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Decision trees via Spark — details_decision_tree_spark","text":"","code":"decision_tree(tree_depth = integer(1), min_n = integer(1)) %>% set_engine(\"spark\") %>% set_mode(\"regression\") %>% translate() ## Decision Tree Model Specification (regression) ## ## Main Arguments: ## tree_depth = integer(1) ## min_n = integer(1) ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_decision_tree_regressor(x = missing_arg(), formula = missing_arg(), ## max_depth = integer(1), min_instances_per_node = min_rows(0L, ## x), seed = sample.int(10^5, 1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Decision trees via Spark — details_decision_tree_spark","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Decision trees via Spark — details_decision_tree_spark","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. 
Note , spark engines, case_weight argument value character string specify column numeric case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Decision trees via Spark — details_decision_tree_spark","text":"models created using \"spark\" engine, several things consider. formula interface via fit() available; using fit_xy() generate error. predictions always Spark table format. names documented without dots. equivalent factor columns Spark tables class predictions returned character columns. retain model object new R session (via save()), model$fit element parsnip object serialized via ml_save(object$fit) separately saved disk. new session, object can reloaded reattached parsnip object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_decision_tree_spark.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Decision trees via Spark — details_decision_tree_spark","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_flexible_earth.html","id":null,"dir":"Reference","previous_headings":"","what":"Flexible discriminant analysis via earth — details_discrim_flexible_earth","title":"Flexible discriminant analysis via earth — details_discrim_flexible_earth","text":"mda::fda() (conjunction earth::earth() can fit nonlinear discriminant analysis model uses nonlinear features created using multivariate adaptive regression splines (MARS). 
function can fit classification models.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_flexible_earth.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Flexible discriminant analysis via earth — details_discrim_flexible_earth","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_flexible_earth.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Flexible discriminant analysis via earth — details_discrim_flexible_earth","text":"model 3 tuning parameter: num_terms: # Model Terms (type: integer, default: (see )) prod_degree: Degree Interaction (type: integer, default: 1L) prune_method: Pruning Method (type: character, default: ‘backward’) default value num_terms depends number columns (p): min(200, max(20, 2 * p)) + 1. Note num_terms = 1 intercept-model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_flexible_earth.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Flexible discriminant analysis via earth — details_discrim_flexible_earth","text":"discrim extension package required fit model.","code":"library(discrim) discrim_flexible( num_terms = integer(0), prod_degree = integer(0), prune_method = character(0) ) %>% translate() ## Flexible Discriminant Model Specification (classification) ## ## Main Arguments: ## num_terms = integer(0) ## prod_degree = integer(0) ## prune_method = character(0) ## ## Computational engine: earth ## ## Model fit template: ## mda::fda(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## nprune = integer(0), degree = integer(0), pmethod = character(0), ## method = 
earth::earth)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_flexible_earth.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Flexible discriminant analysis via earth — details_discrim_flexible_earth","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_flexible_earth.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Flexible discriminant analysis via earth — details_discrim_flexible_earth","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_flexible_earth.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Flexible discriminant analysis via earth — details_discrim_flexible_earth","text":"Hastie, Tibshirani & Buja (1994) Flexible Discriminant Analysis Optimal Scoring, Journal American Statistical Association, 89:428, 1255-1270 Friedman (1991). Multivariate Adaptive Regression Splines. Annals Statistics, 19(1), 1-67.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_MASS.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear discriminant analysis via MASS — details_discrim_linear_MASS","title":"Linear discriminant analysis via MASS — details_discrim_linear_MASS","text":"MASS::lda() fits model estimates multivariate distribution predictors separately data class (Gaussian common covariance matrix). 
Bayes' theorem used compute probability class, given predictor values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_MASS.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear discriminant analysis via MASS — details_discrim_linear_MASS","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_MASS.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear discriminant analysis via MASS — details_discrim_linear_MASS","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_MASS.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear discriminant analysis via MASS — details_discrim_linear_MASS","text":"discrim extension package required fit model.","code":"library(discrim) discrim_linear() %>% set_engine(\"MASS\") %>% translate() ## Linear Discriminant Model Specification (classification) ## ## Computational engine: MASS ## ## Model fit template: ## MASS::lda(formula = missing_arg(), data = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_MASS.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear discriminant analysis via MASS — details_discrim_linear_MASS","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. 
Variance calculations used computations zero-variance predictors (.e., single unique value) eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_MASS.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear discriminant analysis via MASS — details_discrim_linear_MASS","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_MASS.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear discriminant analysis via MASS — details_discrim_linear_MASS","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_mda.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear discriminant analysis via flexible discriminant analysis — details_discrim_linear_mda","title":"Linear discriminant analysis via flexible discriminant analysis — details_discrim_linear_mda","text":"mda::fda() (conjunction mda::gen.ridge() can fit linear discriminant analysis model penalizes predictor coefficients quadratic penalty (.e., ridge weight decay approach).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_mda.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear discriminant analysis via flexible discriminant analysis — details_discrim_linear_mda","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_mda.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear discriminant analysis via flexible discriminant analysis — details_discrim_linear_mda","text":"model 1 tuning parameter: penalty: Amount Regularization (type: double, 
default: 1.0)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_mda.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear discriminant analysis via flexible discriminant analysis — details_discrim_linear_mda","text":"discrim extension package required fit model.","code":"library(discrim) discrim_linear(penalty = numeric(0)) %>% set_engine(\"mda\") %>% translate() ## Linear Discriminant Model Specification (classification) ## ## Main Arguments: ## penalty = numeric(0) ## ## Computational engine: mda ## ## Model fit template: ## mda::fda(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## lambda = numeric(0), method = mda::gen.ridge, keep.fitted = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_mda.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear discriminant analysis via flexible discriminant analysis — details_discrim_linear_mda","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Variance calculations used computations zero-variance predictors (.e., single unique value) eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_mda.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear discriminant analysis via flexible discriminant analysis — details_discrim_linear_mda","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_mda.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear discriminant analysis via flexible discriminant analysis — details_discrim_linear_mda","text":"Hastie, Tibshirani & Buja (1994) Flexible Discriminant Analysis Optimal Scoring, Journal American Statistical Association, 89:428, 1255-1270","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sda.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear discriminant analysis via James-Stein-type shrinkage estimation — details_discrim_linear_sda","title":"Linear discriminant analysis via James-Stein-type shrinkage estimation — details_discrim_linear_sda","text":"sda::sda() can fit linear discriminant analysis model can fit models classical discriminant analysis diagonal discriminant analysis.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sda.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear discriminant analysis via James-Stein-type shrinkage estimation — details_discrim_linear_sda","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sda.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear discriminant analysis via James-Stein-type shrinkage estimation — details_discrim_linear_sda","text":"engine tuning parameter arguments discrim_linear(). However, engine-specific parameters can set optimized calling set_engine(): lambda: shrinkage parameters correlation matrix. maps parameter dials::shrinkage_correlation(). lambda.var: shrinkage parameters predictor variances. maps dials::shrinkage_variance(). 
lambda.freqs: shrinkage parameters class frequencies. maps dials::shrinkage_frequencies(). diagonal: logical make model covariance diagonal . maps dials::diagonal_covariance().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sda.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear discriminant analysis via James-Stein-type shrinkage estimation — details_discrim_linear_sda","text":"discrim extension package required fit model.","code":"library(discrim) discrim_linear() %>% set_engine(\"sda\") %>% translate() ## Linear Discriminant Model Specification (classification) ## ## Computational engine: sda ## ## Model fit template: ## sda::sda(Xtrain = missing_arg(), L = missing_arg(), verbose = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sda.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear discriminant analysis via James-Stein-type shrinkage estimation — details_discrim_linear_sda","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. 
Variance calculations used computations zero-variance predictors (.e., single unique value) eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sda.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear discriminant analysis via James-Stein-type shrinkage estimation — details_discrim_linear_sda","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sda.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear discriminant analysis via James-Stein-type shrinkage estimation — details_discrim_linear_sda","text":"Ahdesmaki, ., K. Strimmer. 2010. Feature selection omics prediction problems using cat scores false non-discovery rate control. Ann. Appl. Stat. 4: 503-519. Preprint.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sparsediscrim.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear discriminant analysis via regularization — details_discrim_linear_sparsediscrim","title":"Linear discriminant analysis via regularization — details_discrim_linear_sparsediscrim","text":"Functions sparsediscrim package fit different types linear discriminant analysis model regularize estimates (like mean covariance).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sparsediscrim.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear discriminant analysis via regularization — details_discrim_linear_sparsediscrim","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sparsediscrim.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear discriminant analysis via 
regularization — details_discrim_linear_sparsediscrim","text":"model 1 tuning parameter: regularization_method: Regularization Method (type: character, default: ‘diagonal’) possible values parameter, functions execute, : \"diagonal\": sparsediscrim::lda_diag() \"min_distance\": sparsediscrim::lda_emp_bayes_eigen() \"shrink_mean\": sparsediscrim::lda_shrink_mean() \"shrink_cov\": sparsediscrim::lda_shrink_cov()","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sparsediscrim.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear discriminant analysis via regularization — details_discrim_linear_sparsediscrim","text":"discrim extension package required fit model.","code":"library(discrim) discrim_linear(regularization_method = character(0)) %>% set_engine(\"sparsediscrim\") %>% translate() ## Linear Discriminant Model Specification (classification) ## ## Main Arguments: ## regularization_method = character(0) ## ## Computational engine: sparsediscrim ## ## Model fit template: ## discrim::fit_regularized_linear(x = missing_arg(), y = missing_arg(), ## method = character(0))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sparsediscrim.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear discriminant analysis via regularization — details_discrim_linear_sparsediscrim","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. 
Variance calculations used computations zero-variance predictors (.e., single unique value) eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sparsediscrim.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear discriminant analysis via regularization — details_discrim_linear_sparsediscrim","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_linear_sparsediscrim.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear discriminant analysis via regularization — details_discrim_linear_sparsediscrim","text":"lda_diag(): Dudoit, Fridlyand Speed (2002) Comparison Discrimination Methods Classification Tumors Using Gene Expression Data, Journal American Statistical Association, 97:457, 77-87. lda_shrink_mean(): Tong, Chen, Zhao, Improved mean estimation application diagonal discriminant analysis, Bioinformatics, Volume 28, Issue 4, 15 February 2012, Pages 531-537. lda_shrink_cov(): Pang, Tong Zhao (2009), Shrinkage-based Diagonal Discriminant Analysis Applications High-Dimensional Data. Biometrics, 65, 1021-1029. lda_emp_bayes_eigen(): Srivistava Kubokawa (2007), Comparison Discrimination Methods High Dimensional Data, Journal Japan Statistical Society, 37:1, 123-134.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_MASS.html","id":null,"dir":"Reference","previous_headings":"","what":"Quadratic discriminant analysis via MASS — details_discrim_quad_MASS","title":"Quadratic discriminant analysis via MASS — details_discrim_quad_MASS","text":"MASS::qda() fits model estimates multivariate distribution predictors separately data class (Gaussian separate covariance matrices). 
Bayes' theorem used compute probability class, given predictor values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_MASS.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Quadratic discriminant analysis via MASS — details_discrim_quad_MASS","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_MASS.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Quadratic discriminant analysis via MASS — details_discrim_quad_MASS","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_MASS.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Quadratic discriminant analysis via MASS — details_discrim_quad_MASS","text":"discrim extension package required fit model.","code":"library(discrim) discrim_quad() %>% set_engine(\"MASS\") %>% translate() ## Quadratic Discriminant Model Specification (classification) ## ## Computational engine: MASS ## ## Model fit template: ## MASS::qda(formula = missing_arg(), data = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_MASS.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Quadratic discriminant analysis via MASS — details_discrim_quad_MASS","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Variance calculations used computations within outcome class. 
reason, zero-variance predictors (.e., single unique value) within class eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_MASS.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Quadratic discriminant analysis via MASS — details_discrim_quad_MASS","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_MASS.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Quadratic discriminant analysis via MASS — details_discrim_quad_MASS","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_sparsediscrim.html","id":null,"dir":"Reference","previous_headings":"","what":"Quadratic discriminant analysis via regularization — details_discrim_quad_sparsediscrim","title":"Quadratic discriminant analysis via regularization — details_discrim_quad_sparsediscrim","text":"Functions sparsediscrim package fit different types quadratic discriminant analysis model regularize estimates (like mean covariance).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_sparsediscrim.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Quadratic discriminant analysis via regularization — details_discrim_quad_sparsediscrim","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_sparsediscrim.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Quadratic discriminant analysis via regularization — details_discrim_quad_sparsediscrim","text":"model 1 tuning parameter: regularization_method: Regularization Method (type: character, default: ‘diagonal’) possible values 
parameter, functions execute, : \"diagonal\": sparsediscrim::qda_diag() \"shrink_mean\": sparsediscrim::qda_shrink_mean() \"shrink_cov\": sparsediscrim::qda_shrink_cov()","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_sparsediscrim.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Quadratic discriminant analysis via regularization — details_discrim_quad_sparsediscrim","text":"discrim extension package required fit model.","code":"library(discrim) discrim_quad(regularization_method = character(0)) %>% set_engine(\"sparsediscrim\") %>% translate() ## Quadratic Discriminant Model Specification (classification) ## ## Main Arguments: ## regularization_method = character(0) ## ## Computational engine: sparsediscrim ## ## Model fit template: ## discrim::fit_regularized_quad(x = missing_arg(), y = missing_arg(), ## method = character(0))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_sparsediscrim.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Quadratic discriminant analysis via regularization — details_discrim_quad_sparsediscrim","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Variance calculations used computations within outcome class. 
reason, zero-variance predictors (.e., single unique value) within class eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_sparsediscrim.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Quadratic discriminant analysis via regularization — details_discrim_quad_sparsediscrim","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_quad_sparsediscrim.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Quadratic discriminant analysis via regularization — details_discrim_quad_sparsediscrim","text":"qda_diag(): Dudoit, Fridlyand Speed (2002) Comparison Discrimination Methods Classification Tumors Using Gene Expression Data, Journal American Statistical Association, 97:457, 77-87. qda_shrink_mean(): Tong, Chen, Zhao, Improved mean estimation application diagonal discriminant analysis, Bioinformatics, Volume 28, Issue 4, 15 February 2012, Pages 531-537. qda_shrink_cov(): Pang, Tong Zhao (2009), Shrinkage-based Diagonal Discriminant Analysis Applications High-Dimensional Data. Biometrics, 65, 1021-1029.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_regularized_klaR.html","id":null,"dir":"Reference","previous_headings":"","what":"Regularized discriminant analysis via klaR — details_discrim_regularized_klaR","title":"Regularized discriminant analysis via klaR — details_discrim_regularized_klaR","text":"klaR::rda() fits model estimates multivariate distribution predictors separately data class. structure model can LDA, QDA, amalgam two. 
Bayes' theorem used compute probability class, given predictor values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_regularized_klaR.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Regularized discriminant analysis via klaR — details_discrim_regularized_klaR","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_regularized_klaR.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Regularized discriminant analysis via klaR — details_discrim_regularized_klaR","text":"model 2 tuning parameter: frac_common_cov: Fraction Common Covariance Matrix (type: double, default: (see )) frac_identity: Fraction Identity Matrix (type: double, default: (see )) special cases RDA model: frac_identity = 0 frac_common_cov = 1 linear discriminant analysis (LDA) model. frac_identity = 0 frac_common_cov = 0 quadratic discriminant analysis (QDA) model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_regularized_klaR.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Regularized discriminant analysis via klaR — details_discrim_regularized_klaR","text":"discrim extension package required fit model.","code":"library(discrim) discrim_regularized(frac_identity = numeric(0), frac_common_cov = numeric(0)) %>% set_engine(\"klaR\") %>% translate() ## Regularized Discriminant Model Specification (classification) ## ## Main Arguments: ## frac_common_cov = numeric(0) ## frac_identity = numeric(0) ## ## Computational engine: klaR ## ## Model fit template: ## klaR::rda(formula = missing_arg(), data = missing_arg(), lambda = numeric(0), ## gamma = 
numeric(0))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_regularized_klaR.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Regularized discriminant analysis via klaR — details_discrim_regularized_klaR","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Variance calculations used computations within outcome class. reason, zero-variance predictors (.e., single unique value) within class eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_regularized_klaR.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Regularized discriminant analysis via klaR — details_discrim_regularized_klaR","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_discrim_regularized_klaR.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Regularized discriminant analysis via klaR — details_discrim_regularized_klaR","text":"Friedman, J (1989). Regularized Discriminant Analysis. Journal American Statistical Association, 84, 165-175. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":null,"dir":"Reference","previous_headings":"","what":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"mgcv::gam() fits generalized linear model additive smoother terms continuous predictors.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"engine, multiple modes: regression classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"model 2 tuning parameters: select_features: Select Features? 
(type: logical, default: FALSE) adjust_deg_free: Smoothness Adjustment (type: double, default: 1.0)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"","code":"gen_additive_mod(adjust_deg_free = numeric(1), select_features = logical(1)) %>% set_engine(\"mgcv\") %>% set_mode(\"regression\") %>% translate() ## GAM Model Specification (regression) ## ## Main Arguments: ## select_features = logical(1) ## adjust_deg_free = numeric(1) ## ## Computational engine: mgcv ## ## Model fit template: ## mgcv::gam(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## select = logical(1), gamma = numeric(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"","code":"gen_additive_mod(adjust_deg_free = numeric(1), select_features = logical(1)) %>% set_engine(\"mgcv\") %>% set_mode(\"classification\") %>% translate() ## GAM Model Specification (classification) ## ## Main Arguments: ## select_features = logical(1) ## adjust_deg_free = numeric(1) ## ## Computational engine: mgcv ## ## Model fit template: ## mgcv::gam(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## select = logical(1), gamma = numeric(1), family = stats::binomial(link = \"logit\"))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"model-fitting","dir":"Reference","previous_headings":"","what":"Model 
fitting","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"model used model formula smooth terms can specified. example: smoothness terms need manually specified (e.g., using s(x, df = 10)) formula. Tuning can accomplished using adjust_deg_free parameter. using workflow, pass model formula add_model()’s formula argument, simplified preprocessing formula elsewhere. learn differences formulas, see ?model_formula.","code":"library(mgcv) gen_additive_mod() %>% set_engine(\"mgcv\") %>% set_mode(\"regression\") %>% fit(mpg ~ wt + gear + cyl + s(disp, k = 10), data = mtcars) ## parsnip model object ## ## ## Family: gaussian ## Link function: identity ## ## Formula: ## mpg ~ wt + gear + cyl + s(disp, k = 10) ## ## Estimated degrees of freedom: ## 7.52 total = 11.52 ## ## GCV score: 4.225228 spec <- gen_additive_mod() %>% set_engine(\"mgcv\") %>% set_mode(\"regression\") workflow() %>% add_model(spec, formula = mpg ~ wt + gear + cyl + s(disp, k = 10)) %>% add_formula(mpg ~ wt + gear + cyl + disp) %>% fit(data = mtcars) %>% extract_fit_engine() ## ## Family: gaussian ## Link function: identity ## ## Formula: ## mpg ~ wt + gear + cyl + s(disp, k = 10) ## ## Estimated degrees of freedom: ## 7.52 total = 11.52 ## ## GCV score: 4.225228"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. 
using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_gen_additive_mod_mgcv.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Generalized additive models via mgcv — details_gen_additive_mod_mgcv","text":"Ross, W. 2021. Generalized Additive Models R: Free, Interactive Course using mgcv Wood, S. 2017. Generalized Additive Models: Introduction R. 
Chapman Hall/CRC.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_brulee.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via brulee — details_linear_reg_brulee","title":"Linear regression via brulee — details_linear_reg_brulee","text":"brulee::brulee_linear_reg() uses ordinary least squares fit models numeric outcomes.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_brulee.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via brulee — details_linear_reg_brulee","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_brulee.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via brulee — details_linear_reg_brulee","text":"model 2 tuning parameter: penalty: Amount Regularization (type: double, default: 0.001) mixture: Proportion Lasso Penalty (type: double, default: 0.0) use L1 penalty (.k.. lasso penalty) force parameters strictly zero (packages glmnet). zeroing parameters specific feature optimization method used packages. engine arguments interest: optimizer(): optimization method. See brulee::brulee_linear_reg(). epochs(): integer number passes training set. lean_rate(): number used accelerate gradient decsent process. momentum(): number used use historical gradient infomration optimization (optimizer = \"SGD\" ). batch_size(): integer number training set points batch. stop_iter(): non-negative integer many iterations improvement stopping. 
(default: 5L).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_brulee.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Linear regression via brulee — details_linear_reg_brulee","text":"","code":"linear_reg(penalty = double(1)) %>% set_engine(\"brulee\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Main Arguments: ## penalty = double(1) ## ## Computational engine: brulee ## ## Model fit template: ## brulee::brulee_linear_reg(x = missing_arg(), y = missing_arg(), ## penalty = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_brulee.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via brulee — details_linear_reg_brulee","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_brulee.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via brulee — details_linear_reg_brulee","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_brulee.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via brulee — details_linear_reg_brulee","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gee.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","title":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","text":"gee::gee() uses generalized least squares fit different types models errors independent.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gee.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gee.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","text":"model formal tuning parameters. may beneficial determine appropriate correlation structure use, typically affect predicted value model. effect inferential results parameter covariance values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gee.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","text":"multilevelmod extension package required fit model. 
multilevelmod::gee_fit() wrapper model around gee::gee().","code":"library(multilevelmod) linear_reg() %>% set_engine(\"gee\") %>% set_mode(\"regression\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: gee ## ## Model fit template: ## multilevelmod::gee_fit(formula = missing_arg(), data = missing_arg(), ## family = gaussian)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gee.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gee.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","text":"model accept case weights. gee:gee() gee:geepack() specify id/cluster variable using argument id requires vector. parsnip doesn’t work way enable model fit using artificial function id_var() used formula. , original package, call look like: parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply GEE formula adding model: gee::gee() function always prints warnings output even silent = TRUE. parsnip \"gee\" engine, contrast, silences console output coming gee::gee(), even silent = FALSE. 
Also, issues gee() function, supplementary call glm() needed get rank QR decomposition objects predict() can used.","code":"gee(breaks ~ tension, id = wool, data = warpbreaks, corstr = \"exchangeable\") library(tidymodels) linear_reg() %>% set_engine(\"gee\", corstr = \"exchangeable\") %>% fit(breaks ~ tension + id_var(wool), data = warpbreaks) library(tidymodels) gee_spec <- linear_reg() %>% set_engine(\"gee\", corstr = \"exchangeable\") gee_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = breaks, predictors = c(tension, wool)) %>% add_model(gee_spec, formula = breaks ~ tension + id_var(wool)) fit(gee_wflow, data = warpbreaks)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gee.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gee.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via generalized estimating equations (GEE) — details_linear_reg_gee","text":"Liang, K.Y. Zeger, S.L. (1986) Longitudinal data analysis using generalized linear models. Biometrika, 73 13–22. Zeger, S.L. Liang, K.Y. (1986) Longitudinal data analysis discrete continuous outcomes. Biometrics, 42 121–130.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via glm — details_linear_reg_glm","title":"Linear regression via glm — details_linear_reg_glm","text":"stats::glm() fits generalized linear model numeric outcomes. 
linear combination predictors used model numeric outcome via link function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via glm — details_linear_reg_glm","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via glm — details_linear_reg_glm","text":"engine tuning parameters can set family parameter (/link) engine argument (see ).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via glm — details_linear_reg_glm","text":"use non-default family /link, pass argument set_engine():","code":"linear_reg() %>% set_engine(\"glm\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: glm ## ## Model fit template: ## stats::glm(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## family = stats::gaussian) linear_reg() %>% set_engine(\"glm\", family = stats::poisson(link = \"sqrt\")) %>% translate() ## Linear Regression Model Specification (regression) ## ## Engine-Specific Arguments: ## family = stats::poisson(link = \"sqrt\") ## ## Computational engine: glm ## ## Model fit template: ## stats::glm(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## family = stats::poisson(link = \"sqrt\"))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via glm — 
details_linear_reg_glm","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via glm — details_linear_reg_glm","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. However, documentation stats::glm() assumes specific type case weights used:“Non-NULL weights can used indicate different observations different dispersions (values weights inversely proportional dispersions); equivalently, elements weights positive integers w_i, response y_i mean w_i unit-weight observations. binomial GLM prior weights used give number trials response proportion successes: rarely used Poisson GLM.”","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Linear regression via glm — details_linear_reg_glm","text":"model object contains data required make predictions. 
saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear regression via glm — details_linear_reg_glm","text":"“Fitting Predicting parsnip” article contains examples linear_reg() \"glm\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glm.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via glm — details_linear_reg_glm","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via generalized mixed models — details_linear_reg_glmer","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"\"glmer\" engine estimates fixed random effect regression parameters using maximum likelihood (restricted maximum likelihood) estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"model tuning 
parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"multilevelmod extension package required fit model. Note using engine linear link function result warning:","code":"library(multilevelmod) linear_reg() %>% set_engine(\"glmer\") %>% set_mode(\"regression\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: glmer ## ## Model fit template: ## lme4::glmer(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## family = stats::gaussian) calling glmer() with family=gaussian (identity link) as a shortcut to lmer() is deprecated; please call lmer() directly"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":"predicting-new-samples","dir":"Reference","previous_headings":"","what":"Predicting new samples","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"model can use subject-specific coefficient estimates make predictions (.e. partial pooling). example, equation shows linear predictor (\\eta) random intercept: $$ denotes ith independent experimental unit (e.g. subject). model seen subject , can use subject’s data adjust population intercept specific subjects results. happens data predicted subject used model fit? case, package uses population parameter estimates prediction: Depending covariates model, might effect making prediction new samples. population parameters “best estimate” subject included model fit. 
tidymodels framework deliberately constrains predictions new data use training set data (prevent information leakage).","code":"\\eta_{i} = (\\beta_0 + b_{0i}) + \\beta_1x_{i1} \\hat{\\eta}_{i'} = \\hat{\\beta}_0+ \\hat{\\beta}x_{i'1}"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"model can accept case weights. parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply typical formula adding model:","code":"library(tidymodels) data(\"riesby\") linear_reg() %>% set_engine(\"glmer\") %>% fit(depr_score ~ week + (1|subject), data = riesby) library(tidymodels) glmer_spec <- linear_reg() %>% set_engine(\"glmer\") glmer_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = depr_score, predictors = c(week, subject)) %>% add_model(glmer_spec, formula = depr_score ~ week + (1|subject)) fit(glmer_wflow, data = riesby)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"model can utilize case weights model fitting. 
use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmer.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via generalized mixed models — details_linear_reg_glmer","text":"J Pinheiro, D Bates. 2000. Mixed-effects models S S-PLUS. Springer, New York, NY West, K, Band Welch, Galecki. 2014. Linear Mixed Models: Practical Guide Using Statistical Software. CRC Press. Thorson, J, Minto, C. 2015, Mixed effects: unifying framework statistical modelling fisheries biology. ICES Journal Marine Science, Volume 72, Issue 5, Pages 1245–1256. Harrison, XA, Donaldson, L, Correa-Cano, , Evans, J, Fisher, DN, Goodwin, CED, Robinson, BS, Hodgson, DJ, Inger, R. 2018. brief introduction mixed effects modelling multi-model inference ecology. PeerJ 6:e4794. DeBruine LM, Barr DJ. Understanding Mixed-Effects Models Data Simulation. 2021. 
Advances Methods Practices Psychological Science.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via glmnet — details_linear_reg_glmnet","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"glmnet::glmnet() uses regularized least squares fit models numeric outcomes.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: see ) mixture: Proportion Lasso Penalty (type: double, default: 1.0) value mixture = 1 corresponds pure lasso model, mixture = 0 indicates ridge regression. penalty parameter default requires single numeric value. 
details , glmnet model general, see glmnet-details.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"","code":"linear_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"glmnet\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Main Arguments: ## penalty = 0 ## mixture = double(1) ## ## Computational engine: glmnet ## ## Model fit template: ## glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## alpha = double(1), family = \"gaussian\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, glmnet::glmnet() uses argument standardize = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"“Fitting Predicting parsnip” article contains examples linear_reg() \"glmnet\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_glmnet.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via glmnet — details_linear_reg_glmnet","text":"Hastie, T, R Tibshirani, M Wainwright. 2015. Statistical Learning Sparsity. CRC Press. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gls.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via generalized least squares — details_linear_reg_gls","title":"Linear regression via generalized least squares — details_linear_reg_gls","text":"\"gls\" engine estimates linear regression models rows data independent.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gls.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via generalized least squares — details_linear_reg_gls","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gls.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via generalized least squares — details_linear_reg_gls","text":"model tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gls.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via generalized least squares — details_linear_reg_gls","text":"multilevelmod extension package required fit model.","code":"library(multilevelmod) linear_reg() %>% set_engine(\"gls\") %>% set_mode(\"regression\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: gls ## ## Model fit template: ## nlme::gls(formula = missing_arg(), data = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gls.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via generalized least squares — details_linear_reg_gls","text":"specific preprocessing needs. 
However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gls.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Linear regression via generalized least squares — details_linear_reg_gls","text":"model can accept case weights. parsnip, suggest using fixed effects formula method fitting, details correlation structure passed set_engine() since irregular (required) argument: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply typical formula adding model:","code":"library(tidymodels) # load nlme to be able to use the `cor*()` functions library(nlme) data(\"riesby\") linear_reg() %>% set_engine(\"gls\", correlation = corCompSymm(form = ~ 1 | subject)) %>% fit(depr_score ~ week, data = riesby) ## parsnip model object ## ## Generalized least squares fit by REML ## Model: depr_score ~ week ## Data: data ## Log-restricted-likelihood: -765.0148 ## ## Coefficients: ## (Intercept) week ## -4.953439 -2.119678 ## ## Correlation Structure: Compound symmetry ## Formula: ~1 | subject ## Parameter estimate(s): ## Rho ## 0.6820145 ## Degrees of freedom: 250 total; 248 residual ## Residual standard error: 6.868785 library(tidymodels) gls_spec <- linear_reg() %>% set_engine(\"gls\", correlation = corCompSymm(form = ~ 1 | subject)) gls_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = depr_score, predictors = c(week, subject)) %>% add_model(gls_spec, formula = depr_score ~ week) fit(gls_wflow, data = riesby)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gls.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via generalized least squares — details_linear_reg_gls","text":"underlying 
model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_gls.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via generalized least squares — details_linear_reg_gls","text":"J Pinheiro, D Bates. 2000. Mixed-effects models S S-PLUS. Springer, New York, NY","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via h2o — details_linear_reg_h2o","title":"Linear regression via h2o — details_linear_reg_h2o","text":"model uses regularized least squares fit models numeric outcomes.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via h2o — details_linear_reg_h2o","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via h2o — details_linear_reg_h2o","text":"model 2 tuning parameters: mixture: Proportion Lasso Penalty (type: double, default: see ) penalty: Amount Regularization (type: double, default: see ) default, given fixed penalty, h2o::h2o.glm() uses heuristic approach select optimal value penalty based training data. Setting engine parameter lambda_search TRUE enables efficient version grid search, see details https://docs.h2o.ai/h2o/latest-stable/h2o-docs/data-science/algo-params/lambda_search.html. choice mixture depends engine parameter solver, automatically chosen given training data specification model parameters. 
solver set 'L-BFGS', mixture defaults 0 (ridge regression) 0.5 otherwise.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_h2o.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via h2o — details_linear_reg_h2o","text":"agua::h2o_train_glm() linear_reg() wrapper around h2o::h2o.glm() family = \"gaussian\".","code":"linear_reg(penalty = 1, mixture = 0.5) %>% set_engine(\"h2o\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Main Arguments: ## penalty = 1 ## mixture = 0.5 ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), lambda = 1, alpha = 0.5, ## family = \"gaussian\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_h2o.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via h2o — details_linear_reg_h2o","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, h2o::h2o.glm() uses argument standardize = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Linear regression via h2o — details_linear_reg_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). 
can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Linear regression via h2o — details_linear_reg_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_keras.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via keras/tensorflow — details_linear_reg_keras","title":"Linear regression via keras/tensorflow — details_linear_reg_keras","text":"model uses regularized least squares fit models numeric outcomes.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_keras.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via keras/tensorflow — details_linear_reg_keras","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_keras.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via keras/tensorflow — details_linear_reg_keras","text":"model one tuning parameter: penalty: Amount Regularization (type: double, default: 0.0) penalty, amount regularization L2 penalty (.e., ridge weight 
decay).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_keras.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via keras/tensorflow — details_linear_reg_keras","text":"keras_mlp() parsnip wrapper around keras code neural networks. model fits linear regression network single hidden unit.","code":"linear_reg(penalty = double(1)) %>% set_engine(\"keras\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Main Arguments: ## penalty = double(1) ## ## Computational engine: keras ## ## Model fit template: ## parsnip::keras_mlp(x = missing_arg(), y = missing_arg(), penalty = double(1), ## hidden_units = 1, act = \"linear\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_keras.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via keras/tensorflow — details_linear_reg_keras","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. 
One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_keras.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via keras/tensorflow — details_linear_reg_keras","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_keras.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear regression via keras/tensorflow — details_linear_reg_keras","text":"“Fitting Predicting parsnip” article contains examples linear_reg() \"keras\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_keras.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via keras/tensorflow — details_linear_reg_keras","text":"Hoerl, ., & Kennard, R. (2000). Ridge Regression: Biased Estimation Nonorthogonal Problems. 
Technometrics, 42(1), 80-86.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via lm — details_linear_reg_lm","title":"Linear regression via lm — details_linear_reg_lm","text":"stats::lm() uses ordinary least squares fit models numeric outcomes.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via lm — details_linear_reg_lm","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via lm — details_linear_reg_lm","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via lm — details_linear_reg_lm","text":"","code":"linear_reg() %>% set_engine(\"lm\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: lm ## ## Model fit template: ## stats::lm(formula = missing_arg(), data = missing_arg(), weights = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via lm — details_linear_reg_lm","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. 
using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via lm — details_linear_reg_lm","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. However, documentation stats::lm() assumes specific type case weights used: “Non-NULL weights can used indicate different observations different variances (values weights inversely proportional variances); equivalently, elements weights positive integers w_i, response y_i mean w_i unit-weight observations (including case w_i observations equal y_i data summarized). However, latter case, notice within-group variation used. Therefore, sigma estimate residual degrees freedom may suboptimal; case replication weights, even wrong. Hence, standard errors analysis variance tables treated care” (emphasis added) Depending application, degrees freedom model (statistics) might incorrect.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Linear regression via lm — details_linear_reg_lm","text":"model object contains data required make predictions.
saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear regression via lm — details_linear_reg_lm","text":"“Fitting Predicting parsnip” article contains examples linear_reg() \"lm\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lm.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via lm — details_linear_reg_lm","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via mixed models — details_linear_reg_lme","title":"Linear regression via mixed models — details_linear_reg_lme","text":"\"lme\" engine estimates fixed random effect regression parameters using maximum likelihood (restricted maximum likelihood) estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via mixed models — details_linear_reg_lme","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via mixed models — details_linear_reg_lme","text":"model tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear 
regression via mixed models — details_linear_reg_lme","text":"multilevelmod extension package required fit model.","code":"library(multilevelmod) linear_reg() %>% set_engine(\"lme\") %>% set_mode(\"regression\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: lme ## ## Model fit template: ## nlme::lme(fixed = missing_arg(), data = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":"predicting-new-samples","dir":"Reference","previous_headings":"","what":"Predicting new samples","title":"Linear regression via mixed models — details_linear_reg_lme","text":"model can use subject-specific coefficient estimates make predictions (.e. partial pooling). example, equation shows linear predictor (\\eta) random intercept: $$ denotes ith independent experimental unit (e.g. subject). model seen subject , can use subject’s data adjust population intercept specific subjects results. happens data predicted subject used model fit? case, package uses population parameter estimates prediction: Depending covariates model, might effect making prediction new samples. population parameters “best estimate” subject included model fit. tidymodels framework deliberately constrains predictions new data use training set data (prevent information leakage).","code":"\\eta_{i} = (\\beta_0 + b_{0i}) + \\beta_1x_{i1} \\hat{\\eta}_{i'} = \\hat{\\beta}_0+ \\hat{\\beta}x_{i'1}"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via mixed models — details_linear_reg_lme","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). 
See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Linear regression via mixed models — details_linear_reg_lme","text":"model can accept case weights. parsnip, suggest using fixed effects formula method fitting, random effects formula passed set_engine() since irregular (required) argument: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply typical formula adding model:","code":"library(tidymodels) data(\"riesby\") linear_reg() %>% set_engine(\"lme\", random = ~ 1|subject) %>% fit(depr_score ~ week, data = riesby) library(tidymodels) lme_spec <- linear_reg() %>% set_engine(\"lme\", random = ~ 1|subject) lme_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = depr_score, predictors = c(week, subject)) %>% add_model(lme_spec, formula = depr_score ~ week) fit(lme_wflow, data = riesby)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via mixed models — details_linear_reg_lme","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lme.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via mixed models — details_linear_reg_lme","text":"J Pinheiro, D Bates. 2000. Mixed-effects models S S-PLUS. Springer, New York, NY West, K, Band Welch, Galecki. 2014. Linear Mixed Models: Practical Guide Using Statistical Software. CRC Press. Thorson, J, Minto, C. 2015, Mixed effects: unifying framework statistical modelling fisheries biology. ICES Journal Marine Science, Volume 72, Issue 5, Pages 1245–1256. 
Harrison, XA, Donaldson, L, Correa-Cano, , Evans, J, Fisher, DN, Goodwin, CED, Robinson, BS, Hodgson, DJ, Inger, R. 2018. brief introduction mixed effects modelling multi-model inference ecology. PeerJ 6:e4794. DeBruine LM, Barr DJ. Understanding Mixed-Effects Models Data Simulation. 2021. Advances Methods Practices Psychological Science.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via mixed models — details_linear_reg_lmer","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"\"lmer\" engine estimates fixed random effect regression parameters using maximum likelihood (restricted maximum likelihood) estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"model tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"multilevelmod extension package required fit model.","code":"library(multilevelmod) linear_reg() %>% set_engine(\"lmer\") %>% set_mode(\"regression\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: lmer ## ## Model fit template: ## lme4::lmer(formula = missing_arg(), 
data = missing_arg(), weights = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":"predicting-new-samples","dir":"Reference","previous_headings":"","what":"Predicting new samples","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"model can use subject-specific coefficient estimates make predictions (.e. partial pooling). example, equation shows linear predictor (\\eta) random intercept: $$ denotes ith independent experimental unit (e.g. subject). model seen subject , can use subject’s data adjust population intercept specific subjects results. happens data predicted subject used model fit? case, package uses population parameter estimates prediction: Depending covariates model, might effect making prediction new samples. population parameters “best estimate” subject included model fit. tidymodels framework deliberately constrains predictions new data use training set data (prevent information leakage).","code":"\\eta_{i} = (\\beta_0 + b_{0i}) + \\beta_1x_{i1} \\hat{\\eta}_{i'} = \\hat{\\beta}_0+ \\hat{\\beta}x_{i'1}"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"model can accept case weights. parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. 
case, can add appropriate columns using add_variables() supply typical formula adding model:","code":"library(tidymodels) data(\"riesby\") linear_reg() %>% set_engine(\"lmer\") %>% fit(depr_score ~ week + (1|subject), data = riesby) library(tidymodels) lmer_spec <- linear_reg() %>% set_engine(\"lmer\") lmer_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = depr_score, predictors = c(week, subject)) %>% add_model(lmer_spec, formula = depr_score ~ week + (1|subject)) fit(lmer_wflow, data = riesby)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_lmer.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via mixed models — details_linear_reg_lmer","text":"J Pinheiro, D Bates. 2000. Mixed-effects models S S-PLUS. Springer, New York, NY West, K, Band Welch, Galecki. 2014. Linear Mixed Models: Practical Guide Using Statistical Software. CRC Press. Thorson, J, Minto, C. 2015, Mixed effects: unifying framework statistical modelling fisheries biology. ICES Journal Marine Science, Volume 72, Issue 5, Pages 1245–1256. Harrison, XA, Donaldson, L, Correa-Cano, , Evans, J, Fisher, DN, Goodwin, CED, Robinson, BS, Hodgson, DJ, Inger, R. 2018. brief introduction mixed effects modelling multi-model inference ecology. PeerJ 6:e4794. DeBruine LM, Barr DJ. Understanding Mixed-Effects Models Data Simulation. 2021. 
Advances Methods Practices Psychological Science.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_spark.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via spark — details_linear_reg_spark","title":"Linear regression via spark — details_linear_reg_spark","text":"sparklyr::ml_linear_regression() uses regularized least squares fit models numeric outcomes.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_spark.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via spark — details_linear_reg_spark","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_spark.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via spark — details_linear_reg_spark","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: 0.0) mixture: Proportion Lasso Penalty (type: double, default: 0.0) penalty, amount regularization includes L1 penalty (.e., lasso) L2 penalty (.e., ridge weight decay). 
mixture: mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_spark.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via spark — details_linear_reg_spark","text":"","code":"linear_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"spark\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Main Arguments: ## penalty = double(1) ## mixture = double(1) ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_linear_regression(x = missing_arg(), formula = missing_arg(), ## weights = missing_arg(), reg_param = double(1), elastic_net_param = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_spark.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via spark — details_linear_reg_spark","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, ml_linear_regression() uses argument standardization = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_spark.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via spark — details_linear_reg_spark","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights. Note , spark engines, case_weight argument value character string specify column numeric case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_spark.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Linear regression via spark — details_linear_reg_spark","text":"models created using \"spark\" engine, several things consider. formula interface via fit() available; using fit_xy() generate error. predictions always Spark table format. names documented without dots. equivalent factor columns Spark tables class predictions returned character columns. retain model object new R session (via save()), model$fit element parsnip object serialized via ml_save(object$fit) separately saved disk. new session, object can reloaded reattached parsnip object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_spark.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via spark — details_linear_reg_spark","text":"Luraschi, J, K Kuo, E Ruiz. 2019. Mastering Spark R. O’Reilly Media Hastie, T, R Tibshirani, M Wainwright. 2015. Statistical Learning Sparsity. CRC Press. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via Bayesian Methods — details_linear_reg_stan","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"\"stan\" engine estimates regression parameters using Bayesian estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"important-engine-specific-options","dir":"Reference","previous_headings":"","what":"Important engine-specific options","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"relevant arguments can passed set_engine(): chains: positive integer specifying number Markov chains. default 4. iter: positive integer specifying number iterations chain (including warmup). default 2000. seed: seed random number generation. cores: Number cores use executing chains parallel. prior: prior distribution (non-hierarchical) regression coefficients. \"stan\" engine fit hierarchical terms. See \"stan_glmer\" engine multilevelmod package type model. prior_intercept: prior distribution intercept (centering predictors). 
See rstan::sampling() rstanarm::priors() information options.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"Note refresh default prevents logging estimation process. Change value set_engine() show MCMC logs.","code":"linear_reg() %>% set_engine(\"stan\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: stan ## ## Model fit template: ## rstanarm::stan_glm(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), family = stats::gaussian, refresh = 0)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"prediction, \"stan\" engine can compute posterior intervals analogous confidence prediction intervals. 
instances, units original outcome std_error = TRUE, standard deviation posterior distribution (posterior predictive distribution appropriate) returned.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"“Fitting Predicting parsnip” article contains examples linear_reg() \"stan\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via Bayesian Methods — details_linear_reg_stan","text":"McElreath, R. 2020 Statistical Rethinking. 
CRC Press.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"\"stan_glmer\" engine estimates hierarchical regression parameters using Bayesian estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"model tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"important-engine-specific-options","dir":"Reference","previous_headings":"","what":"Important engine-specific options","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"relevant arguments can passed set_engine(): chains: positive integer specifying number Markov chains. default 4. iter: positive integer specifying number iterations chain (including warmup). default 2000. seed: seed random number generation. cores: Number cores use executing chains parallel. prior: prior distribution (non-hierarchical) regression coefficients. prior_intercept: prior distribution intercept (centering predictors). 
See ?rstanarm::stan_glmer ?rstan::sampling information.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"multilevelmod extension package required fit model.","code":"library(multilevelmod) linear_reg() %>% set_engine(\"stan_glmer\") %>% set_mode(\"regression\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Computational engine: stan_glmer ## ## Model fit template: ## rstanarm::stan_glmer(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), family = stats::gaussian, refresh = 0)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"predicting-new-samples","dir":"Reference","previous_headings":"","what":"Predicting new samples","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"model can use subject-specific coefficient estimates make predictions (.e. partial pooling). example, equation shows linear predictor (\\eta) random intercept: $$ denotes ith independent experimental unit (e.g. subject). model seen subject , can use subject’s data adjust population intercept specific subjects results. happens data predicted subject used model fit? case, package uses population parameter estimates prediction: Depending covariates model, might effect making prediction new samples. population parameters “best estimate” subject included model fit. 
tidymodels framework deliberately constrains predictions new data use training set data (prevent information leakage).","code":"\\eta_{i} = (\\beta_0 + b_{0i}) + \\beta_1x_{i1} \\hat{\\eta}_{i'} = \\hat{\\beta}_0+ \\hat{\\beta}x_{i'1}"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"model can accept case weights. parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply typical formula adding model: prediction, \"stan_glmer\" engine can compute posterior intervals analogous confidence prediction intervals. instances, units original outcome. 
std_error = TRUE, standard deviation posterior distribution (posterior predictive distribution appropriate) returned.","code":"library(tidymodels) data(\"riesby\") linear_reg() %>% set_engine(\"stan_glmer\") %>% fit(depr_score ~ week + (1|subject), data = riesby) library(tidymodels) glmer_spec <- linear_reg() %>% set_engine(\"stan_glmer\") glmer_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = depr_score, predictors = c(week, subject)) %>% add_model(glmer_spec, formula = depr_score ~ week + (1|subject)) fit(glmer_wflow, data = riesby)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_linear_reg_stan_glmer.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression via hierarchical Bayesian methods — details_linear_reg_stan_glmer","text":"McElreath, R. 2020 Statistical Rethinking. CRC Press. Sorensen, T, Vasishth, S. 2016. Bayesian linear mixed models using Stan: tutorial psychologists, linguists, cognitive scientists, arXiv:1506.06201.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_LiblineaR.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via LiblineaR — details_logistic_reg_LiblineaR","title":"Logistic regression via LiblineaR — details_logistic_reg_LiblineaR","text":"LiblineaR::LiblineaR() fits generalized linear model binary outcomes. 
linear combination predictors used model log odds event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_LiblineaR.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via LiblineaR — details_logistic_reg_LiblineaR","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_LiblineaR.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via LiblineaR — details_logistic_reg_LiblineaR","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: see ) mixture: Proportion Lasso Penalty (type: double, default: 0) LiblineaR models, value mixture can either 0 (ridge) 1 (lasso) intermediate values. LiblineaR::LiblineaR() documentation, correspond types 0 (L2-regularized) 6 (L1-regularized). aware LiblineaR engine regularizes intercept. 
regularized regression models , result different parameter estimates.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_LiblineaR.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via LiblineaR — details_logistic_reg_LiblineaR","text":"","code":"logistic_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"LiblineaR\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## mixture = double(1) ## ## Computational engine: LiblineaR ## ## Model fit template: ## LiblineaR::LiblineaR(x = missing_arg(), y = missing_arg(), cost = Inf, ## type = double(1), verbose = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_LiblineaR.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via LiblineaR — details_logistic_reg_LiblineaR","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. 
One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_LiblineaR.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Logistic regression via LiblineaR — details_logistic_reg_LiblineaR","text":"“Fitting Predicting parsnip” article contains examples logistic_reg() \"LiblineaR\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_LiblineaR.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via LiblineaR — details_logistic_reg_LiblineaR","text":"Hastie, T, R Tibshirani, M Wainwright. 2015. Statistical Learning Sparsity. CRC Press. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_brulee.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via brulee — details_logistic_reg_brulee","title":"Logistic regression via brulee — details_logistic_reg_brulee","text":"brulee::brulee_logistic_reg() fits generalized linear model binary outcomes. 
linear combination predictors used model log odds event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_brulee.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via brulee — details_logistic_reg_brulee","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_brulee.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via brulee — details_logistic_reg_brulee","text":"model 2 tuning parameter: penalty: Amount Regularization (type: double, default: 0.001) mixture: Proportion Lasso Penalty (type: double, default: 0.0) use L1 penalty (.k.. lasso penalty) force parameters strictly zero (packages glmnet). zeroing parameters specific feature optimization method used packages. engine arguments interest: optimizer(): optimization method. See brulee::brulee_linear_reg(). epochs(): integer number passes training set. lean_rate(): number used accelerate gradient decsent process. momentum(): number used use historical gradient information optimization (optimizer = \"SGD\" ). batch_size(): integer number training set points batch. stop_iter(): non-negative integer many iterations improvement stopping. (default: 5L). class_weights(): Numeric class weights. See brulee::brulee_logistic_reg().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_brulee.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Logistic regression via brulee — details_logistic_reg_brulee","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. 
Predictors scale. One way achieve center scale predictor mean zero variance one.","code":"logistic_reg(penalty = double(1)) %>% set_engine(\"brulee\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## ## Computational engine: brulee ## ## Model fit template: ## brulee::brulee_logistic_reg(x = missing_arg(), y = missing_arg(), ## penalty = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_brulee.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via brulee — details_logistic_reg_brulee","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_brulee.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via brulee — details_logistic_reg_brulee","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_gee.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","title":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","text":"gee::gee() uses generalized least squares fit different types models errors independent.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_gee.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_gee.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","text":"model formal tuning parameters. may beneficial determine appropriate correlation structure use, typically affect predicted value model. effect inferential results parameter covariance values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_gee.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","text":"multilevelmod extension package required fit model. 
multilevelmod::gee_fit() wrapper model around gee::gee().","code":"library(multilevelmod) logistic_reg() %>% set_engine(\"gee\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Computational engine: gee ## ## Model fit template: ## multilevelmod::gee_fit(formula = missing_arg(), data = missing_arg(), ## family = binomial)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_gee.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_gee.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","text":"model accept case weights. gee:gee() gee:geepack() specify id/cluster variable using argument id requires vector. parsnip doesn’t work way enable model fit using artificial function id_var() used formula. , original package, call look like: parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply GEE formula adding model: gee::gee() function always prints warnings output even silent = TRUE. parsnip \"gee\" engine, contrast, silences console output coming gee::gee(), even silent = FALSE. 
Also, issues gee() function, supplementary call glm() needed get rank QR decomposition objects predict() can used.","code":"gee(breaks ~ tension, id = wool, data = warpbreaks, corstr = \"exchangeable\") library(tidymodels) data(\"toenail\", package = \"HSAUR3\") logistic_reg() %>% set_engine(\"gee\", corstr = \"exchangeable\") %>% fit(outcome ~ treatment * visit + id_var(patientID), data = toenail) library(tidymodels) gee_spec <- logistic_reg() %>% set_engine(\"gee\", corstr = \"exchangeable\") gee_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = outcome, predictors = c(treatment, visit, patientID)) %>% add_model(gee_spec, formula = outcome ~ treatment * visit + id_var(patientID)) fit(gee_wflow, data = toenail)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_gee.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_gee.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via generalized estimating equations (GEE) — details_logistic_reg_gee","text":"Liang, K.Y. Zeger, S.L. (1986) Longitudinal data analysis using generalized linear models. Biometrika, 73 13–22. Zeger, S.L. Liang, K.Y. (1986) Longitudinal data analysis discrete continuous outcomes. Biometrics, 42 121–130.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via glm — details_logistic_reg_glm","title":"Logistic regression via glm — details_logistic_reg_glm","text":"stats::glm() fits generalized linear model binary outcomes. 
linear combination predictors used model log odds event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via glm — details_logistic_reg_glm","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via glm — details_logistic_reg_glm","text":"engine tuning parameters can set family parameter (/link) engine argument (see ).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via glm — details_logistic_reg_glm","text":"use non-default family /link, pass argument set_engine():","code":"logistic_reg() %>% set_engine(\"glm\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Computational engine: glm ## ## Model fit template: ## stats::glm(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## family = stats::binomial) linear_reg() %>% set_engine(\"glm\", family = stats::binomial(link = \"probit\")) %>% translate() ## Linear Regression Model Specification (regression) ## ## Engine-Specific Arguments: ## family = stats::binomial(link = \"probit\") ## ## Computational engine: glm ## ## Model fit template: ## stats::glm(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## family = stats::binomial(link = \"probit\"))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing 
requirements","title":"Logistic regression via glm — details_logistic_reg_glm","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via glm — details_logistic_reg_glm","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. However, documentation stats::glm() assumes specific type case weights used:“Non-NULL weights can used indicate different observations different dispersions (values weights inversely proportional dispersions); equivalently, elements weights positive integers w_i, response y_i mean w_i unit-weight observations. binomial GLM prior weights used give number trials response proportion successes: rarely used Poisson GLM.”","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Logistic regression via glm — details_logistic_reg_glm","text":"model object contains data required make predictions. 
saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Logistic regression via glm — details_logistic_reg_glm","text":"“Fitting Predicting parsnip” article contains examples logistic_reg() \"glm\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glm.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via glm — details_logistic_reg_glm","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via mixed models — details_logistic_reg_glmer","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"\"glmer\" engine estimates fixed random effect regression parameters using maximum likelihood (restricted maximum likelihood) estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"model tuning 
parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"multilevelmod extension package required fit model.","code":"library(multilevelmod) logistic_reg() %>% set_engine(\"glmer\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Computational engine: glmer ## ## Model fit template: ## lme4::glmer(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## family = binomial)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":"predicting-new-samples","dir":"Reference","previous_headings":"","what":"Predicting new samples","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"model can use subject-specific coefficient estimates make predictions (.e. partial pooling). example, equation shows linear predictor (\\eta) random intercept: $$ denotes ith independent experimental unit (e.g. subject). model seen subject , can use subject’s data adjust population intercept specific subjects results. happens data predicted subject used model fit? case, package uses population parameter estimates prediction: Depending covariates model, might effect making prediction new samples. population parameters “best estimate” subject included model fit. 
tidymodels framework deliberately constrains predictions new data use training set data (prevent information leakage).","code":"\\eta_{i} = (\\beta_0 + b_{0i}) + \\beta_1x_{i1} \\hat{\\eta}_{i'} = \\hat{\\beta}_0+ \\hat{\\beta}x_{i'1}"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"model can accept case weights. parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. 
case, can add appropriate columns using add_variables() supply typical formula adding model:","code":"library(tidymodels) data(\"toenail\", package = \"HSAUR3\") logistic_reg() %>% set_engine(\"glmer\") %>% fit(outcome ~ treatment * visit + (1 | patientID), data = toenail) library(tidymodels) glmer_spec <- logistic_reg() %>% set_engine(\"glmer\") glmer_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = outcome, predictors = c(treatment, visit, patientID)) %>% add_model(glmer_spec, formula = outcome ~ treatment * visit + (1 | patientID)) fit(glmer_wflow, data = toenail)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmer.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via mixed models — details_logistic_reg_glmer","text":"J Pinheiro, D Bates. 2000. Mixed-effects models S S-PLUS. Springer, New York, NY West, K, Band Welch, Galecki. 2014. Linear Mixed Models: Practical Guide Using Statistical Software. CRC Press. Thorson, J, Minto, C. 2015, Mixed effects: unifying framework statistical modelling fisheries biology. ICES Journal Marine Science, Volume 72, Issue 5, Pages 1245–1256. Harrison, XA, Donaldson, L, Correa-Cano, , Evans, J, Fisher, DN, Goodwin, CED, Robinson, BS, Hodgson, DJ, Inger, R. 2018. brief introduction mixed effects modelling multi-model inference ecology. PeerJ 6:e4794. DeBruine LM, Barr DJ. Understanding Mixed-Effects Models Data Simulation. 2021. 
Advances Methods Practices Psychological Science.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via glmnet — details_logistic_reg_glmnet","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"glmnet::glmnet() fits generalized linear model binary outcomes. linear combination predictors used model log odds event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: see ) mixture: Proportion Lasso Penalty (type: double, default: 1.0) penalty parameter default requires single numeric value. details , glmnet model general, see glmnet-details. 
mixture: mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"","code":"logistic_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"glmnet\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Main Arguments: ## penalty = 0 ## mixture = double(1) ## ## Computational engine: glmnet ## ## Model fit template: ## glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## alpha = double(1), family = \"binomial\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, glmnet::glmnet() uses argument standardize = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"“Fitting Predicting parsnip” article contains examples logistic_reg() \"glmnet\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_glmnet.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via glmnet — details_logistic_reg_glmnet","text":"Hastie, T, R Tibshirani, M Wainwright. 2015. Statistical Learning Sparsity. CRC Press. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via h2o — details_logistic_reg_h2o","title":"Logistic regression via h2o — details_logistic_reg_h2o","text":"h2o::h2o.glm() fits generalized linear model binary outcomes. 
linear combination predictors used model log odds event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via h2o — details_logistic_reg_h2o","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via h2o — details_logistic_reg_h2o","text":"model 2 tuning parameters: mixture: Proportion Lasso Penalty (type: double, default: see ) penalty: Amount Regularization (type: double, default: see ) default, given fixed penalty, h2o::h2o.glm() uses heuristic approach select optimal value penalty based training data. Setting engine parameter lambda_search TRUE enables efficient version grid search, see details https://docs.h2o.ai/h2o/latest-stable/h2o-docs/data-science/algo-params/lambda_search.html. choice mixture depends engine parameter solver, automatically chosen given training data specification model parameters. solver set 'L-BFGS', mixture defaults 0 (ridge regression) 0.5 otherwise.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_h2o.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via h2o — details_logistic_reg_h2o","text":"agua::h2o_train_glm() logistic_reg() wrapper around h2o::h2o.glm(). h2o automatically picks link function distribution family binomial responses. 
use non-default argument h2o::h2o.glm(), pass engine argument set_engine():","code":"logistic_reg() %>% set_engine(\"h2o\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), family = \"binomial\") logistic_reg() %>% set_engine(\"h2o\", compute_p_values = TRUE) %>% translate() ## Logistic Regression Model Specification (classification) ## ## Engine-Specific Arguments: ## compute_p_values = TRUE ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), compute_p_values = TRUE, ## family = \"binomial\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_h2o.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via h2o — details_logistic_reg_h2o","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, h2o::h2o.glm() uses argument standardize = TRUE center scale numeric columns.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Logistic regression via h2o — details_logistic_reg_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. 
default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Logistic regression via h2o — details_logistic_reg_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via keras — details_logistic_reg_keras","title":"Logistic regression via keras — details_logistic_reg_keras","text":"keras_mlp() fits generalized linear model binary outcomes. 
linear combination predictors used model log odds event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via keras — details_logistic_reg_keras","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via keras — details_logistic_reg_keras","text":"model one tuning parameter: penalty: Amount Regularization (type: double, default: 0.0) penalty, amount regularization L2 penalty (.e., ridge weight decay).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via keras — details_logistic_reg_keras","text":"keras_mlp() parsnip wrapper around keras code neural networks. model fits linear regression network single hidden unit.","code":"logistic_reg(penalty = double(1)) %>% set_engine(\"keras\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## ## Computational engine: keras ## ## Model fit template: ## parsnip::keras_mlp(x = missing_arg(), y = missing_arg(), penalty = double(1), ## hidden_units = 1, act = \"linear\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via keras — details_logistic_reg_keras","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. 
using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via keras — details_logistic_reg_keras","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Logistic regression via keras — details_logistic_reg_keras","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Logistic regression via keras — details_logistic_reg_keras","text":"“Fitting Predicting parsnip” article contains examples logistic_reg() \"keras\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_keras.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via keras — details_logistic_reg_keras","text":"Hoerl, ., & Kennard, R. (2000). Ridge Regression: Biased Estimation Nonorthogonal Problems. 
Technometrics, 42(1), 80-86.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_spark.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via spark — details_logistic_reg_spark","title":"Logistic regression via spark — details_logistic_reg_spark","text":"sparklyr::ml_logistic_regression() fits generalized linear model binary outcomes. linear combination predictors used model log odds event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_spark.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via spark — details_logistic_reg_spark","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_spark.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via spark — details_logistic_reg_spark","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: 0.0) mixture: Proportion Lasso Penalty (type: double, default: 0.0) penalty, amount regularization includes L1 penalty (.e., lasso) L2 penalty (.e., ridge weight decay). 
mixture: mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_spark.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via spark — details_logistic_reg_spark","text":"","code":"logistic_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"spark\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## mixture = double(1) ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_logistic_regression(x = missing_arg(), formula = missing_arg(), ## weights = missing_arg(), reg_param = double(1), elastic_net_param = double(1), ## family = \"binomial\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_spark.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via spark — details_logistic_reg_spark","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, ml_logistic_regression() uses argument standardization = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_spark.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via spark — details_logistic_reg_spark","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights. Note , spark engines, case_weight argument value character string specify column numeric case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_spark.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Logistic regression via spark — details_logistic_reg_spark","text":"models created using \"spark\" engine, several things consider. formula interface via fit() available; using fit_xy() generate error. predictions always Spark table format. names documented without dots. equivalent factor columns Spark tables class predictions returned character columns. retain model object new R session (via save()), model$fit element parsnip object serialized via ml_save(object$fit) separately saved disk. new session, object can reloaded reattached parsnip object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_spark.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via spark — details_logistic_reg_spark","text":"Luraschi, J, K Kuo, E Ruiz. 2019. Mastering Spark R. O’Reilly Media Hastie, T, R Tibshirani, M Wainwright. 2015. Statistical Learning Sparsity. CRC Press. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via stan — details_logistic_reg_stan","title":"Logistic regression via stan — details_logistic_reg_stan","text":"rstanarm::stan_glm() fits generalized linear model binary outcomes. 
linear combination predictors used model log odds event.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via stan — details_logistic_reg_stan","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via stan — details_logistic_reg_stan","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"important-engine-specific-options","dir":"Reference","previous_headings":"","what":"Important engine-specific options","title":"Logistic regression via stan — details_logistic_reg_stan","text":"relevant arguments can passed set_engine(): chains: positive integer specifying number Markov chains. default 4. iter: positive integer specifying number iterations chain (including warmup). default 2000. seed: seed random number generation. cores: Number cores use executing chains parallel. prior: prior distribution (non-hierarchical) regression coefficients. \"stan\" engine fit hierarchical terms. prior_intercept: prior distribution intercept (centering predictors). See rstan::sampling() rstanarm::priors() information options.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via stan — details_logistic_reg_stan","text":"Note refresh default prevents logging estimation process. 
Change value set_engine() show MCMC logs.","code":"logistic_reg() %>% set_engine(\"stan\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Computational engine: stan ## ## Model fit template: ## rstanarm::stan_glm(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), family = stats::binomial, refresh = 0)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via stan — details_logistic_reg_stan","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Logistic regression via stan — details_logistic_reg_stan","text":"prediction, \"stan\" engine can compute posterior intervals analogous confidence prediction intervals. instances, units original outcome std_error = TRUE, standard deviation posterior distribution (posterior predictive distribution appropriate) returned.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via stan — details_logistic_reg_stan","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Logistic regression via stan — details_logistic_reg_stan","text":"“Fitting Predicting parsnip” article contains examples logistic_reg() \"stan\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via stan — details_logistic_reg_stan","text":"McElreath, R. 2020 Statistical Rethinking. CRC Press.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"\"stan_glmer\" engine estimates hierarchical regression parameters using Bayesian estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"model tuning 
parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"important-engine-specific-options","dir":"Reference","previous_headings":"","what":"Important engine-specific options","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"relevant arguments can passed set_engine(): chains: positive integer specifying number Markov chains. default 4. iter: positive integer specifying number iterations chain (including warmup). default 2000. seed: seed random number generation. cores: Number cores use executing chains parallel. prior: prior distribution (non-hierarchical) regression coefficients. prior_intercept: prior distribution intercept (centering predictors). See ?rstanarm::stan_glmer ?rstan::sampling information.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"multilevelmod extension package required fit model.","code":"library(multilevelmod) logistic_reg() %>% set_engine(\"stan_glmer\") %>% translate() ## Logistic Regression Model Specification (classification) ## ## Computational engine: stan_glmer ## ## Model fit template: ## rstanarm::stan_glmer(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), family = stats::binomial, refresh = 0)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"predicting-new-samples","dir":"Reference","previous_headings":"","what":"Predicting new samples","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"model can use subject-specific coefficient estimates make predictions (.e. 
partial pooling). example, equation shows linear predictor (\\eta) random intercept: $$ denotes ith independent experimental unit (e.g. subject). model seen subject , can use subject’s data adjust population intercept specific subjects results. happens data predicted subject used model fit? case, package uses population parameter estimates prediction: Depending covariates model, might effect making prediction new samples. population parameters “best estimate” subject included model fit. tidymodels framework deliberately constrains predictions new data use training set data (prevent information leakage).","code":"\\eta_{i} = (\\beta_0 + b_{0i}) + \\beta_1x_{i1} \\hat{\\eta}_{i'} = \\hat{\\beta}_0+ \\hat{\\beta}x_{i'1}"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"model can accept case weights. parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply typical formula adding model: prediction, \"stan_glmer\" engine can compute posterior intervals analogous confidence prediction intervals. instances, units original outcome. 
std_error = TRUE, standard deviation posterior distribution (posterior predictive distribution appropriate) returned.","code":"library(tidymodels) data(\"toenail\", package = \"HSAUR3\") logistic_reg() %>% set_engine(\"stan_glmer\") %>% fit(outcome ~ treatment * visit + (1 | patientID), data = toenail) library(tidymodels) glmer_spec <- logistic_reg() %>% set_engine(\"stan_glmer\") glmer_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = outcome, predictors = c(treatment, visit, patientID)) %>% add_model(glmer_spec, formula = outcome ~ treatment * visit + (1 | patientID)) fit(glmer_wflow, data = toenail)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_logistic_reg_stan_glmer.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression via hierarchical Bayesian methods — details_logistic_reg_stan_glmer","text":"McElreath, R. 2020 Statistical Rethinking. CRC Press. Sorensen, T, Vasishth, S. 2016. 
Bayesian linear mixed models using Stan: tutorial psychologists, linguists, cognitive scientists, arXiv:1506.06201.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":null,"dir":"Reference","previous_headings":"","what":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"earth::earth() fits generalized linear model uses artificial features predictors. features resemble hinge functions result model segmented regression small dimensions.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"model 3 tuning parameters: num_terms: # Model Terms (type: integer, default: see ) prod_degree: Degree Interaction (type: integer, default: 1L) prune_method: Pruning Method (type: character, default: ‘backward’) default value num_terms depends number predictor columns. 
data frame x, default min(200, max(20, 2 * ncol(x))) + 1 (see earth::earth() reference ).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"","code":"mars(num_terms = integer(1), prod_degree = integer(1), prune_method = character(1)) %>% set_engine(\"earth\") %>% set_mode(\"regression\") %>% translate() ## MARS Model Specification (regression) ## ## Main Arguments: ## num_terms = integer(1) ## prod_degree = integer(1) ## prune_method = character(1) ## ## Computational engine: earth ## ## Model fit template: ## earth::earth(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## nprune = integer(1), degree = integer(1), pmethod = character(1), ## keepxy = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"alternate method using MARs categorical outcomes can found discrim_flexible().","code":"mars(num_terms = integer(1), prod_degree = integer(1), prune_method = character(1)) %>% set_engine(\"earth\") %>% set_mode(\"classification\") %>% translate() ## MARS Model Specification (classification) ## ## Main Arguments: ## num_terms = integer(1) ## prod_degree = integer(1) ## prune_method = character(1) ## ## Engine-Specific Arguments: ## glm = list(family = stats::binomial) ## ## Computational engine: earth ## ## Model fit template: ## earth::earth(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## 
nprune = integer(1), degree = integer(1), pmethod = character(1), ## glm = list(family = stats::binomial), keepxy = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. Note earth package documentation : “current implementation, building models weights can slow.”","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"model object contains data required make predictions. 
saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"“Fitting Predicting parsnip” article contains examples mars() \"earth\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mars_earth.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multivariate adaptive regression splines (MARS) via earth — details_mars_earth","text":"Friedman, J. 1991. “Multivariate Adaptive Regression Splines.” Annals Statistics, vol. 19, . 1, pp. 1-67. Milborrow, S. “Notes earth package.” Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_brulee.html","id":null,"dir":"Reference","previous_headings":"","what":"Multilayer perceptron via brulee — details_mlp_brulee","title":"Multilayer perceptron via brulee — details_mlp_brulee","text":"brulee::brulee_mlp() fits neural network.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_brulee.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multilayer perceptron via brulee — details_mlp_brulee","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_brulee.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multilayer perceptron via brulee — details_mlp_brulee","text":"model 7 tuning parameters: hidden_units: # Hidden Units (type: integer, default: 3L) penalty: Amount Regularization (type: double, default: 0.0) mixture: Proportion Lasso Penalty (type: double, default: 0.0) 
epochs: # Epochs (type: integer, default: 100L) dropout: Dropout Rate (type: double, default: 0.0) learn_rate: Learning Rate (type: double, default: 0.01) activation: Activation Function (type: character, default: ‘relu’) use L1 penalty (.k.. lasso penalty) force parameters strictly zero (packages glmnet). zeroing parameters specific feature optimization method used packages. penalty dropout used model. engine arguments interest: momentum(): number used use historical gradient infomration optimization. batch_size(): integer number training set points batch. class_weights(): Numeric class weights. See brulee::brulee_mlp(). stop_iter(): non-negative integer many iterations improvement stopping. (default: 5L).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_brulee.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Multilayer perceptron via brulee — details_mlp_brulee","text":"Note parsnip automatically sets linear activation last layer.","code":"mlp( hidden_units = integer(1), penalty = double(1), dropout = double(1), epochs = integer(1), learn_rate = double(1), activation = character(1) ) %>% set_engine(\"brulee\") %>% set_mode(\"regression\") %>% translate() ## Single Layer Neural Network Model Specification (regression) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## dropout = double(1) ## epochs = integer(1) ## activation = character(1) ## learn_rate = double(1) ## ## Computational engine: brulee ## ## Model fit template: ## brulee::brulee_mlp(x = missing_arg(), y = missing_arg(), hidden_units = integer(1), ## penalty = double(1), dropout = double(1), epochs = integer(1), ## activation = character(1), learn_rate = 
double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_brulee.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Multilayer perceptron via brulee — details_mlp_brulee","text":"","code":"mlp( hidden_units = integer(1), penalty = double(1), dropout = double(1), epochs = integer(1), learn_rate = double(1), activation = character(1) ) %>% set_engine(\"brulee\") %>% set_mode(\"classification\") %>% translate() ## Single Layer Neural Network Model Specification (classification) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## dropout = double(1) ## epochs = integer(1) ## activation = character(1) ## learn_rate = double(1) ## ## Computational engine: brulee ## ## Model fit template: ## brulee::brulee_mlp(x = missing_arg(), y = missing_arg(), hidden_units = integer(1), ## penalty = double(1), dropout = double(1), epochs = integer(1), ## activation = character(1), learn_rate = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_brulee.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multilayer perceptron via brulee — details_mlp_brulee","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. 
One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_brulee.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multilayer perceptron via brulee — details_mlp_brulee","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_brulee.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multilayer perceptron via brulee — details_mlp_brulee","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Multilayer perceptron via h2o — details_mlp_h2o","title":"Multilayer perceptron via h2o — details_mlp_h2o","text":"h2o::h2o.deeplearning() fits feed-forward neural network.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multilayer perceptron via h2o — details_mlp_h2o","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multilayer perceptron via h2o — details_mlp_h2o","text":"model 6 tuning parameters: hidden_units: # Hidden Units (type: integer, default: 200L) penalty: Amount Regularization (type: double, default: 0.0) dropout: Dropout Rate (type: double, default: 0.5) epochs: # Epochs (type: integer, default: 10) activation: Activation function (type: character, default: ‘see ’) learn_rate: Learning Rate (type: double, default: 0.005) naming activation functions h2o::h2o.deeplearning() differs parsnip’s conventions. 
Currently, “relu” “tanh” supported converted internally “Rectifier” “Tanh” passed fitting function. penalty corresponds l2 penalty. h2o::h2o.deeplearning() also supports specifying l1 penalty directly engine argument l1. engine arguments interest: stopping_rounds controls early stopping rounds based convergence another engine parameter stopping_metric. default, h2o::h2o.deeplearning stops training simple moving average length 5 stopping_metric improve 5 scoring events. mostly useful used alongside engine parameter validation, proportion train-validation split, parsnip split pass two data frames h2o. h2o::h2o.deeplearning evaluate metric early stopping criteria validation set. h2o uses 50% dropout ratio controlled dropout hidden layers default. h2o::h2o.deeplearning() provides engine argument input_dropout_ratio dropout ratios input layer, defaults 0.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_h2o.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Multilayer perceptron via h2o — details_mlp_h2o","text":"agua::h2o_train_mlp wrapper around h2o::h2o.deeplearning().","code":"mlp( hidden_units = integer(1), penalty = double(1), dropout = double(1), epochs = integer(1), learn_rate = double(1), activation = character(1) ) %>% set_engine(\"h2o\") %>% set_mode(\"regression\") %>% translate() ## Single Layer Neural Network Model Specification (regression) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## dropout = double(1) ## epochs = integer(1) ## activation = character(1) ## learn_rate = double(1) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_mlp(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), hidden = integer(1), l2 = double(1), ## hidden_dropout_ratios = double(1), epochs = integer(1), 
activation = character(1), ## rate = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_h2o.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Multilayer perceptron via h2o — details_mlp_h2o","text":"","code":"mlp( hidden_units = integer(1), penalty = double(1), dropout = double(1), epochs = integer(1), learn_rate = double(1), activation = character(1) ) %>% set_engine(\"h2o\") %>% set_mode(\"classification\") %>% translate() ## Single Layer Neural Network Model Specification (classification) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## dropout = double(1) ## epochs = integer(1) ## activation = character(1) ## learn_rate = double(1) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_mlp(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), hidden = integer(1), l2 = double(1), ## hidden_dropout_ratios = double(1), epochs = integer(1), activation = character(1), ## rate = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_h2o.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multilayer perceptron via h2o — details_mlp_h2o","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. 
default, h2o::h2o.deeplearning() uses argument standardize = TRUE center scale numeric columns.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Multilayer perceptron via h2o — details_mlp_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Multilayer perceptron via h2o — details_mlp_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. 
learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":null,"dir":"Reference","previous_headings":"","what":"Multilayer perceptron via keras — details_mlp_keras","title":"Multilayer perceptron via keras — details_mlp_keras","text":"keras_mlp() fits single layer, feed-forward neural network.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multilayer perceptron via keras — details_mlp_keras","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multilayer perceptron via keras — details_mlp_keras","text":"model 5 tuning parameters: hidden_units: # Hidden Units (type: integer, default: 5L) penalty: Amount Regularization (type: double, default: 0.0) dropout: Dropout Rate (type: double, default: 0.0) epochs: # Epochs (type: integer, default: 20L) activation: Activation Function (type: character, default: ‘softmax’)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Multilayer perceptron via keras — details_mlp_keras","text":"","code":"mlp( hidden_units = integer(1), penalty = double(1), dropout = double(1), epochs = integer(1), activation = character(1) ) %>% set_engine(\"keras\") %>% set_mode(\"regression\") %>% translate() ## Single Layer Neural Network Model Specification (regression) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## dropout = double(1) ## epochs = integer(1) ## activation = 
character(1) ## ## Computational engine: keras ## ## Model fit template: ## parsnip::keras_mlp(x = missing_arg(), y = missing_arg(), hidden_units = integer(1), ## penalty = double(1), dropout = double(1), epochs = integer(1), ## activation = character(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Multilayer perceptron via keras — details_mlp_keras","text":"","code":"mlp( hidden_units = integer(1), penalty = double(1), dropout = double(1), epochs = integer(1), activation = character(1) ) %>% set_engine(\"keras\") %>% set_mode(\"classification\") %>% translate() ## Single Layer Neural Network Model Specification (classification) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## dropout = double(1) ## epochs = integer(1) ## activation = character(1) ## ## Computational engine: keras ## ## Model fit template: ## parsnip::keras_mlp(x = missing_arg(), y = missing_arg(), hidden_units = integer(1), ## penalty = double(1), dropout = double(1), epochs = integer(1), ## activation = character(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multilayer perceptron via keras — details_mlp_keras","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. 
One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multilayer perceptron via keras — details_mlp_keras","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Multilayer perceptron via keras — details_mlp_keras","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Multilayer perceptron via keras — details_mlp_keras","text":"“Fitting Predicting parsnip” article contains examples mlp() \"keras\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_keras.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multilayer perceptron via keras — details_mlp_keras","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":null,"dir":"Reference","previous_headings":"","what":"Multilayer perceptron via nnet — details_mlp_nnet","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"nnet::nnet() fits single layer, feed-forward neural network.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"model 3 tuning parameters: hidden_units: # Hidden Units (type: integer, default: none) penalty: Amount Regularization (type: double, default: 0.0) epochs: # Epochs (type: integer, default: 100L) Note , nnet::nnet(), maximum number parameters argument fairly low value maxit = 1000. 
models, may need pass value via set_engine() model fail.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"Note parsnip automatically sets linear activation last layer.","code":"mlp( hidden_units = integer(1), penalty = double(1), epochs = integer(1) ) %>% set_engine(\"nnet\") %>% set_mode(\"regression\") %>% translate() ## Single Layer Neural Network Model Specification (regression) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## epochs = integer(1) ## ## Computational engine: nnet ## ## Model fit template: ## nnet::nnet(formula = missing_arg(), data = missing_arg(), size = integer(1), ## decay = double(1), maxit = integer(1), trace = FALSE, linout = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"","code":"mlp( hidden_units = integer(1), penalty = double(1), epochs = integer(1) ) %>% set_engine(\"nnet\") %>% set_mode(\"classification\") %>% translate() ## Single Layer Neural Network Model Specification (classification) ## ## Main Arguments: ## hidden_units = integer(1) ## penalty = double(1) ## epochs = integer(1) ## ## Computational engine: nnet ## ## Model fit template: ## nnet::nnet(formula = missing_arg(), data = missing_arg(), size = integer(1), ## decay = double(1), maxit = integer(1), trace = FALSE, linout = 
FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"“Fitting Predicting parsnip” article contains examples mlp() \"nnet\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_mlp_nnet.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multilayer perceptron via nnet — details_mlp_nnet","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_brulee.html","id":null,"dir":"Reference","previous_headings":"","what":"Multinomial regression via brulee — details_multinom_reg_brulee","title":"Multinomial regression via brulee — details_multinom_reg_brulee","text":"brulee::brulee_multinomial_reg() fits model uses linear predictors predict multiclass data using multinomial distribution.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_brulee.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multinomial regression via brulee — details_multinom_reg_brulee","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_brulee.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multinomial regression via brulee — details_multinom_reg_brulee","text":"model 2 tuning parameter: penalty: Amount Regularization (type: double, default: 0.001) mixture: Proportion Lasso Penalty (type: double, default: 0.0) use L1 penalty (.k.. lasso penalty) force parameters strictly zero (packages glmnet). zeroing parameters specific feature optimization method used packages. engine arguments interest: optimizer(): optimization method. See brulee::brulee_linear_reg(). epochs(): integer number passes training set. lean_rate(): number used accelerate gradient decsent process. momentum(): number used use historical gradient information optimization (optimizer = \"SGD\" ). batch_size(): integer number training set points batch. stop_iter(): non-negative integer many iterations improvement stopping. (default: 5L). class_weights(): Numeric class weights. 
See brulee::brulee_multinomial_reg().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_brulee.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Multinomial regression via brulee — details_multinom_reg_brulee","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":"multinom_reg(penalty = double(1)) %>% set_engine(\"brulee\") %>% translate() ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## ## Computational engine: brulee ## ## Model fit template: ## brulee::brulee_multinomial_reg(x = missing_arg(), y = missing_arg(), ## penalty = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_brulee.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multinomial regression via brulee — details_multinom_reg_brulee","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_brulee.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multinomial regression via brulee — details_multinom_reg_brulee","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":null,"dir":"Reference","previous_headings":"","what":"Multinomial regression via glmnet — details_multinom_reg_glmnet","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"glmnet::glmnet() fits model uses linear predictors predict multiclass data using multinomial distribution.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: see ) mixture: Proportion Lasso Penalty (type: double, default: 1.0) penalty parameter default requires single numeric value. details , glmnet model general, see glmnet-details. 
mixture: mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"","code":"multinom_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"glmnet\") %>% translate() ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = 0 ## mixture = double(1) ## ## Computational engine: glmnet ## ## Model fit template: ## glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## alpha = double(1), family = \"multinomial\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. 
default, glmnet::glmnet() uses argument standardize = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"“Fitting Predicting parsnip” article contains examples multinom_reg() \"glmnet\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_glmnet.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multinomial regression via glmnet — details_multinom_reg_glmnet","text":"Hastie, T, R Tibshirani, M Wainwright. 2015. Statistical Learning Sparsity. CRC Press. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Multinomial regression via h2o — details_multinom_reg_h2o","title":"Multinomial regression via h2o — details_multinom_reg_h2o","text":"h2o::h2o.glm() fits model uses linear predictors predict multiclass data multinomial responses.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multinomial regression via h2o — details_multinom_reg_h2o","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multinomial regression via h2o — details_multinom_reg_h2o","text":"model 2 tuning parameters: mixture: Proportion Lasso Penalty (type: double, default: see ) penalty: Amount Regularization (type: double, default: see ) default, given fixed penalty, h2o::h2o.glm() uses heuristic approach select optimal value penalty based training data. Setting engine parameter lambda_search TRUE enables efficient version grid search, see details https://docs.h2o.ai/h2o/latest-stable/h2o-docs/data-science/algo-params/lambda_search.html. choice mixture depends engine parameter solver, automatically chosen given training data specification model parameters. 
solver set 'L-BFGS', mixture defaults 0 (ridge regression) 0.5 otherwise.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_h2o.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Multinomial regression via h2o — details_multinom_reg_h2o","text":"agua::h2o_train_glm() multinom_reg() wrapper around h2o::h2o.glm() family = 'multinomial'.","code":"multinom_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"h2o\") %>% translate() ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## mixture = double(1) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), lambda = double(1), alpha = double(1), ## family = \"multinomial\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_h2o.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multinomial regression via h2o — details_multinom_reg_h2o","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, h2o::h2o.glm() uses argument standardize = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Multinomial regression via h2o — details_multinom_reg_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. 
can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":null,"dir":"Reference","previous_headings":"","what":"Multinomial regression via keras — details_multinom_reg_keras","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"keras_mlp() fits model uses linear predictors predict multiclass data using multinomial distribution.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"model one tuning parameter: penalty: Amount Regularization (type: double, default: 0.0) penalty, amount regularization L2 penalty (.e., ridge weight decay).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"keras_mlp() parsnip wrapper around keras code neural networks. 
model fits linear regression network single hidden unit.","code":"multinom_reg(penalty = double(1)) %>% set_engine(\"keras\") %>% translate() ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## ## Computational engine: keras ## ## Model fit template: ## parsnip::keras_mlp(x = missing_arg(), y = missing_arg(), penalty = double(1), ## hidden_units = 1, act = \"linear\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. 
learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"“Fitting Predicting parsnip” article contains examples multinom_reg() \"keras\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_keras.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multinomial regression via keras — details_multinom_reg_keras","text":"Hoerl, ., & Kennard, R. (2000). Ridge Regression: Biased Estimation Nonorthogonal Problems. Technometrics, 42(1), 80-86.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":null,"dir":"Reference","previous_headings":"","what":"Multinomial regression via nnet — details_multinom_reg_nnet","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"nnet::multinom() fits model uses linear predictors predict multiclass data using multinomial distribution.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"model 1 tuning parameters: penalty: Amount Regularization (type: double, default: 0.0) penalty, amount regularization includes L2 penalty (.e., ridge weight 
decay).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"","code":"multinom_reg(penalty = double(1)) %>% set_engine(\"nnet\") %>% translate() ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## ## Computational engine: nnet ## ## Model fit template: ## nnet::multinom(formula = missing_arg(), data = missing_arg(), ## decay = double(1), trace = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. 
One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"“Fitting Predicting parsnip” article contains examples multinom_reg() \"nnet\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_nnet.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multinomial regression via nnet — details_multinom_reg_nnet","text":"Luraschi, J, K Kuo, E Ruiz. 2019. Mastering nnet R. O’Reilly Media Hastie, T, R Tibshirani, M Wainwright. 2015. Statistical Learning Sparsity. CRC Press. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_spark.html","id":null,"dir":"Reference","previous_headings":"","what":"Multinomial regression via spark — details_multinom_reg_spark","title":"Multinomial regression via spark — details_multinom_reg_spark","text":"sparklyr::ml_logistic_regression() fits model uses linear predictors predict multiclass data using multinomial distribution.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_spark.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multinomial regression via spark — details_multinom_reg_spark","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_spark.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Multinomial regression via spark — details_multinom_reg_spark","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: 0.0) mixture: Proportion Lasso Penalty (type: double, default: 0.0) penalty, amount regularization includes L1 penalty (.e., lasso) L2 penalty (.e., ridge weight decay). 
mixture: mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_spark.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Multinomial regression via spark — details_multinom_reg_spark","text":"","code":"multinom_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"spark\") %>% translate() ## Multinomial Regression Model Specification (classification) ## ## Main Arguments: ## penalty = double(1) ## mixture = double(1) ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_logistic_regression(x = missing_arg(), formula = missing_arg(), ## weights = missing_arg(), reg_param = double(1), elastic_net_param = double(1), ## family = \"multinomial\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_spark.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Multinomial regression via spark — details_multinom_reg_spark","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, ml_multinom_regression() uses argument standardization = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_spark.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Multinomial regression via spark — details_multinom_reg_spark","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights. Note , spark engines, case_weight argument value character string specify column numeric case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_spark.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Multinomial regression via spark — details_multinom_reg_spark","text":"models created using \"spark\" engine, several things consider. formula interface via fit() available; using fit_xy() generate error. predictions always Spark table format. names documented without dots. equivalent factor columns Spark tables class predictions returned character columns. retain model object new R session (via save()), model$fit element parsnip object serialized via ml_save(object$fit) separately saved disk. new session, object can reloaded reattached parsnip object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_multinom_reg_spark.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multinomial regression via spark — details_multinom_reg_spark","text":"Luraschi, J, K Kuo, E Ruiz. 2019. Mastering Spark R. O’Reilly Media Hastie, T, R Tibshirani, M Wainwright. 2015. Statistical Learning Sparsity. CRC Press. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Naive Bayes models via naivebayes — details_naive_Bayes_h2o","title":"Naive Bayes models via naivebayes — details_naive_Bayes_h2o","text":"h2o::h2o.naiveBayes() fits model uses Bayes' theorem compute probability class, given predictor values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Naive Bayes models via naivebayes — details_naive_Bayes_h2o","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Naive Bayes models via naivebayes — details_naive_Bayes_h2o","text":"model 1 tuning parameter: Laplace: Laplace Correction (type: double, default: 0.0) h2o::h2o.naiveBayes() provides several engine arguments deal imbalances rare classes: balance_classes logical value controlling /-sampling (imbalanced data). Defaults FALSE. class_sampling_factors /-sampling ratios per class (lexicographic order). specified, sampling factors automatically computed obtain class balance training. Require balance_classes TRUE. min_sdev: minimum standard deviation use observations without enough data, must greater 1e-10. min_prob: minimum probability use observations enough data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_h2o.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Naive Bayes models via naivebayes — details_naive_Bayes_h2o","text":"agua extension package required fit model. 
agua::h2o_train_nb() wrapper around h2o::h2o.naiveBayes().","code":"naive_Bayes(Laplace = numeric(0)) %>% set_engine(\"h2o\") %>% translate() ## Naive Bayes Model Specification (classification) ## ## Main Arguments: ## Laplace = numeric(0) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_nb(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), laplace = numeric(0))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Naive Bayes models via naivebayes — details_naive_Bayes_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Naive Bayes models via naivebayes — details_naive_Bayes_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. 
learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_klaR.html","id":null,"dir":"Reference","previous_headings":"","what":"Naive Bayes models via klaR — details_naive_Bayes_klaR","title":"Naive Bayes models via klaR — details_naive_Bayes_klaR","text":"klaR::NaiveBayes() fits model uses Bayes' theorem compute probability class, given predictor values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_klaR.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Naive Bayes models via klaR — details_naive_Bayes_klaR","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_klaR.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Naive Bayes models via klaR — details_naive_Bayes_klaR","text":"model 2 tuning parameter: smoothness: Kernel Smoothness (type: double, default: 1.0) Laplace: Laplace Correction (type: double, default: 0.0) Note engine argument usekernel set TRUE default using klaR engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_klaR.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Naive Bayes models via klaR — details_naive_Bayes_klaR","text":"discrim extension package required fit model.","code":"library(discrim) naive_Bayes(smoothness = numeric(0), Laplace = numeric(0)) %>% set_engine(\"klaR\") %>% translate() ## Naive Bayes Model Specification (classification) ## ## Main Arguments: ## smoothness = numeric(0) ## Laplace = numeric(0) ## ## Computational engine: klaR ## ## Model fit template: ## discrim::klar_bayes_wrapper(x = missing_arg(), y = missing_arg(), ## adjust = numeric(0), fL = numeric(0), 
usekernel = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_klaR.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Naive Bayes models via klaR — details_naive_Bayes_klaR","text":"columns qualitative predictors always represented factors (opposed dummy/indicator variables). predictors factors, underlying code treats multinomial data appropriately computes conditional distributions. Variance calculations used computations zero-variance predictors (.e., single unique value) eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_klaR.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Naive Bayes models via klaR — details_naive_Bayes_klaR","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_klaR.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Naive Bayes models via klaR — details_naive_Bayes_klaR","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_naivebayes.html","id":null,"dir":"Reference","previous_headings":"","what":"Naive Bayes models via naivebayes — details_naive_Bayes_naivebayes","title":"Naive Bayes models via naivebayes — details_naive_Bayes_naivebayes","text":"naivebayes::naive_bayes() fits model uses Bayes' theorem compute probability class, given predictor values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_naivebayes.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Naive Bayes models via naivebayes — details_naive_Bayes_naivebayes","text":"engine, single mode: classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_naivebayes.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Naive Bayes models via naivebayes — details_naive_Bayes_naivebayes","text":"model 2 tuning parameter: smoothness: Kernel Smoothness (type: double, default: 1.0) Laplace: Laplace Correction (type: double, default: 0.0) Note engine argument usekernel set TRUE default using naivebayes engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_naivebayes.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Naive Bayes models via naivebayes — details_naive_Bayes_naivebayes","text":"discrim extension package required fit model.","code":"library(discrim) naive_Bayes(smoothness = numeric(0), Laplace = numeric(0)) %>% set_engine(\"naivebayes\") %>% translate() ## Naive Bayes Model Specification (classification) ## ## Main Arguments: ## smoothness = numeric(0) ## Laplace = numeric(0) ## ## Computational engine: naivebayes ## ## Model fit template: ## naivebayes::naive_bayes(x = missing_arg(), y = 
missing_arg(), ## adjust = numeric(0), laplace = numeric(0), usekernel = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_naivebayes.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Naive Bayes models via naivebayes — details_naive_Bayes_naivebayes","text":"columns qualitative predictors always represented factors (opposed dummy/indicator variables). predictors factors, underlying code treats multinomial data appropriately computes conditional distributions. count data, integers can estimated using Poisson distribution argument usepoisson = TRUE passed engine argument. Variance calculations used computations zero-variance predictors (.e., single unique value) eliminated fitting model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_naivebayes.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Naive Bayes models via naivebayes — details_naive_Bayes_naivebayes","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_naive_Bayes_naivebayes.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Naive Bayes models via naivebayes — details_naive_Bayes_naivebayes","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":null,"dir":"Reference","previous_headings":"","what":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"kknn::train.kknn() fits model uses K similar data points training set predict new samples.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"model 3 tuning parameters: neighbors: # Nearest Neighbors (type: integer, default: 5L) weight_func: Distance Weighting Function (type: character, default: ‘optimal’) dist_power: Minkowski Distance Order (type: double, default: 2.0)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"min_rows() adjust number neighbors chosen value consistent actual data dimensions.","code":"nearest_neighbor( neighbors = integer(1), weight_func = character(1), dist_power = double(1) ) %>% set_engine(\"kknn\") %>% set_mode(\"regression\") %>% translate() ## K-Nearest Neighbor Model Specification (regression) ## ## Main Arguments: ## neighbors = integer(1) ## weight_func = character(1) ## dist_power = double(1) ## ## 
Computational engine: kknn ## ## Model fit template: ## kknn::train.kknn(formula = missing_arg(), data = missing_arg(), ## ks = min_rows(0L, data, 5), kernel = character(1), distance = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"","code":"nearest_neighbor( neighbors = integer(1), weight_func = character(1), dist_power = double(1) ) %>% set_engine(\"kknn\") %>% set_mode(\"classification\") %>% translate() ## K-Nearest Neighbor Model Specification (classification) ## ## Main Arguments: ## neighbors = integer(1) ## weight_func = character(1) ## dist_power = double(1) ## ## Computational engine: kknn ## ## Model fit template: ## kknn::train.kknn(formula = missing_arg(), data = missing_arg(), ## ks = min_rows(0L, data, 5), kernel = character(1), distance = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. 
One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"“Fitting Predicting parsnip” article contains examples nearest_neighbor() \"kknn\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_nearest_neighbor_kknn.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"K-nearest neighbors via kknn — details_nearest_neighbor_kknn","text":"Hechenbichler K. Schliep K.P. (2004) Weighted k-Nearest-Neighbor Techniques Ordinal Classification, Discussion Paper 399, SFB 386, Ludwig-Maximilians University Munich Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":null,"dir":"Reference","previous_headings":"","what":"Partial least squares via mixOmics — details_pls_mixOmics","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"mixOmics package can fit several different types PLS models.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"model 2 tuning parameters: predictor_prop: Proportion Predictors (type: double, default: see ) num_comp: # Components (type: integer, default: 2L)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":"translation-from-parsnip-to-the-underlying-model-call-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (regression)","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"plsmod extension package required fit model. plsmod::pls_fit() function : Determines number predictors data. Adjusts num_comp value larger number factors. Determines whether sparsity required based value predictor_prop. 
Sets keepX argument mixOmics::spls() sparse models.","code":"library(plsmod) pls(num_comp = integer(1), predictor_prop = double(1)) %>% set_engine(\"mixOmics\") %>% set_mode(\"regression\") %>% translate() ## PLS Model Specification (regression) ## ## Main Arguments: ## predictor_prop = double(1) ## num_comp = integer(1) ## ## Computational engine: mixOmics ## ## Model fit template: ## plsmod::pls_fit(x = missing_arg(), y = missing_arg(), predictor_prop = double(1), ## ncomp = integer(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":"translation-from-parsnip-to-the-underlying-model-call-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (classification)","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"plsmod extension package required fit model. case, plsmod::pls_fit() role eventually targets mixOmics::plsda() mixOmics::splsda().","code":"library(plsmod) pls(num_comp = integer(1), predictor_prop = double(1)) %>% set_engine(\"mixOmics\") %>% set_mode(\"classification\") %>% translate() ## PLS Model Specification (classification) ## ## Main Arguments: ## predictor_prop = double(1) ## num_comp = integer(1) ## ## Computational engine: mixOmics ## ## Model fit template: ## plsmod::pls_fit(x = missing_arg(), y = missing_arg(), predictor_prop = double(1), ## ncomp = integer(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":"installing-mixomics","dir":"Reference","previous_headings":"","what":"Installing mixOmics","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"package available via Bioconductor repository accessible via CRAN. 
can install using:","code":"if (!require(\"remotes\", quietly = TRUE)) { install.packages(\"remotes\") } remotes::install_bioc(\"mixOmics\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Variance calculations used computations zero-variance predictors (.e., single unique value) eliminated fitting model. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_pls_mixOmics.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Partial least squares via mixOmics — details_pls_mixOmics","text":"Rohart F Gautier B Singh Le Cao K-(2017). 
“mixOmics: R package ’omics feature selection multiple data integration.” PLoS computational biology, 13(11), e1005752.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_gee.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","title":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","text":"gee::gee() uses generalized least squares fit different types models errors independent.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_gee.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_gee.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","text":"model formal tuning parameters. may beneficial determine appropriate correlation structure use, typically affect predicted value model. effect inferential results parameter covariance values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_gee.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","text":"multilevelmod extension package required fit model. 
multilevelmod::gee_fit() wrapper model around gee().","code":"library(multilevelmod) poisson_reg(engine = \"gee\") %>% set_engine(\"gee\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Computational engine: gee ## ## Model fit template: ## multilevelmod::gee_fit(formula = missing_arg(), data = missing_arg(), ## family = stats::poisson)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_gee.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_gee.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_gee.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","text":"gee:gee() gee:geepack() specify id/cluster variable using argument id requires vector. parsnip doesn’t work way enable model fit using artificial function id_var() used formula. , original package, call look like: parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply GEE formula adding model: gee::gee() function always prints warnings output even silent = TRUE. 
parsnip \"gee\" engine, contrast, silences console output coming gee::gee(), even silent = FALSE. Also, issues gee() function, supplementary call glm() needed get rank QR decomposition objects predict() can used.","code":"gee(breaks ~ tension, id = wool, data = warpbreaks, corstr = \"exchangeable\") library(tidymodels) poisson_reg() %>% set_engine(\"gee\", corstr = \"exchangeable\") %>% fit(y ~ time + x + id_var(subject), data = longitudinal_counts) library(tidymodels) gee_spec <- poisson_reg() %>% set_engine(\"gee\", corstr = \"exchangeable\") gee_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = y, predictors = c(time, x, subject)) %>% add_model(gee_spec, formula = y ~ time + x + id_var(subject)) fit(gee_wflow, data = longitudinal_counts)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_gee.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Poisson regression via generalized estimating equations (GEE) — details_poisson_reg_gee","text":"Liang, K.Y. Zeger, S.L. (1986) Longitudinal data analysis using generalized linear models. Biometrika, 73 13–22. Zeger, S.L. Liang, K.Y. (1986) Longitudinal data analysis discrete continuous outcomes. 
Biometrics, 42 121–130.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glm.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via glm — details_poisson_reg_glm","title":"Poisson regression via glm — details_poisson_reg_glm","text":"stats::glm() uses maximum likelihood fit model count data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glm.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via glm — details_poisson_reg_glm","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glm.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via glm — details_poisson_reg_glm","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glm.html","id":"translation-from-parsnip-to-the-underlying-model-call-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (regression)","title":"Poisson regression via glm — details_poisson_reg_glm","text":"poissonreg extension package required fit model.","code":"library(poissonreg) poisson_reg() %>% set_engine(\"glm\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Computational engine: glm ## ## Model fit template: ## stats::glm(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## family = stats::poisson)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glm.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Poisson regression via glm — details_poisson_reg_glm","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) 
engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glm.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via glm — details_poisson_reg_glm","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glm.html","id":"case-weights-1","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via glm — details_poisson_reg_glm","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. However, documentation stats::glm() assumes specific type case weights used:“Non-NULL weights can used indicate different observations different dispersions (values weights inversely proportional dispersions); equivalently, elements weights positive integers w_i, response y_i mean w_i unit-weight observations. binomial GLM prior weights used give number trials response proportion successes: rarely used Poisson GLM.” frequency weights used application, glm_grouped() model (corresponding engine) may appropriate.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glm.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Poisson regression via glm — details_poisson_reg_glm","text":"model object contains data required make predictions. 
saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via mixed models — details_poisson_reg_glmer","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"\"glmer\" engine estimates fixed random effect regression parameters using maximum likelihood (restricted maximum likelihood) estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"model tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"multilevelmod extension package required fit model.","code":"library(multilevelmod) poisson_reg(engine = \"glmer\") %>% set_engine(\"glmer\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Computational engine: glmer ## ## Model fit template: ## lme4::glmer(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), ## family = 
stats::poisson)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":"predicting-new-samples","dir":"Reference","previous_headings":"","what":"Predicting new samples","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"model can use subject-specific coefficient estimates make predictions (.e. partial pooling). example, equation shows linear predictor (\\eta) random intercept: $$ denotes ith independent experimental unit (e.g. subject). model seen subject , can use subject’s data adjust population intercept specific subjects results. happens data predicted subject used model fit? case, package uses population parameter estimates prediction: Depending covariates model, might effect making prediction new samples. population parameters “best estimate” subject included model fit. tidymodels framework deliberately constrains predictions new data use training set data (prevent information leakage).","code":"\\eta_{i} = (\\beta_0 + b_{0i}) + \\beta_1x_{i1} \\hat{\\eta}_{i'} = \\hat{\\beta}_0+ \\hat{\\beta}x_{i'1}"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"model can accept case weights. parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. 
case, can add appropriate columns using add_variables() supply typical formula adding model:","code":"library(tidymodels) poisson_reg() %>% set_engine(\"glmer\") %>% fit(y ~ time + x + (1 | subject), data = longitudinal_counts) library(tidymodels) glmer_spec <- poisson_reg() %>% set_engine(\"glmer\") glmer_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = y, predictors = c(time, x, subject)) %>% add_model(glmer_spec, formula = y ~ time + x + (1 | subject)) fit(glmer_wflow, data = longitudinal_counts)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmer.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Poisson regression via mixed models — details_poisson_reg_glmer","text":"J Pinheiro, D Bates. 2000. Mixed-effects models S S-PLUS. Springer, New York, NY West, K, Band Welch, Galecki. 2014. Linear Mixed Models: Practical Guide Using Statistical Software. CRC Press. Thorson, J, Minto, C. 2015, Mixed effects: unifying framework statistical modelling fisheries biology. ICES Journal Marine Science, Volume 72, Issue 5, Pages 1245–1256. Harrison, XA, Donaldson, L, Correa-Cano, , Evans, J, Fisher, DN, Goodwin, CED, Robinson, BS, Hodgson, DJ, Inger, R. 2018. brief introduction mixed effects modelling multi-model inference ecology. PeerJ 6:e4794. DeBruine LM, Barr DJ. Understanding Mixed-Effects Models Data Simulation. 2021. 
Advances Methods Practices Psychological Science.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmnet.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via glmnet — details_poisson_reg_glmnet","title":"Poisson regression via glmnet — details_poisson_reg_glmnet","text":"glmnet::glmnet() uses penalized maximum likelihood fit model count data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmnet.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via glmnet — details_poisson_reg_glmnet","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmnet.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via glmnet — details_poisson_reg_glmnet","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: see ) mixture: Proportion Lasso Penalty (type: double, default: 1.0) penalty parameter default requires single numeric value. details , glmnet model general, see glmnet-details. 
mixture: mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmnet.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Poisson regression via glmnet — details_poisson_reg_glmnet","text":"poissonreg extension package required fit model.","code":"library(poissonreg) poisson_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"glmnet\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Main Arguments: ## penalty = 0 ## mixture = double(1) ## ## Computational engine: glmnet ## ## Model fit template: ## glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## alpha = double(1), family = \"poisson\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmnet.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Poisson regression via glmnet — details_poisson_reg_glmnet","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, glmnet::glmnet() uses argument standardize = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmnet.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via glmnet — details_poisson_reg_glmnet","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_glmnet.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Poisson regression via glmnet — details_poisson_reg_glmnet","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via h2o — details_poisson_reg_h2o","title":"Poisson regression via h2o — details_poisson_reg_h2o","text":"h2o::h2o.glm() uses penalized maximum likelihood fit model count data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via h2o — details_poisson_reg_h2o","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via h2o — details_poisson_reg_h2o","text":"model 2 tuning parameters: mixture: Proportion Lasso Penalty (type: double, default: see ) penalty: Amount Regularization (type: double, default: see ) default, given fixed penalty, h2o::h2o.glm() uses heuristic approach select optimal value penalty based training data. Setting engine parameter lambda_search TRUE enables efficient version grid search, see details https://docs.h2o.ai/h2o/latest-stable/h2o-docs/data-science/algo-params/lambda_search.html. 
choice mixture depends engine parameter solver, automatically chosen given training data specification model parameters. solver set 'L-BFGS', mixture defaults 0 (ridge regression) 0.5 otherwise.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_h2o.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Poisson regression via h2o — details_poisson_reg_h2o","text":"agua::h2o_train_glm() poisson_reg() wrapper around h2o::h2o.glm() family = 'poisson'. agua extension package required fit model.","code":"library(poissonreg) poisson_reg(penalty = double(1), mixture = double(1)) %>% set_engine(\"h2o\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Main Arguments: ## penalty = double(1) ## mixture = double(1) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_glm(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), lambda = double(1), alpha = double(1), ## family = \"poisson\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_h2o.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Poisson regression via h2o — details_poisson_reg_h2o","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. 
default, h2o::h2o.glm() uses argument standardize = TRUE center scale numerical columns.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Poisson regression via h2o — details_poisson_reg_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Poisson regression via h2o — details_poisson_reg_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. 
learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_hurdle.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via pscl — details_poisson_reg_hurdle","title":"Poisson regression via pscl — details_poisson_reg_hurdle","text":"pscl::hurdle() uses maximum likelihood estimation fit model count data separate model terms predicting counts predicting probability zero count.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_hurdle.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via pscl — details_poisson_reg_hurdle","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_hurdle.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via pscl — details_poisson_reg_hurdle","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_hurdle.html","id":"translation-from-parsnip-to-the-underlying-model-call-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (regression)","title":"Poisson regression via pscl — details_poisson_reg_hurdle","text":"poissonreg extension package required fit model.","code":"library(poissonreg) poisson_reg() %>% set_engine(\"hurdle\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Computational engine: hurdle ## ## Model fit template: ## pscl::hurdle(formula = missing_arg(), data = missing_arg(), weights = 
missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_hurdle.html","id":"preprocessing-and-special-formulas-for-zero-inflated-poisson-models","dir":"Reference","previous_headings":"","what":"Preprocessing and special formulas for zero-inflated Poisson models","title":"Poisson regression via pscl — details_poisson_reg_hurdle","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_hurdle.html","id":"specifying-the-statistical-model-details","dir":"Reference","previous_headings":"","what":"Specifying the statistical model details","title":"Poisson regression via pscl — details_poisson_reg_hurdle","text":"particular model, special formula used specify columns affect counts affect model probability zero counts. sets terms separated bar. example, y ~ x | z. type formula used base R infrastructure (e.g. model.matrix()) fitting parsnip model engine directly, formula method required formula just passed . 
example: However, using workflow, best approach avoid using workflows::add_formula() use workflows::add_variables() conjunction model formula: reason workflows::add_formula() try create model matrix either fail create dummy variables prematurely.","code":"library(tidymodels) tidymodels_prefer() data(\"bioChemists\", package = \"pscl\") poisson_reg() %>% set_engine(\"hurdle\") %>% fit(art ~ fem + mar | ment, data = bioChemists) ## parsnip model object ## ## ## Call: ## pscl::hurdle(formula = art ~ fem + mar | ment, data = data) ## ## Count model coefficients (truncated poisson with log link): ## (Intercept) femWomen marMarried ## 0.847598 -0.237351 0.008846 ## ## Zero hurdle model coefficients (binomial with logit link): ## (Intercept) ment ## 0.24871 0.08092 data(\"bioChemists\", package = \"pscl\") spec <- poisson_reg() %>% set_engine(\"hurdle\") workflow() %>% add_variables(outcomes = c(art), predictors = c(fem, mar, ment)) %>% add_model(spec, formula = art ~ fem + mar | ment) %>% fit(data = bioChemists) %>% extract_fit_engine() ## ## Call: ## pscl::hurdle(formula = art ~ fem + mar | ment, data = data) ## ## Count model coefficients (truncated poisson with log link): ## (Intercept) femWomen marMarried ## 0.847598 -0.237351 0.008846 ## ## Zero hurdle model coefficients (binomial with logit link): ## (Intercept) ment ## 0.24871 0.08092"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_hurdle.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via pscl — details_poisson_reg_hurdle","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via stan — details_poisson_reg_stan","title":"Poisson regression via stan — details_poisson_reg_stan","text":"rstanarm::stan_glm() uses Bayesian estimation fit model count data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via stan — details_poisson_reg_stan","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via stan — details_poisson_reg_stan","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"important-engine-specific-options","dir":"Reference","previous_headings":"","what":"Important engine-specific options","title":"Poisson regression via stan — details_poisson_reg_stan","text":"relevant arguments can passed set_engine(): chains: positive integer specifying number Markov chains. default 4. iter: positive integer specifying number iterations chain (including warmup). default 2000. seed: seed random number generation. cores: Number cores use executing chains parallel. prior: prior distribution (non-hierarchical) regression coefficients. \"stan\" engine fit hierarchical terms. prior_intercept: prior distribution intercept (centering predictors). 
See rstan::sampling() rstanarm::priors() information options.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Poisson regression via stan — details_poisson_reg_stan","text":"poissonreg extension package required fit model. Note refresh default prevents logging estimation process. Change value set_engine() show MCMC logs.","code":"library(poissonreg) poisson_reg() %>% set_engine(\"stan\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Computational engine: stan ## ## Model fit template: ## rstanarm::stan_glm(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), family = stats::poisson)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Poisson regression via stan — details_poisson_reg_stan","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Poisson regression via stan — details_poisson_reg_stan","text":"prediction, \"stan\" engine can compute posterior intervals analogous confidence prediction intervals. instances, units original outcome. 
std_error = TRUE, standard deviation posterior distribution (posterior predictive distribution appropriate) returned.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via stan — details_poisson_reg_stan","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Poisson regression via stan — details_poisson_reg_stan","text":"“Fitting Predicting parsnip” article contains examples poisson_reg() \"stan\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Poisson regression via stan — details_poisson_reg_stan","text":"McElreath, R. 2020 Statistical Rethinking. 
CRC Press.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"\"stan_glmer\" engine estimates hierarchical regression parameters using Bayesian estimation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"model tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"important-engine-specific-options","dir":"Reference","previous_headings":"","what":"Important engine-specific options","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"relevant arguments can passed set_engine(): chains: positive integer specifying number Markov chains. default 4. iter: positive integer specifying number iterations chain (including warmup). default 2000. seed: seed random number generation. cores: Number cores use executing chains parallel. prior: prior distribution (non-hierarchical) regression coefficients. prior_intercept: prior distribution intercept (centering predictors). 
See ?rstanarm::stan_glmer ?rstan::sampling information.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"multilevelmod extension package required fit model.","code":"library(multilevelmod) poisson_reg(engine = \"stan_glmer\") %>% set_engine(\"stan_glmer\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Computational engine: stan_glmer ## ## Model fit template: ## rstanarm::stan_glmer(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), family = stats::poisson, refresh = 0)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"predicting-new-samples","dir":"Reference","previous_headings":"","what":"Predicting new samples","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"model can use subject-specific coefficient estimates make predictions (.e. partial pooling). example, equation shows linear predictor (\\eta) random intercept: $$ denotes ith independent experimental unit (e.g. subject). model seen subject , can use subject’s data adjust population intercept specific subjects results. happens data predicted subject used model fit? case, package uses population parameter estimates prediction: Depending covariates model, might effect making prediction new samples. population parameters “best estimate” subject included model fit. 
tidymodels framework deliberately constrains predictions new data use training set data (prevent information leakage).","code":"\\eta_{i} = (\\beta_0 + b_{0i}) + \\beta_1x_{i1} \\hat{\\eta}_{i'} = \\hat{\\beta}_0+ \\hat{\\beta}x_{i'1}"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"specific preprocessing needs. However, helpful keep clustering/subject identifier column factor character (instead making dummy variables). See examples next section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"model can accept case weights. parsnip, suggest using formula method fitting: using tidymodels infrastructure, may better use workflow. case, can add appropriate columns using add_variables() supply typical formula adding model: prediction, \"stan_glmer\" engine can compute posterior intervals analogous confidence prediction intervals. instances, units original outcome. 
std_error = TRUE, standard deviation posterior distribution (posterior predictive distribution appropriate) returned.","code":"library(tidymodels) poisson_reg() %>% set_engine(\"stan_glmer\") %>% fit(y ~ time + x + (1 | subject), data = longitudinal_counts) library(tidymodels) glmer_spec <- poisson_reg() %>% set_engine(\"stan_glmer\") glmer_wflow <- workflow() %>% # The data are included as-is using: add_variables(outcomes = y, predictors = c(time, x, subject)) %>% add_model(glmer_spec, formula = y ~ time + x + (1 | subject)) fit(glmer_wflow, data = longitudinal_counts)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_stan_glmer.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Poisson regression via hierarchical Bayesian methods — details_poisson_reg_stan_glmer","text":"McElreath, R. 2020 Statistical Rethinking. CRC Press. Sorensen, T, Vasishth, S. 2016. 
Bayesian linear mixed models using Stan: tutorial psychologists, linguists, cognitive scientists, arXiv:1506.06201.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_zeroinfl.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression via pscl — details_poisson_reg_zeroinfl","title":"Poisson regression via pscl — details_poisson_reg_zeroinfl","text":"pscl::zeroinfl() uses maximum likelihood estimation fit model count data separate model terms predicting counts predicting probability zero count.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_zeroinfl.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression via pscl — details_poisson_reg_zeroinfl","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_zeroinfl.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Poisson regression via pscl — details_poisson_reg_zeroinfl","text":"engine tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_zeroinfl.html","id":"translation-from-parsnip-to-the-underlying-model-call-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (regression)","title":"Poisson regression via pscl — details_poisson_reg_zeroinfl","text":"poissonreg extension package required fit model.","code":"library(poissonreg) poisson_reg() %>% set_engine(\"zeroinfl\") %>% translate() ## Poisson Regression Model Specification (regression) ## ## Computational engine: zeroinfl ## ## Model fit template: ## pscl::zeroinfl(formula = missing_arg(), data = missing_arg(), ## weights = 
missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_zeroinfl.html","id":"preprocessing-and-special-formulas-for-zero-inflated-poisson-models","dir":"Reference","previous_headings":"","what":"Preprocessing and special formulas for zero-inflated Poisson models","title":"Poisson regression via pscl — details_poisson_reg_zeroinfl","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_zeroinfl.html","id":"specifying-the-statistical-model-details","dir":"Reference","previous_headings":"","what":"Specifying the statistical model details","title":"Poisson regression via pscl — details_poisson_reg_zeroinfl","text":"particular model, special formula used specify columns affect counts affect model probability zero counts. sets terms separated bar. example, y ~ x | z. type formula used base R infrastructure (e.g. model.matrix()) fitting parsnip model engine directly, formula method required formula just passed . 
example: However, using workflow, best approach avoid using workflows::add_formula() use workflows::add_variables() conjunction model formula: reason workflows::add_formula() try create model matrix either fail create dummy variables prematurely.","code":"library(tidymodels) tidymodels_prefer() data(\"bioChemists\", package = \"pscl\") poisson_reg() %>% set_engine(\"zeroinfl\") %>% fit(art ~ fem + mar | ment, data = bioChemists) ## parsnip model object ## ## ## Call: ## pscl::zeroinfl(formula = art ~ fem + mar | ment, data = data) ## ## Count model coefficients (poisson with log link): ## (Intercept) femWomen marMarried ## 0.82840 -0.21365 0.02576 ## ## Zero-inflation model coefficients (binomial with logit link): ## (Intercept) ment ## -0.363 -0.166 data(\"bioChemists\", package = \"pscl\") spec <- poisson_reg() %>% set_engine(\"zeroinfl\") workflow() %>% add_variables(outcomes = c(art), predictors = c(fem, mar, ment)) %>% add_model(spec, formula = art ~ fem + mar | ment) %>% fit(data = bioChemists) %>% extract_fit_engine() ## ## Call: ## pscl::zeroinfl(formula = art ~ fem + mar | ment, data = data) ## ## Count model coefficients (poisson with log link): ## (Intercept) femWomen marMarried ## 0.82840 -0.21365 0.02576 ## ## Zero-inflation model coefficients (binomial with logit link): ## (Intercept) ment ## -0.363 -0.166"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_poisson_reg_zeroinfl.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Poisson regression via pscl — details_poisson_reg_zeroinfl","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":null,"dir":"Reference","previous_headings":"","what":"Proportional hazards regression — details_proportional_hazards_glmnet","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"glmnet::glmnet() fits regularized Cox proportional hazards model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"engine, single mode: censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"model 2 tuning parameters: penalty: Amount Regularization (type: double, default: see ) mixture: Proportion Lasso Penalty (type: double, default: 1.0) penalty parameter default requires single numeric value. details , glmnet model general, see glmnet-details. 
mixture: mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"censored extension package required fit model.","code":"library(censored) proportional_hazards(penalty = double(1), mixture = double(1)) %>% set_engine(\"glmnet\") %>% translate() ## Proportional Hazards Model Specification (censored regression) ## ## Main Arguments: ## penalty = 0 ## mixture = double(1) ## ## Computational engine: glmnet ## ## Model fit template: ## censored::coxnet_train(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), alpha = double(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one. default, glmnet::glmnet() uses argument standardize = TRUE center scale data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"model fit intercept. 
model formula (required) can include special terms, survival::strata(). allows baseline hazard differ groups contained function. (learn using special terms formulas tidymodels, see ?model_formula.) column used inside strata() treated qualitative matter type. different syntax offered glmnet::glmnet() package (.e., glmnet::stratifySurv()) recommended . example, model, numeric column rx used estimate two different baseline hazards value column: Note columns used strata() function also estimated regular portion model (.e., within linear predictor). Predictions type \"time\" predictions mean survival time.","code":"library(survival) library(censored) library(dplyr) library(tidyr) mod <- proportional_hazards(penalty = 0.01) %>% set_engine(\"glmnet\", nlambda = 5) %>% fit(Surv(futime, fustat) ~ age + ecog.ps + strata(rx), data = ovarian) pred_data <- data.frame(age = c(50, 50), ecog.ps = c(1, 1), rx = c(1, 2)) # Different survival probabilities for different values of 'rx' predict(mod, pred_data, type = \"survival\", time = 500) %>% bind_cols(pred_data) %>% unnest(.pred) ## # A tibble: 2 × 5 ## .eval_time .pred_survival age ecog.ps rx ## ## 1 500 0.666 50 1 1 ## 2 500 0.769 50 1 2"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"linear-predictor-values","dir":"Reference","previous_headings":"","what":"Linear predictor values","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"Since risk regression parametric survival models modeling different characteristics (e.g. relative hazard versus event time), linear predictors going opposite directions. example, parametric models, linear predictor increases time. proportional hazards models linear predictor decreases time (since hazard increasing). , linear predictors two quantities opposite signs. tidymodels treat different models differently computing performance metrics. 
standardize across model types, default proportional hazards models increasing values time. result, sign linear predictor opposite value produced predict() method engine package. behavior can changed using increasing argument calling predict() model object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_glmnet.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Proportional hazards regression — details_proportional_hazards_glmnet","text":"Simon N, Friedman J, Hastie T, Tibshirani R. 2011. “Regularization Paths Cox’s Proportional Hazards Model via Coordinate Descent.” Journal Statistical Software, Articles 39 (5): 1–13. . Hastie T, Tibshirani R, Wainwright M. 2015. Statistical Learning Sparsity. CRC Press. Kuhn M, Johnson K. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_survival.html","id":null,"dir":"Reference","previous_headings":"","what":"Proportional hazards regression — details_proportional_hazards_survival","title":"Proportional hazards regression — details_proportional_hazards_survival","text":"survival::coxph() fits Cox proportional hazards model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_survival.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Proportional hazards regression — details_proportional_hazards_survival","text":"engine, single mode: censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_survival.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Proportional hazards regression — details_proportional_hazards_survival","text":"model tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_survival.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Proportional hazards regression — details_proportional_hazards_survival","text":"censored extension package required fit model.","code":"library(censored) proportional_hazards() %>% set_engine(\"survival\") %>% set_mode(\"censored regression\") %>% translate() ## Proportional Hazards Model Specification (censored regression) ## ## Computational engine: survival ## ## Model fit template: ## survival::coxph(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), x = TRUE, model = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_survival.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other 
details","title":"Proportional hazards regression — details_proportional_hazards_survival","text":"model fit intercept. main interface model uses formula method since model specification typically involved use survival::Surv(). model formula can include special terms, survival::strata(). allows baseline hazard differ groups contained function. column used inside strata() treated qualitative matter type. learn using special terms formulas tidymodels, see ?model_formula. example, model, numeric column rx used estimate two different baseline hazards value column: Note columns used strata() function estimated regular portion model (.e., within linear predictor). Predictions type \"time\" predictions mean survival time.","code":"library(survival) proportional_hazards() %>% fit(Surv(futime, fustat) ~ age + strata(rx), data = ovarian) %>% extract_fit_engine() %>% # Two different hazards for each value of 'rx' basehaz() ## hazard time strata ## 1 0.02250134 59 rx=1 ## 2 0.05088586 115 rx=1 ## 3 0.09467873 156 rx=1 ## 4 0.14809975 268 rx=1 ## 5 0.30670509 329 rx=1 ## 6 0.46962698 431 rx=1 ## 7 0.46962698 448 rx=1 ## 8 0.46962698 477 rx=1 ## 9 1.07680229 638 rx=1 ## 10 1.07680229 803 rx=1 ## 11 1.07680229 855 rx=1 ## 12 1.07680229 1040 rx=1 ## 13 1.07680229 1106 rx=1 ## 14 0.05843331 353 rx=2 ## 15 0.12750063 365 rx=2 ## 16 0.12750063 377 rx=2 ## 17 0.12750063 421 rx=2 ## 18 0.23449656 464 rx=2 ## 19 0.35593895 475 rx=2 ## 20 0.50804209 563 rx=2 ## 21 0.50804209 744 rx=2 ## 22 0.50804209 769 rx=2 ## 23 0.50804209 770 rx=2 ## 24 0.50804209 1129 rx=2 ## 25 0.50804209 1206 rx=2 ## 26 0.50804209 1227 rx=2"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_survival.html","id":"linear-predictor-values","dir":"Reference","previous_headings":"","what":"Linear predictor values","title":"Proportional hazards regression — details_proportional_hazards_survival","text":"Since risk regression parametric survival models modeling different characteristics 
(e.g. relative hazard versus event time), linear predictors going opposite directions. example, parametric models, linear predictor increases time. proportional hazards models linear predictor decreases time (since hazard increasing). , linear predictors two quantities opposite signs. tidymodels treat different models differently computing performance metrics. standardize across model types, default proportional hazards models increasing values time. result, sign linear predictor opposite value produced predict() method engine package. behavior can changed using increasing argument calling predict() model object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_survival.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Proportional hazards regression — details_proportional_hazards_survival","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_proportional_hazards_survival.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Proportional hazards regression — details_proportional_hazards_survival","text":"Andersen P, Gill R. 1982. Cox’s regression model counting processes, large sample study. Annals Statistics 10, 1100-1120.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_aorsf.html","id":null,"dir":"Reference","previous_headings":"","what":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","title":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","text":"aorsf::orsf() fits model creates large number decision trees, de-correlated others. 
final prediction uses predictions individual trees combines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_aorsf.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","text":"engine, single mode: censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_aorsf.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","text":"model 3 tuning parameters: trees: # Trees (type: integer, default: 500L) min_n: Minimal Node Size (type: integer, default: 5L) mtry: # Randomly Selected Predictors (type: integer, default: ceiling(sqrt(n_predictors))) Additionally, model one engine-specific tuning parameter: split_min_stat: Minimum test statistic required split node. Default 3.841459 log-rank test, roughly p-value 0.05.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_aorsf.html","id":"translation-from-parsnip-to-the-original-package-censored-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (censored regression)","title":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","text":"censored extension package required fit model.","code":"library(censored) rand_forest() %>% set_engine(\"aorsf\") %>% set_mode(\"censored regression\") %>% translate() ## Random Forest Model Specification (censored regression) ## ## Computational engine: aorsf ## ## Model fit template: ## aorsf::orsf(formula = missing_arg(), data = missing_arg(), weights = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_aorsf.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing 
requirements","title":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_aorsf.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_aorsf.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","text":"Predictions survival probability time exceeding maximum observed event time predicted survival probability maximum observed time training data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_aorsf.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Oblique random survival forests via aorsf — details_rand_forest_aorsf","text":"Jaeger BC, Long DL, Long DM, Sims M, Szychowski JM, Min YI, Mcclure LA, Howard G, Simon N. Oblique random survival forests. Annals applied statistics 2019 Sep; 13(3):1847-83. DOI: 10.1214/19-AOAS1261 Jaeger BC, Welden S, Lenoir K, Pajewski NM. aorsf: R package supervised learning using oblique random survival forest. Journal Open Source Software 2022, 7(77), 1 4705. . Jaeger BC, Welden S, Lenoir K, Speiser JL, Segar MW, Pandey , Pajewski NM. Accelerated interpretable oblique random survival forests. arXiv e-prints 2022 Aug; arXiv-2208. 
URL: https://arxiv.org/abs/2208.01129","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"Random forests via h2o — details_rand_forest_h2o","title":"Random forests via h2o — details_rand_forest_h2o","text":"h2o::h2o.randomForest() fits model creates large number decision trees, independent others. final prediction uses predictions individual trees combines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Random forests via h2o — details_rand_forest_h2o","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Random forests via h2o — details_rand_forest_h2o","text":"model 3 tuning parameters: trees: # Trees (type: integer, default: 50L) min_n: Minimal Node Size (type: integer, default: 1) mtry: # Randomly Selected Predictors (type: integer, default: see ) mtry depends number columns model mode. default h2o::h2o.randomForest() floor(sqrt(ncol(x))) classification floor(ncol(x)/3) regression.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_h2o.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Random forests via h2o — details_rand_forest_h2o","text":"agua::h2o_train_rf() wrapper around h2o::h2o.randomForest(). 
min_rows() min_cols() adjust number neighbors chosen value consistent actual data dimensions.","code":"rand_forest( mtry = integer(1), trees = integer(1), min_n = integer(1) ) %>% set_engine(\"h2o\") %>% set_mode(\"regression\") %>% translate() ## Random Forest Model Specification (regression) ## ## Main Arguments: ## mtry = integer(1) ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_rf(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), mtries = integer(1), ntrees = integer(1), ## min_rows = integer(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_h2o.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Random forests via h2o — details_rand_forest_h2o","text":"","code":"rand_forest( mtry = integer(1), trees = integer(1), min_n = integer(1) ) %>% set_engine(\"h2o\") %>% set_mode(\"classification\") %>% translate() ## Random Forest Model Specification (classification) ## ## Main Arguments: ## mtry = integer(1) ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_rf(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), mtries = integer(1), ntrees = integer(1), ## min_rows = integer(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_h2o.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Random forests via h2o — details_rand_forest_h2o","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. 
Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_h2o.html","id":"initializing-h-o","dir":"Reference","previous_headings":"","what":"Initializing h2o","title":"Random forests via h2o — details_rand_forest_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Random forests via h2o — details_rand_forest_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":null,"dir":"Reference","previous_headings":"","what":"Random forests via partykit — details_rand_forest_partykit","title":"Random forests via partykit — details_rand_forest_partykit","text":"partykit::cforest() fits model creates large number decision trees, independent others. 
final prediction uses predictions individual trees combines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Random forests via partykit — details_rand_forest_partykit","text":"engine, multiple modes: censored regression, regression, classification","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Random forests via partykit — details_rand_forest_partykit","text":"model 3 tuning parameters: trees: # Trees (type: integer, default: 500L) min_n: Minimal Node Size (type: integer, default: 20L) mtry: # Randomly Selected Predictors (type: integer, default: 5L)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Random forests via partykit — details_rand_forest_partykit","text":"bonsai extension package required fit model.","code":"library(bonsai) rand_forest() %>% set_engine(\"partykit\") %>% set_mode(\"regression\") %>% translate() ## Random Forest Model Specification (regression) ## ## Computational engine: partykit ## ## Model fit template: ## parsnip::cforest_train(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Random forests via partykit — details_rand_forest_partykit","text":"bonsai extension package required fit model. 
parsnip::cforest_train() wrapper around partykit::cforest() (functions) makes easier run model.","code":"library(bonsai) rand_forest() %>% set_engine(\"partykit\") %>% set_mode(\"classification\") %>% translate() ## Random Forest Model Specification (classification) ## ## Computational engine: partykit ## ## Model fit template: ## parsnip::cforest_train(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":"translation-from-parsnip-to-the-original-package-censored-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (censored regression)","title":"Random forests via partykit — details_rand_forest_partykit","text":"censored extension package required fit model. censored::cond_inference_surv_cforest() wrapper around partykit::cforest() (functions) makes easier run model.","code":"library(censored) rand_forest() %>% set_engine(\"partykit\") %>% set_mode(\"censored regression\") %>% translate() ## Random Forest Model Specification (censored regression) ## ## Computational engine: partykit ## ## Model fit template: ## parsnip::cforest_train(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Random forests via partykit — details_rand_forest_partykit","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. 
Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Random forests via partykit — details_rand_forest_partykit","text":"Predictions type \"time\" predictions median survival time.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_partykit.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Random forests via partykit — details_rand_forest_partykit","text":"partykit: Modular Toolkit Recursive Partytioning R Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":null,"dir":"Reference","previous_headings":"","what":"Random forests via randomForest — details_rand_forest_randomForest","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"randomForest::randomForest() fits model creates large number decision trees, independent others. 
final prediction uses predictions individual trees combines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"model 3 tuning parameters: mtry: # Randomly Selected Predictors (type: integer, default: see ) trees: # Trees (type: integer, default: 500L) min_n: Minimal Node Size (type: integer, default: see ) mtry depends number columns model mode. default randomForest::randomForest() floor(sqrt(ncol(x))) classification floor(ncol(x)/3) regression. min_n depends mode. regression, value 5 default. 
classification, value 10 used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"min_rows() min_cols() adjust number neighbors chosen value consistent actual data dimensions.","code":"rand_forest( mtry = integer(1), trees = integer(1), min_n = integer(1) ) %>% set_engine(\"randomForest\") %>% set_mode(\"regression\") %>% translate() ## Random Forest Model Specification (regression) ## ## Main Arguments: ## mtry = integer(1) ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: randomForest ## ## Model fit template: ## randomForest::randomForest(x = missing_arg(), y = missing_arg(), ## mtry = min_cols(~integer(1), x), ntree = integer(1), nodesize = min_rows(~integer(1), ## x))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"","code":"rand_forest( mtry = integer(1), trees = integer(1), min_n = integer(1) ) %>% set_engine(\"randomForest\") %>% set_mode(\"classification\") %>% translate() ## Random Forest Model Specification (classification) ## ## Main Arguments: ## mtry = integer(1) ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: randomForest ## ## Model fit template: ## randomForest::randomForest(x = missing_arg(), y = missing_arg(), ## mtry = min_cols(~integer(1), x), ntree = integer(1), nodesize = min_rows(~integer(1), ## 
x))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"“Fitting Predicting parsnip” article contains examples rand_forest() \"randomForest\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_randomForest.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Random forests via randomForest — details_rand_forest_randomForest","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":null,"dir":"Reference","previous_headings":"","what":"Random forests via ranger — details_rand_forest_ranger","title":"Random forests via ranger — details_rand_forest_ranger","text":"ranger::ranger() fits model creates large number decision trees, independent others. final prediction uses predictions individual trees combines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Random forests via ranger — details_rand_forest_ranger","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Random forests via ranger — details_rand_forest_ranger","text":"model 3 tuning parameters: mtry: # Randomly Selected Predictors (type: integer, default: see ) trees: # Trees (type: integer, default: 500L) min_n: Minimal Node Size (type: integer, default: see ) mtry depends number columns. default ranger::ranger() floor(sqrt(ncol(x))). min_n depends mode. regression, value 5 default. 
classification, value 10 used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Random forests via ranger — details_rand_forest_ranger","text":"min_rows() min_cols() adjust number neighbors chosen value consistent actual data dimensions.","code":"rand_forest( mtry = integer(1), trees = integer(1), min_n = integer(1) ) %>% set_engine(\"ranger\") %>% set_mode(\"regression\") %>% translate() ## Random Forest Model Specification (regression) ## ## Main Arguments: ## mtry = integer(1) ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: ranger ## ## Model fit template: ## ranger::ranger(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## mtry = min_cols(~integer(1), x), num.trees = integer(1), ## min.node.size = min_rows(~integer(1), x), num.threads = 1, ## verbose = FALSE, seed = sample.int(10^5, 1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Random forests via ranger — details_rand_forest_ranger","text":"Note ranger probability forest always fit (unless probability argument changed user via set_engine()).","code":"rand_forest( mtry = integer(1), trees = integer(1), min_n = integer(1) ) %>% set_engine(\"ranger\") %>% set_mode(\"classification\") %>% translate() ## Random Forest Model Specification (classification) ## ## Main Arguments: ## mtry = integer(1) ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: ranger ## ## Model fit template: ## ranger::ranger(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## mtry = 
min_cols(~integer(1), x), num.trees = integer(1), ## min.node.size = min_rows(~integer(1), x), num.threads = 1, ## verbose = FALSE, seed = sample.int(10^5, 1), probability = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Random forests via ranger — details_rand_forest_ranger","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"other-notes","dir":"Reference","previous_headings":"","what":"Other notes","title":"Random forests via ranger — details_rand_forest_ranger","text":"default, parallel processing turned . tuning, efficient parallelize resamples tuning parameters. parallelize construction trees within ranger model, change num.threads argument via set_engine(). ranger confidence intervals, intervals constructed using form estimate +/- z * std_error. classification probabilities, values can fall outside [0, 1] coerced range.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Random forests via ranger — details_rand_forest_ranger","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Random forests via ranger — details_rand_forest_ranger","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Random forests via ranger — details_rand_forest_ranger","text":"“Fitting Predicting parsnip” article contains examples rand_forest() \"ranger\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_ranger.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Random forests via ranger — details_rand_forest_ranger","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":null,"dir":"Reference","previous_headings":"","what":"Random forests via spark — details_rand_forest_spark","title":"Random forests via spark — details_rand_forest_spark","text":"sparklyr::ml_random_forest() fits model creates large number decision trees, independent others. 
final prediction uses predictions individual trees combines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Random forests via spark — details_rand_forest_spark","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Random forests via spark — details_rand_forest_spark","text":"model 3 tuning parameters: mtry: # Randomly Selected Predictors (type: integer, default: see ) trees: # Trees (type: integer, default: 20L) min_n: Minimal Node Size (type: integer, default: 1L) mtry depends number columns model mode. default sparklyr::ml_random_forest() floor(sqrt(ncol(x))) classification floor(ncol(x)/3) regression.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Random forests via spark — details_rand_forest_spark","text":"min_rows() min_cols() adjust number neighbors chosen value consistent actual data dimensions.","code":"rand_forest( mtry = integer(1), trees = integer(1), min_n = integer(1) ) %>% set_engine(\"spark\") %>% set_mode(\"regression\") %>% translate() ## Random Forest Model Specification (regression) ## ## Main Arguments: ## mtry = integer(1) ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_random_forest(x = missing_arg(), formula = missing_arg(), ## type = \"regression\", feature_subset_strategy = integer(1), ## num_trees = integer(1), min_instances_per_node = min_rows(~integer(1), ## x), seed = sample.int(10^5, 
1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Random forests via spark — details_rand_forest_spark","text":"","code":"rand_forest( mtry = integer(1), trees = integer(1), min_n = integer(1) ) %>% set_engine(\"spark\") %>% set_mode(\"classification\") %>% translate() ## Random Forest Model Specification (classification) ## ## Main Arguments: ## mtry = integer(1) ## trees = integer(1) ## min_n = integer(1) ## ## Computational engine: spark ## ## Model fit template: ## sparklyr::ml_random_forest(x = missing_arg(), formula = missing_arg(), ## type = \"classification\", feature_subset_strategy = integer(1), ## num_trees = integer(1), min_instances_per_node = min_rows(~integer(1), ## x), seed = sample.int(10^5, 1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Random forests via spark — details_rand_forest_spark","text":"engine require special encoding predictors. Categorical predictors can partitioned groups factor levels (e.g. {, c} vs {b, d}) splitting node. Dummy variables required model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Random forests via spark — details_rand_forest_spark","text":"models created using \"spark\" engine, several things consider. formula interface via fit() available; using fit_xy() generate error. predictions always Spark table format. names documented without dots. equivalent factor columns Spark tables class predictions returned character columns. 
retain model object new R session (via save()), model$fit element parsnip object serialized via ml_save(object$fit) separately saved disk. new session, object can reloaded reattached parsnip object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Random forests via spark — details_rand_forest_spark","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights. Note , spark engines, case_weight argument value character string specify column numeric case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rand_forest_spark.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Random forests via spark — details_rand_forest_spark","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_h2o.html","id":null,"dir":"Reference","previous_headings":"","what":"RuleFit models via h2o — details_rule_fit_h2o","title":"RuleFit models via h2o — details_rule_fit_h2o","text":"h2o::h2o.rulefit() fits model derives simple feature rules tree ensemble uses rules features regularized (LASSO) model. 
agua::h2o_train_rule() wrapper around function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_h2o.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"RuleFit models via h2o — details_rule_fit_h2o","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_h2o.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"RuleFit models via h2o — details_rule_fit_h2o","text":"model 3 tuning parameters: trees: # Trees (type: integer, default: 50L) tree_depth: Tree Depth (type: integer, default: 3L) penalty: Amount Regularization (type: double, default: 0) Note penalty h2o engine `rule_fit()`` corresponds L1 penalty (LASSO). engine arguments interest: algorithm: algorithm use generate rules. one “AUTO”, “DRF”, “GBM”, defaults “AUTO”. min_rule_length: Minimum length tree depth, opposite tree_dpeth, defaults 3. max_num_rules: maximum number rules return. default value -1 means number rules selected diminishing returns model deviance. model_type: type base learners ensemble, one : “rules_and_linear”, “rules”, “linear”, defaults “rules_and_linear”.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_h2o.html","id":"translation-from-parsnip-to-the-underlying-model-call-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (regression)","title":"RuleFit models via h2o — details_rule_fit_h2o","text":"agua::h2o_train_rule() wrapper around h2o::h2o.rulefit(). 
agua extension package required fit model.","code":"library(rules) rule_fit( trees = integer(1), tree_depth = integer(1), penalty = numeric(1) ) %>% set_engine(\"h2o\") %>% set_mode(\"regression\") %>% translate() ## RuleFit Model Specification (regression) ## ## Main Arguments: ## trees = integer(1) ## tree_depth = integer(1) ## penalty = numeric(1) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_rule(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), rule_generation_ntrees = integer(1), ## max_rule_length = integer(1), lambda = numeric(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_h2o.html","id":"translation-from-parsnip-to-the-underlying-model-call-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (classification)","title":"RuleFit models via h2o — details_rule_fit_h2o","text":"agua::h2o_train_rule() rule_fit() wrapper around h2o::h2o.rulefit(). 
agua extension package required fit model.","code":"rule_fit( trees = integer(1), tree_depth = integer(1), penalty = numeric(1) ) %>% set_engine(\"h2o\") %>% set_mode(\"classification\") %>% translate() ## RuleFit Model Specification (classification) ## ## Main Arguments: ## trees = integer(1) ## tree_depth = integer(1) ## penalty = numeric(1) ## ## Computational engine: h2o ## ## Model fit template: ## agua::h2o_train_rule(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## validation_frame = missing_arg(), rule_generation_ntrees = integer(1), ## max_rule_length = integer(1), lambda = numeric(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_h2o.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"RuleFit models via h2o — details_rule_fit_h2o","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_h2o.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"RuleFit models via h2o — details_rule_fit_h2o","text":"use h2o engine tidymodels, please run h2o::h2o.init() first. default, connects R local h2o server. needs done every new R session. can also connect remote h2o server IP address, details see h2o::h2o.init(). can control number threads thread pool used h2o nthreads argument. default, uses CPUs host. different usual parallel processing mechanism tidymodels tuning, tidymodels parallelizes resamples, h2o parallelizes hyperparameter combinations given resample. h2o automatically shut local h2o instance started R R terminated. 
manually stop h2o server, run h2o::h2o.shutdown().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_h2o.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"RuleFit models via h2o — details_rule_fit_h2o","text":"Models fitted engine may require native serialization methods properly saved /passed R sessions. learn preparing fitted models serialization, see bundle package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":null,"dir":"Reference","previous_headings":"","what":"RuleFit models via xrf — details_rule_fit_xrf","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"xrf::xrf() fits model derives simple feature rules tree ensemble uses rules features regularized model. rules::xrf_fit() wrapper around function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"model 8 tuning parameters: mtry: Proportion Randomly Selected Predictors (type: double, default: see ) trees: # Trees (type: integer, default: 15L) min_n: Minimal Node Size (type: integer, default: 1L) tree_depth: Tree Depth (type: integer, default: 6L) learn_rate: Learning Rate (type: double, default: 0.3) loss_reduction: Minimum Loss Reduction (type: double, default: 0.0) sample_size: Proportion Observations Sampled (type: double, default: 1.0) penalty: Amount Regularization (type: double, default: 
0.1)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"translation-from-parsnip-to-the-underlying-model-call-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (regression)","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"rules extension package required fit model.","code":"library(rules) rule_fit( mtry = numeric(1), trees = integer(1), min_n = integer(1), tree_depth = integer(1), learn_rate = numeric(1), loss_reduction = numeric(1), sample_size = numeric(1), penalty = numeric(1) ) %>% set_engine(\"xrf\") %>% set_mode(\"regression\") %>% translate() ## RuleFit Model Specification (regression) ## ## Main Arguments: ## mtry = numeric(1) ## trees = integer(1) ## min_n = integer(1) ## tree_depth = integer(1) ## learn_rate = numeric(1) ## loss_reduction = numeric(1) ## sample_size = numeric(1) ## penalty = numeric(1) ## ## Computational engine: xrf ## ## Model fit template: ## rules::xrf_fit(formula = missing_arg(), data = missing_arg(), ## xgb_control = missing_arg(), colsample_bynode = numeric(1), ## nrounds = integer(1), min_child_weight = integer(1), max_depth = integer(1), ## eta = numeric(1), gamma = numeric(1), subsample = numeric(1), ## lambda = numeric(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"translation-from-parsnip-to-the-underlying-model-call-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the underlying model call (classification)","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"rules extension package required fit model.","code":"library(rules) rule_fit( mtry = numeric(1), trees = integer(1), min_n = integer(1), tree_depth = integer(1), learn_rate = numeric(1), loss_reduction = numeric(1), sample_size = numeric(1), penalty = numeric(1) ) %>% set_engine(\"xrf\") %>% set_mode(\"classification\") %>% translate() ## 
RuleFit Model Specification (classification) ## ## Main Arguments: ## mtry = numeric(1) ## trees = integer(1) ## min_n = integer(1) ## tree_depth = integer(1) ## learn_rate = numeric(1) ## loss_reduction = numeric(1) ## sample_size = numeric(1) ## penalty = numeric(1) ## ## Computational engine: xrf ## ## Model fit template: ## rules::xrf_fit(formula = missing_arg(), data = missing_arg(), ## xgb_control = missing_arg(), colsample_bynode = numeric(1), ## nrounds = integer(1), min_child_weight = integer(1), max_depth = integer(1), ## eta = numeric(1), gamma = numeric(1), subsample = numeric(1), ## lambda = numeric(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"differences-from-the-xrf-package","dir":"Reference","previous_headings":"","what":"Differences from the xrf package","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"Note , per documentation ?xrf, transformations response variable supported. use rule_fit(), recommend using recipe instead formula method. Also, several configuration differences xrf() fit package wrapper used rules. differences default values : differences create disparity values penalty argument glmnet uses. Also, rules can also set penalty whereas xrf uses internal 5-fold cross-validation determine (default).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. 
using formula method via fit(), parsnip convert factor columns indicators.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"interpreting-mtry","dir":"Reference","previous_headings":"","what":"Interpreting mtry","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"mtry argument denotes number predictors randomly sampled split creating tree models. engines, \"xgboost\", \"xrf\", \"lightgbm\", interpret analogue mtry argument proportion predictors randomly sampled split rather count. settings, tuning preprocessors influence number predictors, parameterization quite helpful—interpreting mtry proportion means [0, 1] always valid range parameter, regardless input data. parsnip extensions accommodate parameterization using counts argument: logical indicating whether mtry interpreted number predictors randomly sampled split. TRUE indicates mtry interpreted sense count, FALSE indicates argument interpreted sense proportion. mtry main model argument boost_tree() rand_forest(), thus engine-specific interface. , regardless engine, counts defaults TRUE. engines support proportion interpretation (currently \"xgboost\" \"xrf\", via rules package, \"lightgbm\" via bonsai package) user can pass counts = FALSE argument set_engine() supply mtry values within [0, 1].","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"early-stopping","dir":"Reference","previous_headings":"","what":"Early stopping","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"stop_iter() argument allows model prematurely stop training objective function improve within early_stop iterations. best way use feature conjunction internal validation set. , pass validation parameter xgb_train() via parsnip set_engine() function. proportion training set reserved measuring performance (stopping early). 
model specification early_stop >= trees, early_stop converted trees - 1 warning issued.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_rule_fit_xrf.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"RuleFit models via xrf — details_rule_fit_xrf","text":"Friedman Popescu. “Predictive learning via rule ensembles.” Ann. Appl. Stat. 2 (3) 916- 954, September 2008","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_flexsurv.html","id":null,"dir":"Reference","previous_headings":"","what":"Parametric survival regression — details_surv_reg_flexsurv","title":"Parametric survival regression — details_surv_reg_flexsurv","text":"flexsurv::flexsurvreg() fits parametric survival model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_flexsurv.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Parametric survival regression — details_surv_reg_flexsurv","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_flexsurv.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Parametric survival regression — details_surv_reg_flexsurv","text":"model 1 tuning parameters: dist: Distribution (type: character, default: ‘weibull’)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_flexsurv.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Parametric survival regression 
— details_surv_reg_flexsurv","text":"","code":"surv_reg(dist = character(1)) %>% set_engine(\"flexsurv\") %>% set_mode(\"regression\") %>% translate() ## Parametric Survival Regression Model Specification (regression) ## ## Main Arguments: ## dist = character(1) ## ## Computational engine: flexsurv ## ## Model fit template: ## flexsurv::flexsurvreg(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), dist = character(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_flexsurv.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Parametric survival regression — details_surv_reg_flexsurv","text":"main interface model uses formula method since model specification typically involved use survival::Surv(). engine, stratification specified via strata(), please see flexsurv::flexsurvreg() alternative specifications.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_flexsurv.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Parametric survival regression — details_surv_reg_flexsurv","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_flexsurv.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Parametric survival regression — details_surv_reg_flexsurv","text":"Jackson, C. 2016. flexsurv: Platform Parametric Survival Modeling R. 
Journal Statistical Software, 70(8), 1 - 33.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_survival.html","id":null,"dir":"Reference","previous_headings":"","what":"Parametric survival regression — details_surv_reg_survival","title":"Parametric survival regression — details_surv_reg_survival","text":"survival::survreg() fits parametric survival model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_survival.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Parametric survival regression — details_surv_reg_survival","text":"engine, single mode: regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_survival.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Parametric survival regression — details_surv_reg_survival","text":"model 1 tuning parameters: dist: Distribution (type: character, default: ‘weibull’)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_survival.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Parametric survival regression — details_surv_reg_survival","text":"","code":"surv_reg(dist = character(1)) %>% set_engine(\"survival\") %>% set_mode(\"regression\") %>% translate() ## Parametric Survival Regression Model Specification (regression) ## ## Main Arguments: ## dist = character(1) ## ## Computational engine: survival ## ## Model fit template: ## survival::survreg(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), dist = character(1), model = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_survival.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Parametric survival regression — 
details_surv_reg_survival","text":"Note model = TRUE needed produce quantile predictions stratification variable can overridden cases. main interface model uses formula method since model specification typically involved use survival::Surv(). model formula can include special terms, survival::strata(). allows model scale parameter differ groups contained function. column used inside strata() treated qualitative matter type. learn using special terms formulas tidymodels, see ?model_formula. example, model, numeric column rx used estimate two different scale parameters value column:","code":"library(survival) surv_reg() %>% fit(Surv(futime, fustat) ~ age + strata(rx), data = ovarian) %>% extract_fit_engine() ## Call: ## survival::survreg(formula = Surv(futime, fustat) ~ age + strata(rx), ## data = data, model = TRUE) ## ## Coefficients: ## (Intercept) age ## 12.8734120 -0.1033569 ## ## Scale: ## rx=1 rx=2 ## 0.7695509 0.4703602 ## ## Loglik(model)= -89.4 Loglik(intercept only)= -97.1 ## Chisq= 15.36 on 1 degrees of freedom, p= 8.88e-05 ## n= 26"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_surv_reg_survival.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Parametric survival regression — details_surv_reg_survival","text":"Kalbfleisch, J. D. Prentice, R. L. 
2002 statistical analysis failure time data, Wiley.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurv.html","id":null,"dir":"Reference","previous_headings":"","what":"Parametric survival regression — details_survival_reg_flexsurv","title":"Parametric survival regression — details_survival_reg_flexsurv","text":"flexsurv::flexsurvreg() fits parametric survival model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurv.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Parametric survival regression — details_survival_reg_flexsurv","text":"engine, single mode: censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurv.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Parametric survival regression — details_survival_reg_flexsurv","text":"model 1 tuning parameters: dist: Distribution (type: character, default: ‘weibull’)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurv.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Parametric survival regression — details_survival_reg_flexsurv","text":"censored extension package required fit model.","code":"library(censored) survival_reg(dist = character(1)) %>% set_engine(\"flexsurv\") %>% set_mode(\"censored regression\") %>% translate() ## Parametric Survival Regression Model Specification (censored regression) ## ## Main Arguments: ## dist = character(1) ## ## Computational engine: flexsurv ## ## Model fit template: ## flexsurv::flexsurvreg(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), dist = 
character(1))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurv.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Parametric survival regression — details_survival_reg_flexsurv","text":"main interface model uses formula method since model specification typically involved use survival::Surv(). engine, stratification specified via strata(), please see flexsurv::flexsurvreg() alternative specifications. Predictions type \"time\" predictions mean survival time.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurv.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Parametric survival regression — details_survival_reg_flexsurv","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurv.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Parametric survival regression — details_survival_reg_flexsurv","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurv.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Parametric survival regression — details_survival_reg_flexsurv","text":"Jackson, C. 2016. flexsurv: Platform Parametric Survival Modeling R. 
Journal Statistical Software, 70(8), 1 - 33.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurvspline.html","id":null,"dir":"Reference","previous_headings":"","what":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","title":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","text":"flexsurv::flexsurvspline() fits flexible parametric survival model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurvspline.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","text":"engine, single mode: censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurvspline.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","text":"model one engine-specific tuning parameter: k: Number knots spline. 
default k = 0.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurvspline.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","text":"censored extension package required fit model.","code":"library(censored) survival_reg() %>% set_engine(\"flexsurvspline\") %>% set_mode(\"censored regression\") %>% translate() ## Parametric Survival Regression Model Specification (censored regression) ## ## Computational engine: flexsurvspline ## ## Model fit template: ## flexsurv::flexsurvspline(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg())"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurvspline.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","text":"main interface model uses formula method since model specification typically involved use survival::Surv(). engine, stratification specified via strata(), please see flexsurv::flexsurvspline() alternative specifications. Predictions type \"time\" predictions mean survival time.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurvspline.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. 
fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurvspline.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_flexsurvspline.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Flexible parametric survival regression — details_survival_reg_flexsurvspline","text":"Jackson, C. 2016. flexsurv: Platform Parametric Survival Modeling R. Journal Statistical Software, 70(8), 1 - 33.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_survival.html","id":null,"dir":"Reference","previous_headings":"","what":"Parametric survival regression — details_survival_reg_survival","title":"Parametric survival regression — details_survival_reg_survival","text":"survival::survreg() fits parametric survival model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_survival.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Parametric survival regression — details_survival_reg_survival","text":"engine, single mode: censored regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_survival.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Parametric survival regression — details_survival_reg_survival","text":"model 1 tuning parameters: dist: Distribution (type: character, default: 
‘weibull’)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_survival.html","id":"translation-from-parsnip-to-the-original-package","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package","title":"Parametric survival regression — details_survival_reg_survival","text":"censored extension package required fit model.","code":"library(censored) survival_reg(dist = character(1)) %>% set_engine(\"survival\") %>% set_mode(\"censored regression\") %>% translate() ## Parametric Survival Regression Model Specification (censored regression) ## ## Main Arguments: ## dist = character(1) ## ## Computational engine: survival ## ## Model fit template: ## survival::survreg(formula = missing_arg(), data = missing_arg(), ## weights = missing_arg(), dist = character(1), model = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_survival.html","id":"other-details","dir":"Reference","previous_headings":"","what":"Other details","title":"Parametric survival regression — details_survival_reg_survival","text":"translated syntax , note model = TRUE needed produce quantile predictions stratification variable can overridden cases. main interface model uses formula method since model specification typically involved use survival::Surv(). model formula can include special terms, survival::strata(). allows model scale parameter differ groups contained function. column used inside strata() treated qualitative matter type. learn using special terms formulas tidymodels, see ?model_formula. 
example, model, numeric column rx used estimate two different scale parameters value column: Predictions type \"time\" predictions mean survival time.","code":"library(survival) survival_reg() %>% fit(Surv(futime, fustat) ~ age + strata(rx), data = ovarian) %>% extract_fit_engine() ## Call: ## survival::survreg(formula = Surv(futime, fustat) ~ age + strata(rx), ## data = data, model = TRUE) ## ## Coefficients: ## (Intercept) age ## 12.8734120 -0.1033569 ## ## Scale: ## rx=1 rx=2 ## 0.7695509 0.4703602 ## ## Loglik(model)= -89.4 Loglik(intercept only)= -97.1 ## Chisq= 15.36 on 1 degrees of freedom, p= 8.88e-05 ## n= 26"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_survival.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Parametric survival regression — details_survival_reg_survival","text":"model can utilize case weights model fitting. use , see documentation case_weights examples tidymodels.org. fit() fit_xy() arguments arguments called case_weights expect vectors case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_survival.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Parametric survival regression — details_survival_reg_survival","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_survival_reg_survival.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Parametric survival regression — details_survival_reg_survival","text":"Kalbfleisch, J. D. Prentice, R. L. 
2002 statistical analysis failure time data, Wiley.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","title":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","text":"LiblineaR::LiblineaR() fits support vector machine model. classification, model tries maximize width margin classes. regression, model optimizes robust loss function affected large model residuals.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","text":"model 2 tuning parameters: cost: Cost (type: double, default: 1.0) margin: Insensitivity Margin (type: double, default: default) engine fits models L2-regularized L2-loss. 
LiblineaR::LiblineaR() documentation, types 1 (classification) 11 (regression).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","text":"","code":"svm_linear( cost = double(1), margin = double(1) ) %>% set_engine(\"LiblineaR\") %>% set_mode(\"regression\") %>% translate() ## Linear Support Vector Machine Model Specification (regression) ## ## Main Arguments: ## cost = double(1) ## margin = double(1) ## ## Computational engine: LiblineaR ## ## Model fit template: ## LiblineaR::LiblineaR(x = missing_arg(), y = missing_arg(), C = double(1), ## svr_eps = double(1), type = 11)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","text":"margin parameter apply classification models. Note LiblineaR engine produce class probabilities. optimizing model using tune package, default metrics require class probabilities. 
use tune_*() functions, metric set must passed argument contains metrics hard class predictions (e.g., accuracy).","code":"svm_linear( cost = double(1) ) %>% set_engine(\"LiblineaR\") %>% set_mode(\"classification\") %>% translate() ## Linear Support Vector Machine Model Specification (classification) ## ## Main Arguments: ## cost = double(1) ## ## Computational engine: LiblineaR ## ## Model fit template: ## LiblineaR::LiblineaR(x = missing_arg(), y = missing_arg(), C = double(1), ## type = 1)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear support vector machines (SVMs) via LiblineaR — details_svm_linear_LiblineaR","text":"“Fitting Predicting parsnip” article contains examples svm_linear() \"LiblineaR\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_LiblineaR.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear support vector machines (SVMs) via 
LiblineaR — details_svm_linear_LiblineaR","text":"Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"kernlab::ksvm() fits support vector machine model. classification, model tries maximize width margin classes. regression, model optimizes robust loss function affected large model residuals.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"model 2 tuning parameters: cost: Cost (type: double, default: 1.0) margin: Insensitivity Margin (type: double, default: 0.1)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"","code":"svm_linear( cost = double(1), margin = double(1) ) %>% set_engine(\"kernlab\") %>% set_mode(\"regression\") %>% translate() ## Linear Support Vector Machine Model Specification (regression) ## ## Main Arguments: ## cost = double(1) ## 
margin = double(1) ## ## Computational engine: kernlab ## ## Model fit template: ## kernlab::ksvm(x = missing_arg(), data = missing_arg(), C = double(1), ## epsilon = double(1), kernel = \"vanilladot\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"margin parameter apply classification models. Note \"kernlab\" engine naturally estimate class probabilities. produce , decision values model converted probabilities using Platt scaling. method fits additional model top SVM model. fitting Platt scaling model, random numbers used reproducible controlled R’s random number stream.","code":"svm_linear( cost = double(1) ) %>% set_engine(\"kernlab\") %>% set_mode(\"classification\") %>% translate() ## Linear Support Vector Machine Model Specification (classification) ## ## Main Arguments: ## cost = double(1) ## ## Computational engine: kernlab ## ## Model fit template: ## kernlab::ksvm(x = missing_arg(), data = missing_arg(), C = double(1), ## kernel = \"vanilladot\", prob.model = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. 
One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"“Fitting Predicting parsnip” article contains examples svm_linear() \"kernlab\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_linear_kernlab.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear support vector machines (SVMs) via kernlab — details_svm_linear_kernlab","text":"Lin, HT, R Weng. “Note Platt’s Probabilistic Outputs Support Vector Machines” Karatzoglou, , Smola, , Hornik, K, Zeileis. 2004. “kernlab - S4 Package Kernel Methods R.”, Journal Statistical Software. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":null,"dir":"Reference","previous_headings":"","what":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"kernlab::ksvm() fits support vector machine model. classification, model tries maximize width margin classes. regression, model optimizes robust loss function affected large model residuals.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"model 4 tuning parameters: cost: Cost (type: double, default: 1.0) degree: Degree Interaction (type: integer, default: 1L1) scale_factor: Scale Factor (type: double, default: 1.0) margin: Insensitivity Margin (type: double, default: 0.1)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"","code":"svm_poly( cost = double(1), degree = integer(1), scale_factor = double(1), margin = double(1) ) %>% set_engine(\"kernlab\") %>% set_mode(\"regression\") %>% translate() ## Polynomial Support Vector Machine Model Specification 
(regression) ## ## Main Arguments: ## cost = double(1) ## degree = integer(1) ## scale_factor = double(1) ## margin = double(1) ## ## Computational engine: kernlab ## ## Model fit template: ## kernlab::ksvm(x = missing_arg(), data = missing_arg(), C = double(1), ## epsilon = double(1), kernel = \"polydot\", kpar = list(degree = ~integer(1), ## scale = ~double(1)))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"margin parameter apply classification models. Note \"kernlab\" engine naturally estimate class probabilities. produce , decision values model converted probabilities using Platt scaling. method fits additional model top SVM model. fitting Platt scaling model, random numbers used reproducible controlled R’s random number stream.","code":"svm_poly( cost = double(1), degree = integer(1), scale_factor = double(1) ) %>% set_engine(\"kernlab\") %>% set_mode(\"classification\") %>% translate() ## Polynomial Support Vector Machine Model Specification (classification) ## ## Main Arguments: ## cost = double(1) ## degree = integer(1) ## scale_factor = double(1) ## ## Computational engine: kernlab ## ## Model fit template: ## kernlab::ksvm(x = missing_arg(), data = missing_arg(), C = double(1), ## kernel = \"polydot\", prob.model = TRUE, kpar = list(degree = ~integer(1), ## scale = ~double(1)))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"Factor/categorical predictors need converted numeric values 
(e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"“Fitting Predicting parsnip” article contains examples svm_poly() \"kernlab\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"model object contains data required make predictions. saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_poly_kernlab.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Polynomial support vector machines (SVMs) via kernlab — details_svm_poly_kernlab","text":"Lin, HT, R Weng. “Note Platt’s Probabilistic Outputs Support Vector Machines” Karatzoglou, , Smola, , Hornik, K, Zeileis. 2004. “kernlab - S4 Package Kernel Methods R.”, Journal Statistical Software. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. 
Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":null,"dir":"Reference","previous_headings":"","what":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"kernlab::ksvm() fits support vector machine model. classification, model tries maximize width margin classes. regression, model optimizes robust loss function affected large model residuals.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"engine, multiple modes: classification regression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"tuning-parameters","dir":"Reference","previous_headings":"","what":"Tuning Parameters","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"model 3 tuning parameters: cost: Cost (type: double, default: 1.0) rbf_sigma: Radial Basis Function sigma (type: double, default: see ) margin: Insensitivity Margin (type: double, default: 0.1) default radial basis function kernel parameter. kernlab estimates data using heuristic method. See kernlab::sigest(). 
method uses random numbers , without setting seed fitting, model reproducible.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"translation-from-parsnip-to-the-original-package-regression-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (regression)","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"","code":"svm_rbf( cost = double(1), rbf_sigma = double(1), margin = double(1) ) %>% set_engine(\"kernlab\") %>% set_mode(\"regression\") %>% translate() ## Radial Basis Function Support Vector Machine Model Specification (regression) ## ## Main Arguments: ## cost = double(1) ## rbf_sigma = double(1) ## margin = double(1) ## ## Computational engine: kernlab ## ## Model fit template: ## kernlab::ksvm(x = missing_arg(), data = missing_arg(), C = double(1), ## epsilon = double(1), kernel = \"rbfdot\", kpar = list(sigma = ~double(1)))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"translation-from-parsnip-to-the-original-package-classification-","dir":"Reference","previous_headings":"","what":"Translation from parsnip to the original package (classification)","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"margin parameter apply classification models. Note \"kernlab\" engine naturally estimate class probabilities. produce , decision values model converted probabilities using Platt scaling. method fits additional model top SVM model. 
fitting Platt scaling model, random numbers used reproducible controlled R’s random number stream.","code":"svm_rbf( cost = double(1), rbf_sigma = double(1) ) %>% set_engine(\"kernlab\") %>% set_mode(\"classification\") %>% translate() ## Radial Basis Function Support Vector Machine Model Specification (classification) ## ## Main Arguments: ## cost = double(1) ## rbf_sigma = double(1) ## ## Computational engine: kernlab ## ## Model fit template: ## kernlab::ksvm(x = missing_arg(), data = missing_arg(), C = double(1), ## kernel = \"rbfdot\", prob.model = TRUE, kpar = list(sigma = ~double(1)))"},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"preprocessing-requirements","dir":"Reference","previous_headings":"","what":"Preprocessing requirements","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"Factor/categorical predictors need converted numeric values (e.g., dummy indicator variables) engine. using formula method via fit(), parsnip convert factor columns indicators. Predictors scale. One way achieve center scale predictor mean zero variance one.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"case-weights","dir":"Reference","previous_headings":"","what":"Case weights","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"underlying model implementation allow case weights.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"saving-fitted-model-objects","dir":"Reference","previous_headings":"","what":"Saving fitted model objects","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"model object contains data required make predictions. 
saving model purpose prediction, size saved object might substantially reduced using functions butcher package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"“Fitting Predicting parsnip” article contains examples svm_rbf() \"kernlab\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/details_svm_rbf_kernlab.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Radial basis function support vector machines (SVMs) via kernlab — details_svm_rbf_kernlab","text":"Lin, HT, R Weng. “Note Platt’s Probabilistic Outputs Support Vector Machines” Karatzoglou, , Smola, , Hornik, K, Zeileis. 2004. “kernlab - S4 Package Kernel Methods R.”, Journal Statistical Software. Kuhn, M, K Johnson. 2013. Applied Predictive Modeling. Springer.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_flexible.html","id":null,"dir":"Reference","previous_headings":"","what":"Flexible discriminant analysis — discrim_flexible","title":"Flexible discriminant analysis — discrim_flexible","text":"discrim_flexible() defines model fits discriminant analysis model can use nonlinear features created using multivariate adaptive regression splines (MARS). function can fit classification models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
earth¹² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_flexible.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Flexible discriminant analysis — discrim_flexible","text":"","code":"discrim_flexible( mode = \"classification\", num_terms = NULL, prod_degree = NULL, prune_method = NULL, engine = \"earth\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_flexible.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Flexible discriminant analysis — discrim_flexible","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". num_terms number features retained final model, including intercept. prod_degree highest possible interaction degree. prune_method pruning method. engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_flexible.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Flexible discriminant analysis — discrim_flexible","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 discrim_flexible(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_flexible.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Flexible discriminant analysis — discrim_flexible","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_linear.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear discriminant analysis — discrim_linear","title":"Linear discriminant analysis — discrim_linear","text":"discrim_linear() defines model estimates multivariate distribution predictors separately data class (usually Gaussian common covariance matrix). Bayes' theorem used compute probability class, given predictor values. function can fit classification models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . MASS¹² mda² sda² sparsediscrim² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_linear.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Linear discriminant analysis — discrim_linear","text":"","code":"discrim_linear( mode = \"classification\", penalty = NULL, regularization_method = NULL, engine = \"MASS\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_linear.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Linear discriminant analysis — discrim_linear","text":"mode single character string type model. possible value model \"classification\". penalty non-negative number representing amount regularization used engines. regularization_method character string type regularized estimation. 
Possible values : \"diagonal\", \"min_distance\", \"shrink_cov\", \"shrink_mean\" (sparsediscrim engine ). engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_linear.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear discriminant analysis — discrim_linear","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 discrim_linear(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_linear.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear discriminant analysis — discrim_linear","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_quad.html","id":null,"dir":"Reference","previous_headings":"","what":"Quadratic discriminant analysis — discrim_quad","title":"Quadratic discriminant analysis — discrim_quad","text":"discrim_quad() defines model estimates multivariate distribution predictors separately data class (usually Gaussian separate covariance matrices). Bayes' theorem used compute probability class, given predictor values. function can fit classification models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
MASS¹² sparsediscrim² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_quad.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Quadratic discriminant analysis — discrim_quad","text":"","code":"discrim_quad( mode = \"classification\", regularization_method = NULL, engine = \"MASS\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_quad.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Quadratic discriminant analysis — discrim_quad","text":"mode single character string type model. possible value model \"classification\". regularization_method character string type regularized estimation. Possible values : \"diagonal\", \"shrink_cov\", \"shrink_mean\" (sparsediscrim engine ). engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_quad.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Quadratic discriminant analysis — discrim_quad","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 discrim_quad(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_quad.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Quadratic discriminant analysis — discrim_quad","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_regularized.html","id":null,"dir":"Reference","previous_headings":"","what":"Regularized discriminant analysis — discrim_regularized","title":"Regularized discriminant analysis — discrim_regularized","text":"discrim_regularized() defines model estimates multivariate distribution predictors separately data class. structure model can LDA, QDA, amalgam two. Bayes' theorem used compute probability class, given predictor values. function can fit classification models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . klaR¹² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_regularized.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Regularized discriminant analysis — discrim_regularized","text":"","code":"discrim_regularized( mode = \"classification\", frac_common_cov = NULL, frac_identity = NULL, engine = \"klaR\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_regularized.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Regularized discriminant analysis — discrim_regularized","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". frac_common_cov, frac_identity Numeric values zero one. 
engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_regularized.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Regularized discriminant analysis — discrim_regularized","text":"many ways regularizing models. example, one form regularization penalize model parameters. Similarly, classic James–Stein regularization approach shrinks model structure less complex form. model fits specific type regularized model Friedman (1989) uses two types regularization. One modulates class-specific covariance matrix . allows model balance LDA QDA. second regularization component shrinks covariance matrix towards identity matrix. penalization approach, discrim_linear() mda engine can used. regularization methods can used discrim_linear() discrim_quad() can used via sparsediscrim engine functions. function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 discrim_regularized(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/discrim_regularized.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Regularized discriminant analysis — discrim_regularized","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models Friedman, J (1989). Regularized Discriminant Analysis. 
Journal American Statistical Association, 84, 165-175.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/doc-tools.html","id":null,"dir":"Reference","previous_headings":"","what":"Tools for documenting engines — doc-tools","title":"Tools for documenting engines — doc-tools","text":"parsnip fairly complex documentation system engines model detailed documentation syntax, tuning parameters, preprocessing needs, . functions called .R files programmatically generate content help files model. find_engine_files() identifies engines model creates bulleted list links specific help files. make_seealso_list() creates set links \"See Also\" list bottom help pages. find_engine_files() function, used , find engines model function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/doc-tools.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Tools for documenting engines — doc-tools","text":"","code":"find_engine_files(mod) make_engine_list(mod) make_seealso_list(mod, pkg = \"parsnip\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/doc-tools.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Tools for documenting engines — doc-tools","text":"mod character string model file (e.g. \"linear_reg\") pkg character string package function invoked.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/doc-tools.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Tools for documenting engines — doc-tools","text":"make_engine_list() returns character string creates bulleted list links specific help files. make_seealso_list() returns formatted character string links. 
find_engine_files() returns tibble.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/doc-tools.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Tools for documenting engines — doc-tools","text":"parsnip includes document (README-DOCS.md) step--step instructions details. See code determine installed (see References section). parsnip users need use functions documentation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/doc-tools.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Tools for documenting engines — doc-tools","text":"https://github.com/tidymodels/parsnip/blob/main/inst/README-DOCS.md","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/doc-tools.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Tools for documenting engines — doc-tools","text":"","code":"# See this file for step-by-step instructions. system.file(\"README-DOCS.md\", package = \"parsnip\") #> [1] \"/home/runner/work/_temp/Library/parsnip/README-DOCS.md\" # Code examples: make_engine_list(\"linear_reg\") #> There are different ways to fit this model, and the method of estimation is chosen by setting the model \\emph{engine}. The engine-specific pages for this model are listed below. 
#> #> #> \\itemize{ #> \\item \\code{\\link[parsnip:details_linear_reg_lm]{lm}¹} #> \\item \\code{\\link[parsnip:details_linear_reg_brulee]{brulee}} #> \\item \\code{\\link[parsnip:details_linear_reg_gee]{gee}²} #> \\item \\code{\\link[parsnip:details_linear_reg_glm]{glm}} #> \\item \\code{\\link[parsnip:details_linear_reg_glmer]{glmer}²} #> \\item \\code{\\link[parsnip:details_linear_reg_glmnet]{glmnet}} #> \\item \\code{\\link[parsnip:details_linear_reg_gls]{gls}²} #> \\item \\code{\\link[parsnip:details_linear_reg_h2o]{h2o}²} #> \\item \\code{\\link[parsnip:details_linear_reg_keras]{keras}} #> \\item \\code{\\link[parsnip:details_linear_reg_lme]{lme}²} #> \\item \\code{\\link[parsnip:details_linear_reg_lmer]{lmer}²} #> \\item \\code{\\link[parsnip:details_linear_reg_spark]{spark}} #> \\item \\code{\\link[parsnip:details_linear_reg_stan]{stan}} #> \\item \\code{\\link[parsnip:details_linear_reg_stan_glmer]{stan_glmer}²} #> } #> #> #> ¹ The default engine. ² Requires a parsnip extension package. cat(make_engine_list(\"linear_reg\")) #> There are different ways to fit this model, and the method of estimation is chosen by setting the model \\emph{engine}. The engine-specific pages for this model are listed below. 
#> #> #> \\itemize{ #> \\item \\code{\\link[parsnip:details_linear_reg_lm]{lm}¹} #> \\item \\code{\\link[parsnip:details_linear_reg_brulee]{brulee}} #> \\item \\code{\\link[parsnip:details_linear_reg_gee]{gee}²} #> \\item \\code{\\link[parsnip:details_linear_reg_glm]{glm}} #> \\item \\code{\\link[parsnip:details_linear_reg_glmer]{glmer}²} #> \\item \\code{\\link[parsnip:details_linear_reg_glmnet]{glmnet}} #> \\item \\code{\\link[parsnip:details_linear_reg_gls]{gls}²} #> \\item \\code{\\link[parsnip:details_linear_reg_h2o]{h2o}²} #> \\item \\code{\\link[parsnip:details_linear_reg_keras]{keras}} #> \\item \\code{\\link[parsnip:details_linear_reg_lme]{lme}²} #> \\item \\code{\\link[parsnip:details_linear_reg_lmer]{lmer}²} #> \\item \\code{\\link[parsnip:details_linear_reg_spark]{spark}} #> \\item \\code{\\link[parsnip:details_linear_reg_stan]{stan}} #> \\item \\code{\\link[parsnip:details_linear_reg_stan_glmer]{stan_glmer}²} #> } #> #> #> ¹ The default engine. ² Requires a parsnip extension package."},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-extract_surv_status.html","id":null,"dir":"Reference","previous_headings":"","what":"Extract survival status — .extract_surv_status","title":"Extract survival status — .extract_surv_status","text":"Extract status survival::Surv() object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-extract_surv_status.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Extract survival status — .extract_surv_status","text":"surv single survival::Surv() object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-extract_surv_status.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Extract survival status — .extract_surv_status","text":"numeric vector.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-extract_surv_time.html","id":null,"dir":"Reference","previous_headings":"","what":"Extract 
survival time — .extract_surv_time","title":"Extract survival time — .extract_surv_time","text":"Extract time component(s) survival::Surv() object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-extract_surv_time.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Extract survival time — .extract_surv_time","text":"surv single survival::Surv() object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-extract_surv_time.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Extract survival time — .extract_surv_time","text":"vector type \"right\" \"left\" tibble otherwise.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-model_param_name_key.html","id":null,"dir":"Reference","previous_headings":"","what":"Translate names of model tuning parameters — .model_param_name_key","title":"Translate names of model tuning parameters — .model_param_name_key","text":"function creates key connects identifiers users make tuning parameter names, standardized parsnip parameter names, argument names underlying fit function engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-model_param_name_key.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Translate names of model tuning parameters — .model_param_name_key","text":"","code":".model_param_name_key(object, as_tibble = TRUE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-model_param_name_key.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Translate names of model tuning parameters — .model_param_name_key","text":"object workflow parsnip model specification. as_tibble logical. 
results tibble (default) list can facilitate renaming grid objects?","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-model_param_name_key.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Translate names of model tuning parameters — .model_param_name_key","text":"tibble columns user, parsnip, engine, list named character vectors user_to_parsnip parsnip_to_engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/dot-model_param_name_key.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Translate names of model tuning parameters — .model_param_name_key","text":"","code":"mod <- linear_reg(penalty = tune(\"regularization\"), mixture = tune()) %>% set_engine(\"glmnet\") mod %>% .model_param_name_key() #> # A tibble: 2 × 3 #> user parsnip engine #> #> 1 regularization penalty lambda #> 2 mixture mixture alpha rn <- mod %>% .model_param_name_key(as_tibble = FALSE) rn #> $user_to_parsnip #> penalty mixture #> \"regularization\" \"mixture\" #> #> $parsnip_to_engine #> lambda alpha #> \"penalty\" \"mixture\" #> grid <- tidyr::crossing(regularization = c(0, 1), mixture = (0:3) / 3) grid %>% dplyr::rename(!!!rn$user_to_parsnip) #> # A tibble: 8 × 2 #> penalty mixture #> #> 1 0 0 #> 2 0 0.333 #> 3 0 0.667 #> 4 0 1 #> 5 1 0 #> 6 1 0.333 #> 7 1 0.667 #> 8 1 1 grid %>% dplyr::rename(!!!rn$user_to_parsnip) %>% dplyr::rename(!!!rn$parsnip_to_engine) #> # A tibble: 8 × 2 #> lambda alpha #> #> 1 0 0 #> 2 0 0.333 #> 3 0 0.667 #> 4 0 1 #> 5 1 0 #> 6 1 0.333 #> 7 1 0.667 #> 8 1 1"},{"path":"https://parsnip.tidymodels.org/dev/reference/eval_args.html","id":null,"dir":"Reference","previous_headings":"","what":"Evaluate parsnip model arguments — eval_args","title":"Evaluate parsnip model arguments — eval_args","text":"Evaluate parsnip model 
arguments","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/eval_args.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Evaluate parsnip model arguments — eval_args","text":"","code":"eval_args(spec, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/eval_args.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Evaluate parsnip model arguments — eval_args","text":"spec model specification ... used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/extension-check-helpers.html","id":null,"dir":"Reference","previous_headings":"","what":"Model Specification Checking: — spec_is_possible","title":"Model Specification Checking: — spec_is_possible","text":"helpers spec_is_possible(), spec_is_loaded(), prompt_missing_implementation() provide tooling checking model specifications. addition spec, engine, mode arguments, functions take arguments user_specified_engine user_specified_mode, denoting whether user specified engine mode, respectively.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/extension-check-helpers.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Model Specification Checking: — spec_is_possible","text":"","code":"spec_is_possible( spec, engine = spec$engine, user_specified_engine = spec$user_specified_engine, mode = spec$mode, user_specified_mode = spec$user_specified_mode ) spec_is_loaded( spec, engine = spec$engine, user_specified_engine = spec$user_specified_engine, mode = spec$mode, user_specified_mode = spec$user_specified_mode ) prompt_missing_implementation( spec, engine = spec$engine, user_specified_engine = spec$user_specified_engine, mode = spec$mode, user_specified_mode = spec$user_specified_mode, prompt, ... 
)"},{"path":"https://parsnip.tidymodels.org/dev/reference/extension-check-helpers.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Model Specification Checking: — spec_is_possible","text":"spec_is_possible() checks union current parsnip model environment model_info_table \"pre-registered\" model specifications determine whether model well-specified. See parsnip:::model_info_table table. spec_is_loaded() checks current parsnip model environment. spec_is_possible() executed automatically new_model_spec(), set_mode(), set_engine(), spec_is_loaded() executed automatically print.model_spec(), among places. spec_is_possible() used model specification still \"progress\" specified, spec_is_loaded called parsnip extension receives indication user \"done\" specifying model specification: print, fit, addition workflow, extract_*(), example. spec_is_loaded() FALSE, prompt_missing_implementation() helper construct informative message prompt users load install needed packages. prompt argument refers prompting function use, usually cli::cli_inform cli::cli_abort, ellipses passed function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/extract-parsnip.html","id":null,"dir":"Reference","previous_headings":"","what":"Extract elements of a parsnip model object — extract-parsnip","title":"Extract elements of a parsnip model object — extract-parsnip","text":"functions extract various elements parsnip object. exist yet, error thrown. extract_spec_parsnip() returns parsnip model specification. extract_fit_engine() returns engine specific fit embedded within parsnip model fit. example, using linear_reg() \"lm\" engine, returns underlying lm object. extract_parameter_dials() returns single dials parameter object. 
extract_parameter_set_dials() returns set dials parameter objects.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/extract-parsnip.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Extract elements of a parsnip model object — extract-parsnip","text":"","code":"# S3 method for model_fit extract_spec_parsnip(x, ...) # S3 method for model_fit extract_fit_engine(x, ...) # S3 method for model_spec extract_parameter_set_dials(x, ...) # S3 method for model_spec extract_parameter_dials(x, parameter, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/extract-parsnip.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Extract elements of a parsnip model object — extract-parsnip","text":"x parsnip model_fit object parsnip model_spec object. ... currently used. parameter single string parameter ID.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/extract-parsnip.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Extract elements of a parsnip model object — extract-parsnip","text":"extracted value parsnip object, x, described description section.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/extract-parsnip.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Extract elements of a parsnip model object — extract-parsnip","text":"Extracting underlying engine fit can helpful describing model (via print(), summary(), plot(), etc.) variable importance/explainers. However, users invoke predict() method extracted model. may preprocessing operations parsnip executed data prior giving model. Bypassing can lead errors silently generating incorrect predictions. 
Good: Bad:","code":"parsnip_fit %>% predict(new_data) parsnip_fit %>% extract_fit_engine() %>% predict(new_data)"},{"path":"https://parsnip.tidymodels.org/dev/reference/extract-parsnip.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Extract elements of a parsnip model object — extract-parsnip","text":"","code":"lm_spec <- linear_reg() %>% set_engine(\"lm\") lm_fit <- fit(lm_spec, mpg ~ ., data = mtcars) lm_spec #> Linear Regression Model Specification (regression) #> #> Computational engine: lm #> extract_spec_parsnip(lm_fit) #> Linear Regression Model Specification (regression) #> #> Computational engine: lm #> #> Model fit template: #> stats::lm(formula = missing_arg(), data = missing_arg(), weights = missing_arg()) extract_fit_engine(lm_fit) #> #> Call: #> stats::lm(formula = mpg ~ ., data = data) #> #> Coefficients: #> (Intercept) cyl disp hp drat #> 12.30337 -0.11144 0.01334 -0.02148 0.78711 #> wt qsec vs am gear #> -3.71530 0.82104 0.31776 2.52023 0.65541 #> carb #> -0.19942 #> lm(mpg ~ ., data = mtcars) #> #> Call: #> lm(formula = mpg ~ ., data = mtcars) #> #> Coefficients: #> (Intercept) cyl disp hp drat #> 12.30337 -0.11144 0.01334 -0.02148 0.78711 #> wt qsec vs am gear #> -3.71530 0.82104 0.31776 2.52023 0.65541 #> carb #> -0.19942 #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/fit.html","id":null,"dir":"Reference","previous_headings":"","what":"Fit a Model Specification to a Dataset — fit.model_spec","title":"Fit a Model Specification to a Dataset — fit.model_spec","text":"fit() fit_xy() take model specification, translate required code substituting arguments, execute model fit routine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/fit.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Fit a Model Specification to a Dataset — fit.model_spec","text":"","code":"# S3 method for model_spec fit( object, formula, data, case_weights = NULL, control 
= control_parsnip(), ... ) # S3 method for model_spec fit_xy(object, x, y, case_weights = NULL, control = control_parsnip(), ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/fit.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Fit a Model Specification to a Dataset — fit.model_spec","text":"object object class model_spec chosen engine (via set_engine()). formula object class formula (one can coerced class): symbolic description model fitted. data Optional, depending interface (see Details ). data frame containing relevant variables (e.g. outcome(s), predictors, case weights, etc). Note: needed, named argument used. case_weights optional classed vector numeric case weights. must return TRUE hardhat::is_case_weights() run . See hardhat::frequency_weights() hardhat::importance_weights() examples. control named list elements verbosity catch. See control_parsnip(). ... currently used; values passed ignored. options required fit model passed using set_engine(). x matrix, sparse matrix, data frame predictors. models support sparse matrix input. See parsnip::get_encoding() details. x column names. y vector, matrix data frame outcome data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/fit.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Fit a Model Specification to a Dataset — fit.model_spec","text":"model_fit object contains several elements: lvl: outcome factor, contains factor levels time model fitting. spec: model specification object (object call fit) fit: model executed without error, model object. Otherwise, try-error object error message. preproc: objects needed convert formula non-formula interface (terms object) return value also class related fitted model (e.g. 
\"_glm\") base class \"model_fit\".","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/fit.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Fit a Model Specification to a Dataset — fit.model_spec","text":"fit() fit_xy() substitute current arguments model specification computational engine's code, check validity, fit model using data engine-specific code. Different model functions different interfaces (e.g. formula x/y) functions translate interface used fit() fit_xy() invoked one required underlying model. possible, functions attempt avoid making copies data. example, underlying model uses formula fit() invoked, original data references model fit. However, underlying model uses something else, x/y, formula evaluated data converted required format. case, calls resulting model objects reference temporary objects used fit model. model engine set, model's default engine used (discussed model page). verbosity option control_parsnip() greater zero, warning produced. like use alternative method generating contrasts supplying formula fit(), set global option contrasts preferred method. example, might set : options(contrasts = c(unordered = \"contr.helmert\", ordered = \"contr.poly\")). See help page stats::contr.treatment() possible contrast types. models \"censored regression\" modes, additional computation executed saved parsnip object. censor_probs element contains \"reverse Kaplan-Meier\" curve models probability censoring. 
may used later compute inverse probability censoring weights performance measures.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/fit.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Fit a Model Specification to a Dataset — fit.model_spec","text":"","code":"# Although `glm()` only has a formula interface, different # methods for specifying the model can be used library(dplyr) library(modeldata) data(\"lending_club\") lr_mod <- logistic_reg() using_formula <- lr_mod %>% set_engine(\"glm\") %>% fit(Class ~ funded_amnt + int_rate, data = lending_club) using_xy <- lr_mod %>% set_engine(\"glm\") %>% fit_xy(x = lending_club[, c(\"funded_amnt\", \"int_rate\")], y = lending_club$Class) using_formula #> parsnip model object #> #> #> Call: stats::glm(formula = Class ~ funded_amnt + int_rate, family = stats::binomial, #> data = data) #> #> Coefficients: #> (Intercept) funded_amnt int_rate #> 5.131e+00 2.767e-06 -1.586e-01 #> #> Degrees of Freedom: 9856 Total (i.e. Null); 9854 Residual #> Null Deviance:\t 4055 #> Residual Deviance: 3698 \tAIC: 3704 using_xy #> parsnip model object #> #> #> Call: stats::glm(formula = ..y ~ ., family = stats::binomial, data = data) #> #> Coefficients: #> (Intercept) funded_amnt int_rate #> 5.131e+00 2.767e-06 -1.586e-01 #> #> Degrees of Freedom: 9856 Total (i.e. 
Null); 9854 Residual #> Null Deviance:\t 4055 #> Residual Deviance: 3698 \tAIC: 3704"},{"path":"https://parsnip.tidymodels.org/dev/reference/fit_control.html","id":null,"dir":"Reference","previous_headings":"","what":"Control the fit function — fit_control","title":"Control the fit function — fit_control","text":"Pass options fit.model_spec() function control output computations","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/fit_control.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Control the fit function — fit_control","text":"","code":"fit_control(verbosity = 1L, catch = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/fit_control.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Control the fit function — fit_control","text":"verbosity integer control verbose output . value zero, messages output shown packages loaded model fit. value 1, package loading quiet model fits can produce output screen (depending contain verbose-type argument). value 2 , output displayed execution time fit recorded printed. catch logical value TRUE evaluate model inside try(, silent = TRUE). 
model fails, object still returned (without error) inherits class \"try-error\".","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/fit_control.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Control the fit function — fit_control","text":"S3 object class \"control_parsnip\" named list results function call","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/fit_control.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Control the fit function — fit_control","text":"fit_control() deprecated favor control_parsnip().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/fit_control.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Control the fit function — fit_control","text":"","code":"fit_control(verbosity = 2L) #> Warning: `fit_control()` was deprecated in parsnip 0.1.8. #> ℹ Please use `control_parsnip()` instead. #> parsnip control object #> - verbose level 2"},{"path":"https://parsnip.tidymodels.org/dev/reference/format-internals.html","id":null,"dir":"Reference","previous_headings":"","what":"Internal functions that format predictions — format-internals","title":"Internal functions that format predictions — format-internals","text":"used ensure appropriate column names inside tibbles.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/format-internals.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Internal functions that format predictions — format-internals","text":"","code":"format_num(x) format_class(x) format_classprobs(x) format_time(x) format_survival(x) format_linear_pred(x) format_hazard(x)"},{"path":"https://parsnip.tidymodels.org/dev/reference/format-internals.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Internal functions that format predictions — format-internals","text":"x data frame 
vector (depending context function).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/format-internals.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Internal functions that format predictions — format-internals","text":"tibble","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/gen_additive_mod.html","id":null,"dir":"Reference","previous_headings":"","what":"Generalized additive models (GAMs) — gen_additive_mod","title":"Generalized additive models (GAMs) — gen_additive_mod","text":"gen_additive_mod() defines model can use smoothed functions numeric predictors generalized linear model. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . mgcv¹ information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/gen_additive_mod.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Generalized additive models (GAMs) — gen_additive_mod","text":"","code":"gen_additive_mod( mode = \"unknown\", select_features = NULL, adjust_deg_free = NULL, engine = \"mgcv\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/gen_additive_mod.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Generalized additive models (GAMs) — gen_additive_mod","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". select_features TRUE FALSE. TRUE, model ability eliminate predictor (via penalization). Increasing adjust_deg_free increase likelihood removing predictors. adjust_deg_free select_features = TRUE, acts multiplier smoothness. Increase beyond 1 produce smoother models. 
engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/gen_additive_mod.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Generalized additive models (GAMs) — gen_additive_mod","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 gen_additive_mod(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/gen_additive_mod.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Generalized additive models (GAMs) — gen_additive_mod","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/gen_additive_mod.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Generalized additive models (GAMs) — gen_additive_mod","text":"","code":"show_engines(\"gen_additive_mod\") #> # A tibble: 2 × 2 #> engine mode #> #> 1 mgcv regression #> 2 mgcv classification gen_additive_mod() #> GAM Model Specification (unknown mode) #> #> Computational engine: mgcv #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/get_model_env.html","id":null,"dir":"Reference","previous_headings":"","what":"Working with the parsnip model environment — get_model_env","title":"Working with the parsnip model environment — get_model_env","text":"functions read write environment package stores information model specifications.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/get_model_env.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Working with the 
parsnip model environment — get_model_env","text":"","code":"get_model_env() get_from_env(items) set_in_env(...) set_env_val(name, value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/get_model_env.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Working with the parsnip model environment — get_model_env","text":"items character string objects model environment. ... Named values assigned model environment. name single character value new symbol model environment. value single value new value model environment.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/get_model_env.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Working with the parsnip model environment — get_model_env","text":"\"build parsnip model\" https://www.tidymodels.org/learn/develop/models/","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/get_model_env.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Working with the parsnip model environment — get_model_env","text":"","code":"# Access the model data: current_code <- get_model_env() ls(envir = current_code) #> [1] \"C5_rules\" \"C5_rules_args\" #> [3] \"C5_rules_fit\" \"C5_rules_modes\" #> [5] \"C5_rules_pkgs\" \"C5_rules_predict\" #> [7] \"auto_ml\" \"auto_ml_args\" #> [9] \"auto_ml_fit\" \"auto_ml_modes\" #> [11] \"auto_ml_pkgs\" \"auto_ml_predict\" #> [13] \"bag_mars\" \"bag_mars_args\" #> [15] \"bag_mars_fit\" \"bag_mars_modes\" #> [17] \"bag_mars_pkgs\" \"bag_mars_predict\" #> [19] \"bag_mlp\" \"bag_mlp_args\" #> [21] \"bag_mlp_fit\" \"bag_mlp_modes\" #> [23] \"bag_mlp_pkgs\" \"bag_mlp_predict\" #> [25] \"bag_tree\" \"bag_tree_args\" #> [27] \"bag_tree_fit\" \"bag_tree_modes\" #> [29] \"bag_tree_pkgs\" \"bag_tree_predict\" #> [31] \"bart\" \"bart_args\" #> [33] \"bart_encoding\" \"bart_fit\" #> [35] \"bart_modes\" \"bart_pkgs\" #> [37] \"bart_predict\" \"boost_tree\" 
#> [39] \"boost_tree_args\" \"boost_tree_encoding\" #> [41] \"boost_tree_fit\" \"boost_tree_modes\" #> [43] \"boost_tree_pkgs\" \"boost_tree_predict\" #> [45] \"cubist_rules\" \"cubist_rules_args\" #> [47] \"cubist_rules_fit\" \"cubist_rules_modes\" #> [49] \"cubist_rules_pkgs\" \"cubist_rules_predict\" #> [51] \"decision_tree\" \"decision_tree_args\" #> [53] \"decision_tree_encoding\" \"decision_tree_fit\" #> [55] \"decision_tree_modes\" \"decision_tree_pkgs\" #> [57] \"decision_tree_predict\" \"discrim_flexible\" #> [59] \"discrim_flexible_args\" \"discrim_flexible_fit\" #> [61] \"discrim_flexible_modes\" \"discrim_flexible_pkgs\" #> [63] \"discrim_flexible_predict\" \"discrim_linear\" #> [65] \"discrim_linear_args\" \"discrim_linear_fit\" #> [67] \"discrim_linear_modes\" \"discrim_linear_pkgs\" #> [69] \"discrim_linear_predict\" \"discrim_quad\" #> [71] \"discrim_quad_args\" \"discrim_quad_fit\" #> [73] \"discrim_quad_modes\" \"discrim_quad_pkgs\" #> [75] \"discrim_quad_predict\" \"discrim_regularized\" #> [77] \"discrim_regularized_args\" \"discrim_regularized_fit\" #> [79] \"discrim_regularized_modes\" \"discrim_regularized_pkgs\" #> [81] \"discrim_regularized_predict\" \"gen_additive_mod\" #> [83] \"gen_additive_mod_args\" \"gen_additive_mod_encoding\" #> [85] \"gen_additive_mod_fit\" \"gen_additive_mod_modes\" #> [87] \"gen_additive_mod_pkgs\" \"gen_additive_mod_predict\" #> [89] \"linear_reg\" \"linear_reg_args\" #> [91] \"linear_reg_encoding\" \"linear_reg_fit\" #> [93] \"linear_reg_modes\" \"linear_reg_pkgs\" #> [95] \"linear_reg_predict\" \"logistic_reg\" #> [97] \"logistic_reg_args\" \"logistic_reg_encoding\" #> [99] \"logistic_reg_fit\" \"logistic_reg_modes\" #> [101] \"logistic_reg_pkgs\" \"logistic_reg_predict\" #> [103] \"mars\" \"mars_args\" #> [105] \"mars_encoding\" \"mars_fit\" #> [107] \"mars_modes\" \"mars_pkgs\" #> [109] \"mars_predict\" \"mlp\" #> [111] \"mlp_args\" \"mlp_encoding\" #> [113] \"mlp_fit\" \"mlp_modes\" #> [115] \"mlp_pkgs\" 
\"mlp_predict\" #> [117] \"models\" \"modes\" #> [119] \"multinom_reg\" \"multinom_reg_args\" #> [121] \"multinom_reg_encoding\" \"multinom_reg_fit\" #> [123] \"multinom_reg_modes\" \"multinom_reg_pkgs\" #> [125] \"multinom_reg_predict\" \"naive_Bayes\" #> [127] \"naive_Bayes_args\" \"naive_Bayes_fit\" #> [129] \"naive_Bayes_modes\" \"naive_Bayes_pkgs\" #> [131] \"naive_Bayes_predict\" \"nearest_neighbor\" #> [133] \"nearest_neighbor_args\" \"nearest_neighbor_encoding\" #> [135] \"nearest_neighbor_fit\" \"nearest_neighbor_modes\" #> [137] \"nearest_neighbor_pkgs\" \"nearest_neighbor_predict\" #> [139] \"null_model\" \"null_model_args\" #> [141] \"null_model_encoding\" \"null_model_fit\" #> [143] \"null_model_modes\" \"null_model_pkgs\" #> [145] \"null_model_predict\" \"pls\" #> [147] \"pls_args\" \"pls_fit\" #> [149] \"pls_modes\" \"pls_pkgs\" #> [151] \"pls_predict\" \"poisson_reg\" #> [153] \"poisson_reg_args\" \"poisson_reg_fit\" #> [155] \"poisson_reg_modes\" \"poisson_reg_pkgs\" #> [157] \"poisson_reg_predict\" \"proportional_hazards\" #> [159] \"proportional_hazards_args\" \"proportional_hazards_fit\" #> [161] \"proportional_hazards_modes\" \"proportional_hazards_pkgs\" #> [163] \"proportional_hazards_predict\" \"rand_forest\" #> [165] \"rand_forest_args\" \"rand_forest_encoding\" #> [167] \"rand_forest_fit\" \"rand_forest_modes\" #> [169] \"rand_forest_pkgs\" \"rand_forest_predict\" #> [171] \"rule_fit\" \"rule_fit_args\" #> [173] \"rule_fit_fit\" \"rule_fit_modes\" #> [175] \"rule_fit_pkgs\" \"rule_fit_predict\" #> [177] \"surv_reg\" \"surv_reg_args\" #> [179] \"surv_reg_encoding\" \"surv_reg_fit\" #> [181] \"surv_reg_modes\" \"surv_reg_pkgs\" #> [183] \"surv_reg_predict\" \"survival_reg\" #> [185] \"survival_reg_args\" \"survival_reg_fit\" #> [187] \"survival_reg_modes\" \"survival_reg_pkgs\" #> [189] \"survival_reg_predict\" \"svm_linear\" #> [191] \"svm_linear_args\" \"svm_linear_encoding\" #> [193] \"svm_linear_fit\" \"svm_linear_modes\" #> [195] 
\"svm_linear_pkgs\" \"svm_linear_predict\" #> [197] \"svm_poly\" \"svm_poly_args\" #> [199] \"svm_poly_encoding\" \"svm_poly_fit\" #> [201] \"svm_poly_modes\" \"svm_poly_pkgs\" #> [203] \"svm_poly_predict\" \"svm_rbf\" #> [205] \"svm_rbf_args\" \"svm_rbf_encoding\" #> [207] \"svm_rbf_fit\" \"svm_rbf_modes\" #> [209] \"svm_rbf_pkgs\" \"svm_rbf_predict\""},{"path":"https://parsnip.tidymodels.org/dev/reference/glance.model_fit.html","id":null,"dir":"Reference","previous_headings":"","what":"Construct a single row summary ","title":"Construct a single row summary ","text":"method glances model parsnip model object, exists.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glance.model_fit.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Construct a single row summary ","text":"","code":"# S3 method for model_fit glance(x, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/glance.model_fit.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Construct a single row summary ","text":"x model R object convert single-row data frame ... arguments passed methods","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glance.model_fit.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Construct a single row summary ","text":"tibble","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glm_grouped.html","id":null,"dir":"Reference","previous_headings":"","what":"Fit a grouped binomial outcome from a data set with case weights — glm_grouped","title":"Fit a grouped binomial outcome from a data set with case weights — glm_grouped","text":"stats::glm() assumes tabular data set case weights corresponds \"different observations different dispersions\" (see ?glm). cases, case weights reflect covariate pattern observed multiple times (.e., frequency weights). 
case, stats::glm() expects data formatted number events factor level outcome can given formula cbind(events_1, events_2). glm_grouped() converts data integer case weights expected \"number events\" format binomial data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glm_grouped.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Fit a grouped binomial outcome from a data set with case weights — glm_grouped","text":"","code":"glm_grouped(formula, data, weights, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/glm_grouped.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Fit a grouped binomial outcome from a data set with case weights — glm_grouped","text":"formula formula object one outcome two-level factors. data data frame outcomes predictors (case weights). weights integer vector weights whose length number rows data. non-integer numeric, converted integer (warning). ... Options pass stats::glm(). family set, automatically assigned basic binomial family.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glm_grouped.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Fit a grouped binomial outcome from a data set with case weights — glm_grouped","text":"object produced stats::glm().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glm_grouped.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Fit a grouped binomial outcome from a data set with case weights — glm_grouped","text":"","code":"#---------------------------------------------------------------------------- # The same data set formatted three ways # First with basic case weights that, from ?glm, are used inappropriately. 
ucb_weighted <- as.data.frame(UCBAdmissions) ucb_weighted$Freq <- as.integer(ucb_weighted$Freq) head(ucb_weighted) #> Admit Gender Dept Freq #> 1 Admitted Male A 512 #> 2 Rejected Male A 313 #> 3 Admitted Female A 89 #> 4 Rejected Female A 19 #> 5 Admitted Male B 353 #> 6 Rejected Male B 207 nrow(ucb_weighted) #> [1] 24 # Format when yes/no data are in individual rows (probably still inappropriate) library(tidyr) ucb_long <- uncount(ucb_weighted, Freq) head(ucb_long) #> Admit Gender Dept #> 1 Admitted Male A #> 2 Admitted Male A #> 3 Admitted Male A #> 4 Admitted Male A #> 5 Admitted Male A #> 6 Admitted Male A nrow(ucb_long) #> [1] 4526 # Format where the outcome is formatted as number of events ucb_events <- ucb_weighted %>% tidyr::pivot_wider( id_cols = c(Gender, Dept), names_from = Admit, values_from = Freq, values_fill = 0L ) head(ucb_events) #> # A tibble: 6 × 4 #> Gender Dept Admitted Rejected #> #> 1 Male A 512 313 #> 2 Female A 89 19 #> 3 Male B 353 207 #> 4 Female B 17 8 #> 5 Male C 120 205 #> 6 Female C 202 391 nrow(ucb_events) #> [1] 12 #---------------------------------------------------------------------------- # Different model fits # Treat data as separate Bernoulli data: glm(Admit ~ Gender + Dept, data = ucb_long, family = binomial) #> #> Call: glm(formula = Admit ~ Gender + Dept, family = binomial, data = ucb_long) #> #> Coefficients: #> (Intercept) GenderFemale DeptB DeptC DeptD #> -0.58205 -0.09987 0.04340 1.26260 1.29461 #> DeptE DeptF #> 1.73931 3.30648 #> #> Degrees of Freedom: 4525 Total (i.e. 
Null); 4519 Residual #> Null Deviance:\t 6044 #> Residual Deviance: 5187 \tAIC: 5201 # Weights produce the same statistics glm( Admit ~ Gender + Dept, data = ucb_weighted, family = binomial, weights = ucb_weighted$Freq ) #> #> Call: glm(formula = Admit ~ Gender + Dept, family = binomial, data = ucb_weighted, #> weights = ucb_weighted$Freq) #> #> Coefficients: #> (Intercept) GenderFemale DeptB DeptC DeptD #> -0.58205 -0.09987 0.04340 1.26260 1.29461 #> DeptE DeptF #> 1.73931 3.30648 #> #> Degrees of Freedom: 23 Total (i.e. Null); 17 Residual #> Null Deviance:\t 6044 #> Residual Deviance: 5187 \tAIC: 5201 # Data as binomial \"x events out of n trials\" format. Note that, to get the same # coefficients, the order of the levels must be reversed. glm( cbind(Rejected, Admitted) ~ Gender + Dept, data = ucb_events, family = binomial ) #> #> Call: glm(formula = cbind(Rejected, Admitted) ~ Gender + Dept, family = binomial, #> data = ucb_events) #> #> Coefficients: #> (Intercept) GenderFemale DeptB DeptC DeptD #> -0.58205 -0.09987 0.04340 1.26260 1.29461 #> DeptE DeptF #> 1.73931 3.30648 #> #> Degrees of Freedom: 11 Total (i.e. Null); 5 Residual #> Null Deviance:\t 877.1 #> Residual Deviance: 20.2 \tAIC: 103.1 # The new function that starts with frequency weights and gets the correct place: glm_grouped(Admit ~ Gender + Dept, data = ucb_weighted, weights = ucb_weighted$Freq) #> #> Call: glm(formula = formula, family = \"binomial\", data = data) #> #> Coefficients: #> (Intercept) GenderFemale DeptB DeptC DeptD #> -0.58205 -0.09987 0.04340 1.26260 1.29461 #> DeptE DeptF #> 1.73931 3.30648 #> #> Degrees of Freedom: 11 Total (i.e. 
Null); 5 Residual #> Null Deviance:\t 877.1 #> Residual Deviance: 20.2 \tAIC: 103.1"},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet-details.html","id":null,"dir":"Reference","previous_headings":"","what":"Technical aspects of the glmnet model — glmnet-details","title":"Technical aspects of the glmnet model — glmnet-details","text":"glmnet popular statistical model regularized generalized linear models. notes reflect common questions particular model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet-details.html","id":"tidymodels-and-glmnet","dir":"Reference","previous_headings":"","what":"tidymodels and glmnet","title":"Technical aspects of the glmnet model — glmnet-details","text":"implementation glmnet package nice features. example, one main tuning parameters, regularization penalty, need specified fitting model. package fits compendium values, called regularization path. values depend data set value alpha, mixture parameter pure ridge model (alpha = 0) pure lasso model (alpha = 1). predicting, penalty values can simultaneously predicted, even exactly regularization path. , model approximates closest path values produce prediction. argument called lambda glmnet() function used specify path. discussion , linear_reg() used. information true parsnip models \"glmnet\" engine.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet-details.html","id":"fitting-and-predicting-using-parsnip","dir":"Reference","previous_headings":"","what":"Fitting and predicting using parsnip","title":"Technical aspects of the glmnet model — glmnet-details","text":"Recall tidymodels uses standardized parameter names across models chosen low jargon. argument penalty equivalent glmnet calls lambda value mixture alpha value. tidymodels, predict() methods defined make one prediction time. model, means predictions single penalty value. reason, models glmnet engines require user always specify single penalty value model defined. 
example, linear regression: predict() method called, automatically uses penalty given model defined. example: However, penalty values can predicted simultaneously using multi_predict() method:","code":"linear_reg(penalty = 1) %>% set_engine(\"glmnet\") library(tidymodels) fit <- linear_reg(penalty = 1) %>% set_engine(\"glmnet\") %>% fit(mpg ~ ., data = mtcars) # predict at penalty = 1 predict(fit, mtcars[1:3,]) ## # A tibble: 3 × 1 ## .pred ## ## 1 22.2 ## 2 21.5 ## 3 24.9 # predict at c(0.00, 0.01) multi_predict(fit, mtcars[1:3,], penalty = c(0.00, 0.01)) ## # A tibble: 3 × 1 ## .pred ## ## 1 ## 2 ## 3 # unnested: multi_predict(fit, mtcars[1:3,], penalty = c(0.00, 0.01)) %>% add_rowindex() %>% unnest(cols = \".pred\") ## # A tibble: 6 × 3 ## penalty .pred .row ## ## 1 0 22.6 1 ## 2 0.01 22.5 1 ## 3 0 22.1 2 ## 4 0.01 22.1 2 ## 5 0 26.3 3 ## 6 0.01 26.3 3"},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet-details.html","id":"where-did-lambda-go-","dir":"Reference","previous_headings":"","what":"Where did lambda go?","title":"Technical aspects of the glmnet model — glmnet-details","text":"may appear odd lambda value get used fit: Internally, value penalty = 1 saved parsnip object value set lambda. enables full path fit glmnet(). See section setting path.","code":"linear_reg(penalty = 1) %>% set_engine(\"glmnet\") %>% translate() ## Linear Regression Model Specification (regression) ## ## Main Arguments: ## penalty = 1 ## ## Computational engine: glmnet ## ## Model fit template: ## glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), ## family = \"gaussian\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet-details.html","id":"how-do-i-set-the-regularization-path-","dir":"Reference","previous_headings":"","what":"How do I set the regularization path?","title":"Technical aspects of the glmnet model — glmnet-details","text":"Regardless value use penalty, full coefficient path used glmnet::glmnet() called. 
want manually set path? Normally, pass vector lambda glmnet::glmnet(). parsnip models use glmnet engine can use special optional argument called path_values. argument glmnet::glmnet(); used parsnip independently set path. example, found want fully ridge regression model (.e., mixture = 0), can get wrong coefficients path contain zero (see issue #431). want use path, argument passed engine-specific option:","code":"coef_path_values <- c(0, 10^seq(-5, 1, length.out = 7)) fit_ridge <- linear_reg(penalty = 1, mixture = 0) %>% set_engine(\"glmnet\", path_values = coef_path_values) %>% fit(mpg ~ ., data = mtcars) all.equal(sort(fit_ridge$fit$lambda), coef_path_values) ## [1] TRUE # predict at penalty = 1 predict(fit_ridge, mtcars[1:3,]) ## # A tibble: 3 × 1 ## .pred ## ## 1 22.1 ## 2 21.8 ## 3 26.6"},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet-details.html","id":"tidying-the-model-object","dir":"Reference","previous_headings":"","what":"Tidying the model object","title":"Technical aspects of the glmnet model — glmnet-details","text":"broom::tidy() function gives summary object tibble. tl;dr tidy() glmnet model produced parsnip gives coefficients value given penalty. parsnip makes model, gives extra class. Use tidy() method object, produces coefficients penalty originally requested: Note tidy() method glmnet objects broom package. 
used directly underlying glmnet object, returns coefficients path: can nice plots might contain penalty value interested .","code":"tidy(fit) ## # A tibble: 11 × 3 ## term estimate penalty ## ## 1 (Intercept) 35.3 1 ## 2 cyl -0.872 1 ## 3 disp 0 1 ## 4 hp -0.0101 1 ## 5 drat 0 1 ## 6 wt -2.59 1 ## # ℹ 5 more rows # Use the basic tidy() method for glmnet all_tidy_coefs <- broom:::tidy.glmnet(fit$fit) all_tidy_coefs ## # A tibble: 640 × 5 ## term step estimate lambda dev.ratio ## ## 1 (Intercept) 1 20.1 5.15 0 ## 2 (Intercept) 2 21.6 4.69 0.129 ## 3 (Intercept) 3 23.2 4.27 0.248 ## 4 (Intercept) 4 24.7 3.89 0.347 ## 5 (Intercept) 5 26.0 3.55 0.429 ## 6 (Intercept) 6 27.2 3.23 0.497 ## # ℹ 634 more rows length(unique(all_tidy_coefs$lambda)) ## [1] 79"},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet_helpers.html","id":null,"dir":"Reference","previous_headings":"","what":"Helper functions for checking the penalty of glmnet models — .check_glmnet_penalty_fit","title":"Helper functions for checking the penalty of glmnet models — .check_glmnet_penalty_fit","text":"functions developer use. .check_glmnet_penalty_fit() checks model specification fitting glmnet model contains single value. .check_glmnet_penalty_predict() checks penalty value used prediction valid. called predict(), needs single value. 
Multiple values allowed multi_predict().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet_helpers.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Helper functions for checking the penalty of glmnet models — .check_glmnet_penalty_fit","text":"","code":".check_glmnet_penalty_fit(x) .check_glmnet_penalty_predict(penalty = NULL, object, multi = FALSE)"},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet_helpers.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Helper functions for checking the penalty of glmnet models — .check_glmnet_penalty_fit","text":"x object class model_spec. penalty penalty value check. object object class model_fit. multi logical indicating multiple values allowed.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet_helpers_prediction.html","id":null,"dir":"Reference","previous_headings":"","what":"Organize glmnet predictions — .organize_glmnet_pred","title":"Organize glmnet predictions — .organize_glmnet_pred","text":"function developer use organizes predictions glmnet models.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet_helpers_prediction.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Organize glmnet predictions — .organize_glmnet_pred","text":"","code":".organize_glmnet_pred(x, object)"},{"path":"https://parsnip.tidymodels.org/dev/reference/glmnet_helpers_prediction.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Organize glmnet predictions — .organize_glmnet_pred","text":"x Predictions returned predict() method glmnet models. 
object object class model_fit.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/has_multi_predict.html","id":null,"dir":"Reference","previous_headings":"","what":"Tools for models that predict on sub-models — has_multi_predict","title":"Tools for models that predict on sub-models — has_multi_predict","text":"has_multi_predict() tests see object can make multiple predictions submodels object. multi_predict_args() returns names arguments multi_predict() model ().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/has_multi_predict.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Tools for models that predict on sub-models — has_multi_predict","text":"","code":"has_multi_predict(object, ...) # S3 method for default has_multi_predict(object, ...) # S3 method for model_fit has_multi_predict(object, ...) # S3 method for workflow has_multi_predict(object, ...) multi_predict_args(object, ...) # S3 method for default multi_predict_args(object, ...) # S3 method for model_fit multi_predict_args(object, ...) # S3 method for workflow multi_predict_args(object, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/has_multi_predict.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Tools for models that predict on sub-models — has_multi_predict","text":"object object test. ... 
currently used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/has_multi_predict.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Tools for models that predict on sub-models — has_multi_predict","text":"has_multi_predict() returns single logical value multi_predict_args() returns character vector argument names (NA none exist).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/has_multi_predict.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Tools for models that predict on sub-models — has_multi_predict","text":"","code":"lm_model_idea <- linear_reg() %>% set_engine(\"lm\") has_multi_predict(lm_model_idea) #> [1] FALSE lm_model_fit <- fit(lm_model_idea, mpg ~ ., data = mtcars) has_multi_predict(lm_model_fit) #> [1] FALSE multi_predict_args(lm_model_fit) #> [1] NA library(kknn) knn_fit <- nearest_neighbor(mode = \"regression\", neighbors = 5) %>% set_engine(\"kknn\") %>% fit(mpg ~ ., mtcars) multi_predict_args(knn_fit) #> [1] \"neighbors\" multi_predict(knn_fit, mtcars[1, -1], neighbors = 1:4)$.pred #> [[1]] #> # A tibble: 4 × 2 #> neighbors .pred #> #> 1 1 21 #> 2 2 21 #> 3 3 20.9 #> 4 4 21.0 #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/keras_mlp.html","id":null,"dir":"Reference","previous_headings":"","what":"Simple interface to MLP models via keras — keras_mlp","title":"Simple interface to MLP models via keras — keras_mlp","text":"Instead building keras model sequentially, keras_mlp can used create feedforward network single hidden layer. 
Regularization via either weight decay dropout.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/keras_mlp.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Simple interface to MLP models via keras — keras_mlp","text":"","code":"keras_mlp( x, y, hidden_units = 5, penalty = 0, dropout = 0, epochs = 20, activation = \"softmax\", seeds = sample.int(10^5, size = 3), ... )"},{"path":"https://parsnip.tidymodels.org/dev/reference/keras_mlp.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Simple interface to MLP models via keras — keras_mlp","text":"x data frame matrix predictors y vector (factor numeric) matrix (numeric) outcome data. hidden_units integer number hidden units. penalty non-negative real number amount weight decay. Either parameter dropout can specified. dropout proportion parameters set zero. Either parameter penalty can specified. epochs integer number passes data. activation character string type activation function layers. seeds vector three positive integers control randomness calculations. ... Additional named arguments pass keras::compile() keras::fit(). 
Arguments sorted passed either function internally.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/keras_mlp.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Simple interface to MLP models via keras — keras_mlp","text":"keras model object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/keras_predict_classes.html","id":null,"dir":"Reference","previous_headings":"","what":"Wrapper for keras class predictions — keras_predict_classes","title":"Wrapper for keras class predictions — keras_predict_classes","text":"Wrapper keras class predictions","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/keras_predict_classes.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Wrapper for keras class predictions — keras_predict_classes","text":"","code":"keras_predict_classes(object, x)"},{"path":"https://parsnip.tidymodels.org/dev/reference/keras_predict_classes.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Wrapper for keras class predictions — keras_predict_classes","text":"object keras model fit x data set.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/knit_engine_docs.html","id":null,"dir":"Reference","previous_headings":"","what":"Knit engine-specific documentation — knit_engine_docs","title":"Knit engine-specific documentation — knit_engine_docs","text":"Knit engine-specific documentation","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/knit_engine_docs.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Knit engine-specific documentation — knit_engine_docs","text":"","code":"knit_engine_docs(pattern = NULL)"},{"path":"https://parsnip.tidymodels.org/dev/reference/knit_engine_docs.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Knit engine-specific documentation — 
knit_engine_docs","text":"pattern regular expression specify files knit. default knits engine documentation files.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/knit_engine_docs.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Knit engine-specific documentation — knit_engine_docs","text":"tibble column file file name result (character vector echos output file name , failure, error message).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/linear_reg.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear regression — linear_reg","title":"Linear regression — linear_reg","text":"linear_reg() defines model can predict numeric values predictors using linear function. function can fit regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . lm¹ brulee gee² glm glmer² glmnet gls² h2o² keras lme² lmer² spark stan stan_glmer² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/linear_reg.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Linear regression — linear_reg","text":"","code":"linear_reg(mode = \"regression\", engine = \"lm\", penalty = NULL, mixture = NULL)"},{"path":"https://parsnip.tidymodels.org/dev/reference/linear_reg.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Linear regression — linear_reg","text":"mode single character string type model. possible value model \"regression\". engine single character string specifying computational engine use fitting. Possible engines listed . default model \"lm\". penalty non-negative number representing total amount regularization (specific engines ). mixture number zero one (inclusive) denoting proportion L1 regularization (.e. lasso) model. 
mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge. Available specific engines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/linear_reg.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear regression — linear_reg","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 linear_reg(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/linear_reg.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear regression — linear_reg","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/linear_reg.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear regression — linear_reg","text":"","code":"show_engines(\"linear_reg\") #> # A tibble: 7 × 2 #> engine mode #> #> 1 lm regression #> 2 glm regression #> 3 glmnet regression #> 4 stan regression #> 5 spark regression #> 6 keras regression #> 7 brulee regression linear_reg() #> Linear Regression Model Specification (regression) #> #> Computational engine: lm #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/list_md_problems.html","id":null,"dir":"Reference","previous_headings":"","what":"Locate and show errors/warnings in engine-specific documentation — list_md_problems","title":"Locate and show errors/warnings in engine-specific documentation — list_md_problems","text":"Locate show errors/warnings engine-specific 
documentation","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/list_md_problems.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Locate and show errors/warnings in engine-specific documentation — list_md_problems","text":"","code":"list_md_problems()"},{"path":"https://parsnip.tidymodels.org/dev/reference/list_md_problems.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Locate and show errors/warnings in engine-specific documentation — list_md_problems","text":"tibble column file file name, line indicating line error/warning occurred, problem showing error/warning message.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/logistic_reg.html","id":null,"dir":"Reference","previous_headings":"","what":"Logistic regression — logistic_reg","title":"Logistic regression — logistic_reg","text":"logistic_reg() defines generalized linear model binary outcomes. linear combination predictors used model log odds event. function can fit classification models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . glm¹ brulee gee² glmer² glmnet h2o² keras LiblineaR spark stan stan_glmer² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/logistic_reg.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Logistic regression — logistic_reg","text":"","code":"logistic_reg( mode = \"classification\", engine = \"glm\", penalty = NULL, mixture = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/logistic_reg.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Logistic regression — logistic_reg","text":"mode single character string type model. possible value model \"classification\". engine single character string specifying computational engine use fitting. 
Possible engines listed . default model \"glm\". penalty non-negative number representing total amount regularization (specific engines ). keras models, corresponds purely L2 regularization (aka weight decay) models can either combination L1 L2 (depending value mixture). mixture number zero one (inclusive) giving proportion L1 regularization (.e. lasso) model. mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge. Available specific engines . LiblineaR models, mixture must exactly 1 0 .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/logistic_reg.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Logistic regression — logistic_reg","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like : model fits classification model binary outcomes; multiclass outcomes, see multinom_reg().","code":"value <- 1 logistic_reg(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/logistic_reg.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Logistic regression — logistic_reg","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/logistic_reg.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Logistic regression — logistic_reg","text":"","code":"show_engines(\"logistic_reg\") #> # A tibble: 7 × 2 #> engine mode #> #> 1 glm classification #> 2 glmnet classification #> 3 LiblineaR classification #> 4 spark classification #> 5 keras classification #> 6 stan classification #> 7 brulee classification logistic_reg() #> Logistic Regression Model Specification (classification) #> #> Computational engine: glm #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/make_call.html","id":null,"dir":"Reference","previous_headings":"","what":"Make a parsnip call expression — make_call","title":"Make a parsnip call expression — make_call","text":"Make parsnip call expression","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/make_call.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Make a parsnip call expression — make_call","text":"","code":"make_call(fun, ns, args, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/make_call.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Make a parsnip call expression — make_call","text":"fun character string function name. ns character string package name. 
args named list argument values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/make_call.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Make a parsnip call expression — make_call","text":"call.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/make_call.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Make a parsnip call expression — make_call","text":"arguments spliced ns::fun() call. missing, null, single logical, spliced.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/make_classes.html","id":null,"dir":"Reference","previous_headings":"","what":"Prepend a new class — make_classes","title":"Prepend a new class — make_classes","text":"adds extra class base class \"model_spec\".","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/make_classes.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Prepend a new class — make_classes","text":"","code":"make_classes(prefix)"},{"path":"https://parsnip.tidymodels.org/dev/reference/make_classes.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Prepend a new class — make_classes","text":"prefix character string class.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/make_classes.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Prepend a new class — make_classes","text":"character vector.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/mars.html","id":null,"dir":"Reference","previous_headings":"","what":"Multivariate adaptive regression splines (MARS) — mars","title":"Multivariate adaptive regression splines (MARS) — mars","text":"mars() defines generalized linear model uses artificial features predictors. features resemble hinge functions result model segmented regression small dimensions. function can fit classification regression models. 
different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . earth¹ information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/mars.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Multivariate adaptive regression splines (MARS) — mars","text":"","code":"mars( mode = \"unknown\", engine = \"earth\", num_terms = NULL, prod_degree = NULL, prune_method = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/mars.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Multivariate adaptive regression splines (MARS) — mars","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". engine single character string specifying computational engine use fitting. num_terms number features retained final model, including intercept. prod_degree highest possible interaction degree. prune_method pruning method.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/mars.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multivariate adaptive regression splines (MARS) — mars","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 mars(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/mars.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multivariate adaptive regression splines (MARS) — mars","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/mars.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Multivariate adaptive regression splines (MARS) — mars","text":"","code":"show_engines(\"mars\") #> # A tibble: 2 × 2 #> engine mode #> #> 1 earth classification #> 2 earth regression mars(mode = \"regression\", num_terms = 5) #> MARS Model Specification (regression) #> #> Main Arguments: #> num_terms = 5 #> #> Computational engine: earth #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/max_mtry_formula.html","id":null,"dir":"Reference","previous_headings":"","what":"Determine largest value of mtry from formula.\nThis function potentially caps the value of mtry based on a formula and\ndata set. This is a safe approach for survival and/or multivariate models. — max_mtry_formula","title":"Determine largest value of mtry from formula.\nThis function potentially caps the value of mtry based on a formula and\ndata set. This is a safe approach for survival and/or multivariate models. — max_mtry_formula","text":"Determine largest value mtry formula. function potentially caps value mtry based formula data set. safe approach survival /multivariate models.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/max_mtry_formula.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Determine largest value of mtry from formula.\nThis function potentially caps the value of mtry based on a formula and\ndata set. 
This is a safe approach for survival and/or multivariate models. — max_mtry_formula","text":"","code":"max_mtry_formula(mtry, formula, data)"},{"path":"https://parsnip.tidymodels.org/dev/reference/max_mtry_formula.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Determine largest value of mtry from formula.\nThis function potentially caps the value of mtry based on a formula and\ndata set. This is a safe approach for survival and/or multivariate models. — max_mtry_formula","text":"mtry initial value mtry (may large). formula model formula. data training set (data frame).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/max_mtry_formula.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Determine largest value of mtry from formula.\nThis function potentially caps the value of mtry based on a formula and\ndata set. This is a safe approach for survival and/or multivariate models. — max_mtry_formula","text":"value mtry.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/max_mtry_formula.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Determine largest value of mtry from formula.\nThis function potentially caps the value of mtry based on a formula and\ndata set. This is a safe approach for survival and/or multivariate models. 
— max_mtry_formula","text":"","code":"# should be 9 max_mtry_formula(200, cbind(wt, mpg) ~ ., data = mtcars) #> [1] 9"},{"path":"https://parsnip.tidymodels.org/dev/reference/maybe_matrix.html","id":null,"dir":"Reference","previous_headings":"","what":"Fuzzy conversions — maybe_matrix","title":"Fuzzy conversions — maybe_matrix","text":"substitutes .matrix() .data.frame() leave sparse matrix -.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/maybe_matrix.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Fuzzy conversions — maybe_matrix","text":"","code":"maybe_matrix(x) maybe_data_frame(x)"},{"path":"https://parsnip.tidymodels.org/dev/reference/maybe_matrix.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Fuzzy conversions — maybe_matrix","text":"x data frame, matrix, sparse matrix.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/maybe_matrix.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Fuzzy conversions — maybe_matrix","text":"data frame, matrix, sparse matrix.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/min_cols.html","id":null,"dir":"Reference","previous_headings":"","what":"Execution-time data dimension checks — min_cols","title":"Execution-time data dimension checks — min_cols","text":"tuning parameters, range values depend data dimensions (e.g. mtry). packages fail parameter values outside ranges. Since model might receive resampled versions data, ranges set prior point model fit. 
functions check possible range data adjust needed (warning).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/min_cols.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Execution-time data dimension checks — min_cols","text":"","code":"min_cols(num_cols, source) min_rows(num_rows, source, offset = 0)"},{"path":"https://parsnip.tidymodels.org/dev/reference/min_cols.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Execution-time data dimension checks — min_cols","text":"num_cols, num_rows parameter value requested user. source data frame data used fit. source named \"data\", assumed one column data corresponds outcome (subtracted ). offset number subtracted number rows available data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/min_cols.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Execution-time data dimension checks — min_cols","text":"integer (perhaps warning).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/min_cols.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Execution-time data dimension checks — min_cols","text":"","code":"nearest_neighbor(neighbors= 100) %>% set_engine(\"kknn\") %>% set_mode(\"regression\") %>% translate() #> K-Nearest Neighbor Model Specification (regression) #> #> Main Arguments: #> neighbors = 100 #> #> Computational engine: kknn #> #> Model fit template: #> kknn::train.kknn(formula = missing_arg(), data = missing_arg(), #> ks = min_rows(100, data, 5)) library(ranger) rand_forest(mtry = 2, min_n = 100, trees = 3) %>% set_engine(\"ranger\") %>% set_mode(\"regression\") %>% fit(mpg ~ ., data = mtcars) #> Warning: 100 samples were requested but there were 32 rows in the data. 32 will be used. 
#> parsnip model object #> #> Ranger result #> #> Call: #> ranger::ranger(x = maybe_data_frame(x), y = y, mtry = min_cols(~2, x), num.trees = ~3, min.node.size = min_rows(~100, x), num.threads = 1, verbose = FALSE, seed = sample.int(10^5, 1)) #> #> Type: Regression #> Number of trees: 3 #> Sample size: 32 #> Number of independent variables: 10 #> Mtry: 2 #> Target node size: 32 #> Variable importance mode: none #> Splitrule: variance #> OOB prediction error (MSE): 39.02897 #> R squared (OOB): -0.07446488"},{"path":"https://parsnip.tidymodels.org/dev/reference/mlp.html","id":null,"dir":"Reference","previous_headings":"","what":"Single layer neural network — mlp","title":"Single layer neural network — mlp","text":"mlp() defines multilayer perceptron model (.k.. single layer, feed-forward neural network). function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . nnet¹ brulee h2o² keras information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/mlp.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Single layer neural network — mlp","text":"","code":"mlp( mode = \"unknown\", engine = \"nnet\", hidden_units = NULL, penalty = NULL, dropout = NULL, epochs = NULL, activation = NULL, learn_rate = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/mlp.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Single layer neural network — mlp","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". engine single character string specifying computational engine use fitting. hidden_units integer number units hidden model. penalty non-negative numeric value amount weight decay. 
dropout number 0 (inclusive) 1 denoting proportion model parameters randomly set zero model training. epochs integer number training iterations. activation single character string denoting type relationship original predictors hidden unit layer. activation function hidden output layers automatically set either \"linear\" \"softmax\" depending type outcome. Possible values : \"linear\", \"softmax\", \"relu\", \"elu\" learn_rate number rate boosting algorithm adapts iteration--iteration (specific engines ). sometimes referred shrinkage parameter.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/mlp.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Single layer neural network — mlp","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 mlp(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/mlp.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Single layer neural network — mlp","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/mlp.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Single layer neural network — mlp","text":"","code":"show_engines(\"mlp\") #> # A tibble: 6 × 2 #> engine mode #> #> 1 keras classification #> 2 keras regression #> 3 nnet classification #> 4 nnet regression #> 5 brulee classification #> 6 brulee regression mlp(mode = \"classification\", penalty = 0.01) #> Single Layer Neural Network Model Specification (classification) #> #> Main Arguments: #> penalty = 0.01 #> #> Computational engine: nnet 
#>"},{"path":"https://parsnip.tidymodels.org/dev/reference/model_db.html","id":null,"dir":"Reference","previous_headings":"","what":"parsnip model specification database — model_db","title":"parsnip model specification database — model_db","text":"used RStudio add-captures information mode specifications various R packages.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/model_db.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"parsnip model specification database — model_db","text":"model_db data frame","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/model_db.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"parsnip model specification database — model_db","text":"","code":"data(model_db)"},{"path":"https://parsnip.tidymodels.org/dev/reference/model_fit.html","id":null,"dir":"Reference","previous_headings":"","what":"Model Fit Object Information — model_fit","title":"Model Fit Object Information — model_fit","text":"object class \"model_fit\" container information model fit data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/model_fit.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Model Fit Object Information — model_fit","text":"main elements object : lvl: vector factor levels outcome factor. NULL outcome factor vector. spec: model_spec object. fit: object produced fitting function. preproc: contains data-specific information required process new sample point prediction. example, underlying model function requires arguments x y user passed formula fit, preproc object contain items terms object . information required, NA. discussed documentation model_spec, original arguments specification saved quosures. evaluated model_fit object prior fitting. resulting model object prints call, user-defined options shown call preceded tilde (see example ). result use quosures specification. 
class structure basis parsnip stores model objects seeing data applying model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/model_fit.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Model Fit Object Information — model_fit","text":"","code":"# Keep the `x` matrix if the data are not too big. spec_obj <- linear_reg() %>% set_engine(\"lm\", x = ifelse(.obs() < 500, TRUE, FALSE)) spec_obj #> Linear Regression Model Specification (regression) #> #> Engine-Specific Arguments: #> x = ifelse(.obs() < 500, TRUE, FALSE) #> #> Computational engine: lm #> fit_obj <- fit(spec_obj, mpg ~ ., data = mtcars) fit_obj #> parsnip model object #> #> #> Call: #> stats::lm(formula = mpg ~ ., data = data, x = ~ifelse(.obs() < #> 500, TRUE, FALSE)) #> #> Coefficients: #> (Intercept) cyl disp hp drat #> 12.30337 -0.11144 0.01334 -0.02148 0.78711 #> wt qsec vs am gear #> -3.71530 0.82104 0.31776 2.52023 0.65541 #> carb #> -0.19942 #> nrow(fit_obj$fit$x) #> [1] 32"},{"path":"https://parsnip.tidymodels.org/dev/reference/model_formula.html","id":null,"dir":"Reference","previous_headings":"","what":"Formulas with special terms in tidymodels — model_formula","title":"Formulas with special terms in tidymodels — model_formula","text":"R, formulas provide compact, symbolic notation specify model terms. Many modeling functions R make use \"specials\", nonstandard notations used formulas. Specials defined handled special case given modeling package. example, mgcv package, provides support generalized additive models R, defines function s() -lined formulas. can used like : example, s() special defines smoothing term mgcv package knows look preprocessing model input. parsnip package can handle specials without issue. 
analogous code specifying generalized additive model parsnip \"mgcv\" engine looks like: However, parsnip often used conjunction greater tidymodels package ecosystem, defines pre-processing infrastructure functionality via packages like hardhat recipes. specials defined many modeling packages introduce conflicts infrastructure. support specials also maintaining consistent syntax elsewhere ecosystem, tidymodels delineates two types formulas: preprocessing formulas model formulas. Preprocessing formulas specify input variables, model formulas determine model structure.","code":"mgcv::gam(mpg ~ wt + s(disp, k = 5), data = mtcars) gen_additive_mod() %>% set_mode(\"regression\") %>% set_engine(\"mgcv\") %>% fit(mpg ~ wt + s(disp, k = 5), data = mtcars)"},{"path":"https://parsnip.tidymodels.org/dev/reference/model_formula.html","id":"example","dir":"Reference","previous_headings":"","what":"Example","title":"Formulas with special terms in tidymodels — model_formula","text":"create preprocessing formula model formula, just remove specials, retaining references input variables . example: parsnip, use model formula: recipes, use preprocessing formula : recipes package supplies large variety preprocessing techniques may replace need specials altogether, cases. workflows, use preprocessing formula everywhere, pass model formula formula argument add_model(): workflow pass model formula parsnip, using preprocessor formula elsewhere. 
still use preprocessing formula added recipe preprocessor using add_recipe() instead formula via add_formula().","code":"model_formula <- mpg ~ wt + s(disp, k = 5) preproc_formula <- mpg ~ wt + disp model_spec <- gen_additive_mod() %>% set_mode(\"regression\") %>% set_engine(\"mgcv\") model_spec %>% fit(model_formula, data = mtcars) library(recipes) recipe(preproc_formula, mtcars) library(workflows) wflow <- workflow() %>% add_formula(preproc_formula) %>% add_model(model_spec, formula = model_formula) fit(wflow, data = mtcars)"},{"path":"https://parsnip.tidymodels.org/dev/reference/model_printer.html","id":null,"dir":"Reference","previous_headings":"","what":"Print helper for model objects — model_printer","title":"Print helper for model objects — model_printer","text":"common format function prints information model object (e.g. arguments, calls, packages, etc).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/model_printer.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Print helper for model objects — model_printer","text":"","code":"model_printer(x, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/model_printer.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Print helper for model objects — model_printer","text":"x model object. ... currently used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/model_spec.html","id":null,"dir":"Reference","previous_headings":"","what":"Model Specification Information — model_spec","title":"Model Specification Information — model_spec","text":"object class \"model_spec\" container information model fit.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/model_spec.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Model Specification Information — model_spec","text":"main elements object : args: vector main arguments model. 
names arguments may different counterparts n underlying model function. example, glmnet model, argument name amount penalty called \"penalty\" instead \"lambda\" make general usable across different types models (specific particular model function). elements args can tune() use tune package. information see https://www.tidymodels.org/start/tuning/. left defaults (NULL), arguments use underlying model functions default value. discussed , arguments args captured quosures immediately executed. ...: Optional model-function-specific parameters. args, quosures can tune(). mode: type model, \"regression\" \"classification\". modes added package adds functionality. method: slot filled later model's constructor function. generally contains lists information used create fit prediction code well required packages similar data. engine: character string declares exactly software used. can package name technology type. class structure basis parsnip stores model objects prior seeing data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/model_spec.html","id":"argument-details","dir":"Reference","previous_headings":"","what":"Argument Details","title":"Model Specification Information — model_spec","text":"important detail understand creating model specifications intended functionally independent data. true tuning parameters data dependent, model specification interact data . example, R functions immediately evaluate arguments. example, calling mean(dat_vec), object dat_vec immediately evaluated inside function. parsnip model functions . example, using execute ncol(mtcars) - 1 creating specification. can seen output: model functions save argument expressions associated environments (.k.. quosure) evaluated later either fit.model_spec() fit_xy.model_spec() called actual data. consequence strategy data required get parameter values must available model fit. two main ways can fail : data modified creation model specification model fit function invoked. 
model specification saved loaded new session data objects exist. best way avoid issues reference data objects global environment use data descriptors .cols(). Another way writing previous specification dependent specific data object evaluated immediately model fitting process begins. One less advantageous approach solving issue use quasiquotation. insert actual R object model specification might best idea data object small. example, using work (reproducible sessions) embeds entire mtcars data set mtry expression: However, object number columns , bad: information quosures quasiquotation can found https://adv-r.hadley.nz/quasiquotation.html.","code":"rand_forest(mtry = ncol(mtcars) - 1) > rand_forest(mtry = ncol(mtcars) - 1) Random Forest Model Specification (unknown) Main Arguments: mtry = ncol(mtcars) - 1 rand_forest(mtry = .cols() - 1) rand_forest(mtry = ncol(!!mtcars) - 1) > rand_forest(mtry = ncol(!!mtcars) - 1) Random Forest Model Specification (unknown) Main Arguments: mtry = ncol(structure(list(Sepal.Length = c(5.1, 4.9, 4.7, 4.6, 5, > mtry_val <- ncol(mtcars) - 1 > mtry_val [1] 10 > rand_forest(mtry = !!mtry_val) Random Forest Model Specification (unknown) Main Arguments: mtry = 10"},{"path":"https://parsnip.tidymodels.org/dev/reference/multi_predict.html","id":null,"dir":"Reference","previous_headings":"","what":"Model predictions across many sub-models — multi_predict","title":"Model predictions across many sub-models — multi_predict","text":"models, predictions can made sub-models model object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/multi_predict.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Model predictions across many sub-models — multi_predict","text":"","code":"multi_predict(object, ...) # S3 method for default multi_predict(object, ...) # S3 method for `_xgb.Booster` multi_predict(object, new_data, type = NULL, trees = NULL, ...) 
# S3 method for `_C5.0` multi_predict(object, new_data, type = NULL, trees = NULL, ...) # S3 method for `_elnet` multi_predict(object, new_data, type = NULL, penalty = NULL, ...) # S3 method for `_lognet` multi_predict(object, new_data, type = NULL, penalty = NULL, ...) # S3 method for `_multnet` multi_predict(object, new_data, type = NULL, penalty = NULL, ...) # S3 method for `_glmnetfit` multi_predict(object, new_data, type = NULL, penalty = NULL, ...) # S3 method for `_earth` multi_predict(object, new_data, type = NULL, num_terms = NULL, ...) # S3 method for `_torch_mlp` multi_predict(object, new_data, type = NULL, epochs = NULL, ...) # S3 method for `_train.kknn` multi_predict(object, new_data, type = NULL, neighbors = NULL, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/multi_predict.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Model predictions across many sub-models — multi_predict","text":"object model_fit object. ... Optional arguments pass predict.model_fit(type = \"raw\") type. new_data rectangular data object, data frame. type single character value NULL. Possible values \"numeric\", \"class\", \"prob\", \"conf_int\", \"pred_int\", \"quantile\", \"raw\". NULL, predict() choose appropriate value based model's mode. trees integer vector number trees ensemble. penalty numeric vector penalty values. num_terms integer vector number MARS terms retain. epochs integer vector number training epochs. neighbors integer vector number nearest neighbors.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/multi_predict.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Model predictions across many sub-models — multi_predict","text":"tibble number rows data predicted. list-column named .pred contains tibbles multiple rows per sub-model. Note , within tibbles, column names follow usual standard based prediction type (.e. 
.pred_class type = \"class\" ).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/multinom_reg.html","id":null,"dir":"Reference","previous_headings":"","what":"Multinomial regression — multinom_reg","title":"Multinomial regression — multinom_reg","text":"multinom_reg() defines model uses linear predictors predict multiclass data using multinomial distribution. function can fit classification models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . nnet¹ brulee glmnet h2o² keras spark information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/multinom_reg.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Multinomial regression — multinom_reg","text":"","code":"multinom_reg( mode = \"classification\", engine = \"nnet\", penalty = NULL, mixture = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/multinom_reg.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Multinomial regression — multinom_reg","text":"mode single character string type model. possible value model \"classification\". engine single character string specifying computational engine use fitting. Possible engines listed . default model \"nnet\". penalty non-negative number representing total amount regularization (specific engines ). keras models, corresponds purely L2 regularization (aka weight decay) models can combination L1 L2 (depending value mixture). mixture number zero one (inclusive) giving proportion L1 regularization (.e. lasso) model. mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge. 
Available specific engines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/multinom_reg.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Multinomial regression — multinom_reg","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like : model fits classification model multiclass outcomes; binary outcomes, see logistic_reg().","code":"value <- 1 multinom_reg(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/multinom_reg.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Multinomial regression — multinom_reg","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/multinom_reg.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Multinomial regression — multinom_reg","text":"","code":"show_engines(\"multinom_reg\") #> # A tibble: 5 × 2 #> engine mode #> #> 1 glmnet classification #> 2 spark classification #> 3 keras classification #> 4 nnet classification #> 5 brulee classification multinom_reg() #> Multinomial Regression Model Specification (classification) #> #> Computational engine: nnet #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/naive_Bayes.html","id":null,"dir":"Reference","previous_headings":"","what":"Naive Bayes models — naive_Bayes","title":"Naive Bayes models — naive_Bayes","text":"naive_Bayes() defines model uses Bayes' theorem compute probability class, given predictor values. function can fit classification models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
klaR¹² h2o² naivebayes² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/naive_Bayes.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Naive Bayes models — naive_Bayes","text":"","code":"naive_Bayes( mode = \"classification\", smoothness = NULL, Laplace = NULL, engine = \"klaR\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/naive_Bayes.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Naive Bayes models — naive_Bayes","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". smoothness non-negative number representing relative smoothness class boundary. Smaller examples result model flexible boundaries larger values generate class boundaries less adaptable Laplace non-negative value Laplace correction smoothing low-frequency counts. engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/naive_Bayes.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Naive Bayes models — naive_Bayes","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 naive_Bayes(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/naive_Bayes.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Naive Bayes models — naive_Bayes","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/nearest_neighbor.html","id":null,"dir":"Reference","previous_headings":"","what":"K-nearest neighbors — nearest_neighbor","title":"K-nearest neighbors — nearest_neighbor","text":"nearest_neighbor() defines model uses K similar data points training set predict new samples. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . kknn¹ information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/nearest_neighbor.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"K-nearest neighbors — nearest_neighbor","text":"","code":"nearest_neighbor( mode = \"unknown\", engine = \"kknn\", neighbors = NULL, weight_func = NULL, dist_power = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/nearest_neighbor.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"K-nearest neighbors — nearest_neighbor","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". engine single character string specifying computational engine use fitting. neighbors single integer number neighbors consider (often called k). kknn, value 5 used neighbors specified. weight_func single character type kernel function used weight distances samples. 
Valid choices : \"rectangular\", \"triangular\", \"epanechnikov\", \"biweight\", \"triweight\", \"cos\", \"inv\", \"gaussian\", \"rank\", \"optimal\". dist_power single number parameter used calculating Minkowski distance.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/nearest_neighbor.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"K-nearest neighbors — nearest_neighbor","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 nearest_neighbor(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/nearest_neighbor.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"K-nearest neighbors — nearest_neighbor","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/nearest_neighbor.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"K-nearest neighbors — nearest_neighbor","text":"","code":"show_engines(\"nearest_neighbor\") #> # A tibble: 2 × 2 #> engine mode #> #> 1 kknn classification #> 2 kknn regression nearest_neighbor(neighbors = 11) #> K-Nearest Neighbor Model Specification (unknown mode) #> #> Main Arguments: #> neighbors = 11 #> #> Computational engine: kknn #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/null_model.html","id":null,"dir":"Reference","previous_headings":"","what":"Null model — null_model","title":"Null model — null_model","text":"null_model() defines simple, non-informative model. main arguments. 
function can fit classification regression models.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/null_model.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Null model — null_model","text":"","code":"null_model(mode = \"classification\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/null_model.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Null model — null_model","text":"mode single character string model mode (e.g. \"regression\").","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/null_model.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Null model — null_model","text":"model can created using fit() function using following engines: R: \"parsnip\"","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/null_model.html","id":"engine-details","dir":"Reference","previous_headings":"","what":"Engine Details","title":"Null model — null_model","text":"Engines may pre-set default arguments executing model fit call. 
type model, template fit calls :","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/null_model.html","id":"parsnip","dir":"Reference","previous_headings":"","what":"parsnip","title":"Null model — null_model","text":"","code":"null_model() %>% set_engine(\"parsnip\") %>% set_mode(\"regression\") %>% translate() ## Null Model Specification (regression) ## ## Computational engine: parsnip ## ## Model fit template: ## parsnip::nullmodel(x = missing_arg(), y = missing_arg()) null_model() %>% set_engine(\"parsnip\") %>% set_mode(\"classification\") %>% translate() ## Null Model Specification (classification) ## ## Computational engine: parsnip ## ## Model fit template: ## parsnip::nullmodel(x = missing_arg(), y = missing_arg())"},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/null_model.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Null model — null_model","text":"","code":"null_model(mode = \"regression\") #> Null Model Specification (regression) #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/nullmodel.html","id":null,"dir":"Reference","previous_headings":"","what":"Fit a simple, non-informative model — nullmodel","title":"Fit a simple, non-informative model — nullmodel","text":"Fit single mean largest class model. nullmodel() underlying computational function null_model() specification.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/nullmodel.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Fit a simple, non-informative model — nullmodel","text":"","code":"nullmodel(x, ...) # S3 method for default nullmodel(x = NULL, y, ...) # S3 method for nullmodel print(x, ...) 
# S3 method for nullmodel predict(object, new_data = NULL, type = NULL, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/nullmodel.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Fit a simple, non-informative model — nullmodel","text":"x optional matrix data frame predictors. values used model fit ... Optional arguments (yet used) y numeric vector (regression) factor (classification) outcomes object object class nullmodel new_data matrix data frame predictors (used determine number predictions return) type Either \"raw\" (regression), \"class\" \"prob\" (classification)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/nullmodel.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Fit a simple, non-informative model — nullmodel","text":"output nullmodel() list class nullmodel elements call function call value mean y prevalent class levels y factor, vector levels. NULL otherwise pct y factor, data frame column class (NULL otherwise). column prevalent class proportion training samples class (columns zero). n number elements y predict.nullmodel() returns either factor numeric vector depending class y. predictions always .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/nullmodel.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Fit a simple, non-informative model — nullmodel","text":"nullmodel() emulates model building functions, returns simplest model possible given training set: single mean numeric outcomes prevalent class factor outcomes. 
class probabilities requested, percentage training set samples prevalent class returned.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/nullmodel.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Fit a simple, non-informative model — nullmodel","text":"","code":"outcome <- factor(sample(letters[1:2], size = 100, prob = c(.1, .9), replace = TRUE)) useless <- nullmodel(y = outcome) useless #> Null Regression Model #> Predicted Value: b predict(useless, matrix(NA, nrow = 5)) #> [1] b b b b b #> Levels: a b"},{"path":"https://parsnip.tidymodels.org/dev/reference/other_predict.html","id":null,"dir":"Reference","previous_headings":"","what":"Other predict methods. — predict_class.model_fit","title":"Other predict methods. — predict_class.model_fit","text":"internal functions meant directly called user.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/other_predict.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Other predict methods. — predict_class.model_fit","text":"","code":"# S3 method for model_fit predict_class(object, new_data, ...) # S3 method for model_fit predict_classprob(object, new_data, ...) # S3 method for model_fit predict_hazard(object, new_data, eval_time, time = deprecated(), ...) # S3 method for model_fit predict_confint(object, new_data, level = 0.95, std_error = FALSE, ...) # S3 method for model_fit predict_linear_pred(object, new_data, ...) predict_linear_pred(object, ...) # S3 method for model_fit predict_numeric(object, new_data, ...) predict_numeric(object, ...) # S3 method for model_fit predict_quantile( object, new_data, quantile = (1:9)/10, interval = \"none\", level = 0.95, ... ) # S3 method for model_fit predict_survival( object, new_data, eval_time, time = deprecated(), interval = \"none\", level = 0.95, ... ) predict_survival(object, ...) # S3 method for model_fit predict_time(object, new_data, ...) 
predict_time(object, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/other_predict.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Other predict methods. — predict_class.model_fit","text":"object object class model_fit. new_data rectangular data object, data frame. ... Additional parsnip-related options, depending value type. Arguments underlying model's prediction function passed (use opts argument instead). Possible arguments : interval: type equal \"survival\" \"quantile\", interval estimates added, available? Options \"none\" \"confidence\". level: type equal \"conf_int\", \"pred_int\", \"survival\", parameter tail area intervals (e.g. confidence level confidence intervals). Default value 0.95. std_error: type equal \"conf_int\" \"pred_int\", add standard error fit prediction (scale linear predictors). Default value FALSE. quantile: type equal quantile, quantiles distribution. Default (1:9)/10. eval_time: type equal \"survival\" \"hazard\", time points survival probability hazard estimated. level single numeric value zero one interval estimates. std_error single logical whether standard error returned (assuming model can compute ). 
quantile vector numbers 0 1 quantile predicted.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip-package.html","id":null,"dir":"Reference","previous_headings":"","what":"parsnip — parsnip-package","title":"parsnip — parsnip-package","text":"goal parsnip provide tidy, unified interface models can used try range models without getting bogged syntactical minutiae underlying packages.","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip-package.html","id":"author","dir":"Reference","previous_headings":"","what":"Author","title":"parsnip — parsnip-package","text":"Maintainer: Max Kuhn max@posit.co Authors: Davis Vaughan davis@posit.co contributors: Emil Hvitfeldt emil.hvitfeldt@posit.co [contributor] Posit Software, PBC [copyright holder, funder]","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip_addin.html","id":null,"dir":"Reference","previous_headings":"","what":"Start an RStudio Addin that can write model specifications — parsnip_addin","title":"Start an RStudio Addin that can write model specifications — parsnip_addin","text":"parsnip_addin() starts process RStudio IDE Viewer window allows users write code parsnip model specifications various R packages. 
new code written current document location cursor.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip_addin.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Start an RStudio Addin that can write model specifications — parsnip_addin","text":"","code":"parsnip_addin()"},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip_update.html","id":null,"dir":"Reference","previous_headings":"","what":"Updating a model specification — update.bag_mars","title":"Updating a model specification — update.bag_mars","text":"parameters model specification need modified, update() can used lieu recreating object scratch.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip_update.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Updating a model specification — update.bag_mars","text":"","code":"# S3 method for bag_mars update( object, parameters = NULL, num_terms = NULL, prod_degree = NULL, prune_method = NULL, fresh = FALSE, ... ) # S3 method for bag_mlp update( object, parameters = NULL, hidden_units = NULL, penalty = NULL, epochs = NULL, fresh = FALSE, ... ) # S3 method for bag_tree update( object, parameters = NULL, cost_complexity = NULL, tree_depth = NULL, min_n = NULL, class_cost = NULL, fresh = FALSE, ... ) # S3 method for bart update( object, parameters = NULL, trees = NULL, prior_terminal_node_coef = NULL, prior_terminal_node_expo = NULL, prior_outcome_range = NULL, fresh = FALSE, ... ) # S3 method for boost_tree update( object, parameters = NULL, mtry = NULL, trees = NULL, min_n = NULL, tree_depth = NULL, learn_rate = NULL, loss_reduction = NULL, sample_size = NULL, stop_iter = NULL, fresh = FALSE, ... ) # S3 method for C5_rules update( object, parameters = NULL, trees = NULL, min_n = NULL, fresh = FALSE, ... ) # S3 method for cubist_rules update( object, parameters = NULL, committees = NULL, neighbors = NULL, max_rules = NULL, fresh = FALSE, ... 
) # S3 method for decision_tree update( object, parameters = NULL, cost_complexity = NULL, tree_depth = NULL, min_n = NULL, fresh = FALSE, ... ) # S3 method for discrim_flexible update( object, num_terms = NULL, prod_degree = NULL, prune_method = NULL, fresh = FALSE, ... ) # S3 method for discrim_linear update( object, penalty = NULL, regularization_method = NULL, fresh = FALSE, ... ) # S3 method for discrim_quad update(object, regularization_method = NULL, fresh = FALSE, ...) # S3 method for discrim_regularized update( object, frac_common_cov = NULL, frac_identity = NULL, fresh = FALSE, ... ) # S3 method for gen_additive_mod update( object, select_features = NULL, adjust_deg_free = NULL, parameters = NULL, fresh = FALSE, ... ) # S3 method for linear_reg update( object, parameters = NULL, penalty = NULL, mixture = NULL, fresh = FALSE, ... ) # S3 method for logistic_reg update( object, parameters = NULL, penalty = NULL, mixture = NULL, fresh = FALSE, ... ) # S3 method for mars update( object, parameters = NULL, num_terms = NULL, prod_degree = NULL, prune_method = NULL, fresh = FALSE, ... ) # S3 method for mlp update( object, parameters = NULL, hidden_units = NULL, penalty = NULL, dropout = NULL, epochs = NULL, activation = NULL, learn_rate = NULL, fresh = FALSE, ... ) # S3 method for multinom_reg update( object, parameters = NULL, penalty = NULL, mixture = NULL, fresh = FALSE, ... ) # S3 method for naive_Bayes update(object, smoothness = NULL, Laplace = NULL, fresh = FALSE, ...) # S3 method for nearest_neighbor update( object, parameters = NULL, neighbors = NULL, weight_func = NULL, dist_power = NULL, fresh = FALSE, ... ) # S3 method for pls update( object, parameters = NULL, predictor_prop = NULL, num_comp = NULL, fresh = FALSE, ... ) # S3 method for poisson_reg update( object, parameters = NULL, penalty = NULL, mixture = NULL, fresh = FALSE, ... 
) # S3 method for proportional_hazards update( object, parameters = NULL, penalty = NULL, mixture = NULL, fresh = FALSE, ... ) # S3 method for rand_forest update( object, parameters = NULL, mtry = NULL, trees = NULL, min_n = NULL, fresh = FALSE, ... ) # S3 method for rule_fit update( object, parameters = NULL, mtry = NULL, trees = NULL, min_n = NULL, tree_depth = NULL, learn_rate = NULL, loss_reduction = NULL, sample_size = NULL, penalty = NULL, fresh = FALSE, ... ) # S3 method for surv_reg update(object, parameters = NULL, dist = NULL, fresh = FALSE, ...) # S3 method for survival_reg update(object, parameters = NULL, dist = NULL, fresh = FALSE, ...) # S3 method for svm_linear update( object, parameters = NULL, cost = NULL, margin = NULL, fresh = FALSE, ... ) # S3 method for svm_poly update( object, parameters = NULL, cost = NULL, degree = NULL, scale_factor = NULL, margin = NULL, fresh = FALSE, ... ) # S3 method for svm_rbf update( object, parameters = NULL, cost = NULL, rbf_sigma = NULL, margin = NULL, fresh = FALSE, ... )"},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip_update.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Updating a model specification — update.bag_mars","text":"object model specification. parameters 1-row tibble named list main parameters update. Use either parameters main arguments directly updating. main arguments used, supersede values parameters. Also, using engine arguments object result error. num_terms number features retained final model, including intercept. prod_degree highest possible interaction degree. prune_method pruning method. fresh logical whether arguments modified -place replaced wholesale. ... used update(). hidden_units integer number units hidden model. penalty non-negative number representing amount regularization used engines. epochs integer number training iterations. cost_complexity positive number cost/complexity parameter (.k.. 
Cp) used CART models (specific engines ). tree_depth integer maximum depth tree. min_n integer minimum number data points node required node split . class_cost non-negative scalar class cost (cost 1 means extra cost). useful first level outcome factor minority class. case, values zero one can used bias second level factor. trees integer number trees contained ensemble. prior_terminal_node_coef coefficient prior probability node terminal node. prior_terminal_node_expo exponent prior probability node terminal node. prior_outcome_range positive value defines width prior predicted outcome within certain range. regression related observed range data; prior number standard deviations Gaussian distribution defined observed range data. classification, defined range +/-3 (assumed logit scale). default value 2. mtry number number (proportion) predictors randomly sampled split creating tree models (specific engines ). learn_rate number rate boosting algorithm adapts iteration--iteration (specific engines ). sometimes referred shrinkage parameter. loss_reduction number reduction loss function required split (specific engines ). sample_size number number (proportion) data exposed fitting routine. xgboost, sampling done iteration C5.0 samples training. stop_iter number iterations without improvement stopping (specific engines ). committees non-negative integer (greater 100) number members ensemble. neighbors integer zero nine number training set instances used adjust model-based prediction. max_rules largest number rules. regularization_method character string type regularized estimation. Possible values : \"diagonal\", \"min_distance\", \"shrink_cov\", \"shrink_mean\" (sparsediscrim engine ). frac_common_cov, frac_identity Numeric values zero one. select_features TRUE FALSE. TRUE, model ability eliminate predictor (via penalization). Increasing adjust_deg_free increase likelihood removing predictors. adjust_deg_free select_features = TRUE, acts multiplier smoothness. 
Increase beyond 1 produce smoother models. mixture number zero one (inclusive) denoting proportion L1 regularization (.e. lasso) model. mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge. Available specific engines . dropout number 0 (inclusive) 1 denoting proportion model parameters randomly set zero model training. activation single character string denoting type relationship original predictors hidden unit layer. activation function hidden output layers automatically set either \"linear\" \"softmax\" depending type outcome. Possible values : \"linear\", \"softmax\", \"relu\", \"elu\" smoothness non-negative number representing relative smoothness class boundary. Smaller examples result model flexible boundaries larger values generate class boundaries less adaptable Laplace non-negative value Laplace correction smoothing low-frequency counts. weight_func single character type kernel function used weight distances samples. Valid choices : \"rectangular\", \"triangular\", \"epanechnikov\", \"biweight\", \"triweight\", \"cos\", \"inv\", \"gaussian\", \"rank\", \"optimal\". dist_power single number parameter used calculating Minkowski distance. predictor_prop maximum proportion original predictors can non-zero coefficients PLS component (via regularization). value used PLS components X. num_comp number PLS components retain. dist character string probability distribution outcome. default \"weibull\". cost positive number cost predicting sample within wrong side margin margin positive number epsilon SVM insensitive loss function (regression ) degree positive number polynomial degree. scale_factor positive number polynomial scaling factor. 
rbf_sigma positive number radial basis function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip_update.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Updating a model specification — update.bag_mars","text":"updated model specification.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/parsnip_update.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Updating a model specification — update.bag_mars","text":"","code":"# ------------------------------------------------------------------------------ model <- C5_rules(trees = 10, min_n = 2) model #> ! parsnip could not locate an implementation for `C5_rules` model #> specifications. #> ℹ The parsnip extension package rules implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> C5.0 Model Specification (classification) #> #> Main Arguments: #> trees = 10 #> min_n = 2 #> #> Computational engine: C5.0 #> update(model, trees = 1) #> ! parsnip could not locate an implementation for `C5_rules` model #> specifications. #> ℹ The parsnip extension package rules implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> C5.0 Model Specification (classification) #> #> Main Arguments: #> trees = 1 #> min_n = 2 #> #> Computational engine: C5.0 #> update(model, trees = 1, fresh = TRUE) #> ! parsnip could not locate an implementation for `C5_rules` model #> specifications. #> ℹ The parsnip extension package rules implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> C5.0 Model Specification (classification) #> #> Main Arguments: #> trees = 1 #> #> Computational engine: C5.0 #> # ------------------------------------------------------------------------------ model <- cubist_rules(committees = 10, neighbors = 2) model #> ! 
parsnip could not locate an implementation for `cubist_rules` model #> specifications. #> ℹ The parsnip extension package rules implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> Cubist Model Specification (regression) #> #> Main Arguments: #> committees = 10 #> neighbors = 2 #> #> Computational engine: Cubist #> update(model, committees = 1) #> ! parsnip could not locate an implementation for `cubist_rules` model #> specifications. #> ℹ The parsnip extension package rules implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> Cubist Model Specification (regression) #> #> Main Arguments: #> committees = 1 #> neighbors = 2 #> #> Computational engine: Cubist #> update(model, committees = 1, fresh = TRUE) #> ! parsnip could not locate an implementation for `cubist_rules` model #> specifications. #> ℹ The parsnip extension package rules implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> Cubist Model Specification (regression) #> #> Main Arguments: #> committees = 1 #> #> Computational engine: Cubist #> model <- pls(predictor_prop = 0.1) model #> ! parsnip could not locate an implementation for `pls` model #> specifications. #> ℹ The parsnip extension package plsmod implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> PLS Model Specification (unknown mode) #> #> Main Arguments: #> predictor_prop = 0.1 #> #> Computational engine: mixOmics #> update(model, predictor_prop = 1) #> ! parsnip could not locate an implementation for `pls` model #> specifications. #> ℹ The parsnip extension package plsmod implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> PLS Model Specification (unknown mode) #> #> Main Arguments: #> predictor_prop = 1 #> #> Computational engine: mixOmics #> update(model, predictor_prop = 1, fresh = TRUE) #> ! 
parsnip could not locate an implementation for `pls` model #> specifications. #> ℹ The parsnip extension package plsmod implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> PLS Model Specification (unknown mode) #> #> Main Arguments: #> predictor_prop = 1 #> #> Computational engine: mixOmics #> # ------------------------------------------------------------------------------ model <- rule_fit(trees = 10, min_n = 2) model #> ! parsnip could not locate an implementation for `rule_fit` model #> specifications. #> ℹ The parsnip extension packages agua and rules implement support for #> this specification. #> ℹ Please install (if needed) and load to continue. #> RuleFit Model Specification (unknown mode) #> #> Main Arguments: #> trees = 10 #> min_n = 2 #> #> Computational engine: xrf #> update(model, trees = 1) #> ! parsnip could not locate an implementation for `rule_fit` model #> specifications. #> ℹ The parsnip extension packages agua and rules implement support for #> this specification. #> ℹ Please install (if needed) and load to continue. #> RuleFit Model Specification (unknown mode) #> #> Main Arguments: #> trees = 1 #> min_n = 2 #> #> Computational engine: xrf #> update(model, trees = 1, fresh = TRUE) #> ! parsnip could not locate an implementation for `rule_fit` model #> specifications. #> ℹ The parsnip extension packages agua and rules implement support for #> this specification. #> ℹ Please install (if needed) and load to continue. 
#> RuleFit Model Specification (unknown mode) #> #> Main Arguments: #> trees = 1 #> #> Computational engine: xrf #> model <- boost_tree(mtry = 10, min_n = 3) model #> Boosted Tree Model Specification (unknown mode) #> #> Main Arguments: #> mtry = 10 #> min_n = 3 #> #> Computational engine: xgboost #> update(model, mtry = 1) #> Boosted Tree Model Specification (unknown mode) #> #> Main Arguments: #> mtry = 1 #> min_n = 3 #> #> Computational engine: xgboost #> update(model, mtry = 1, fresh = TRUE) #> Boosted Tree Model Specification (unknown mode) #> #> Main Arguments: #> mtry = 1 #> #> Computational engine: xgboost #> param_values <- tibble::tibble(mtry = 10, tree_depth = 5) model %>% update(param_values) #> Boosted Tree Model Specification (unknown mode) #> #> Main Arguments: #> mtry = 10 #> min_n = 3 #> tree_depth = 5 #> #> Computational engine: xgboost #> model %>% update(param_values, mtry = 3) #> Boosted Tree Model Specification (unknown mode) #> #> Main Arguments: #> mtry = 10 #> min_n = 3 #> tree_depth = 5 #> #> Computational engine: xgboost #> param_values$verbose <- 0 # Fails due to engine argument # model %>% update(param_values) model <- linear_reg(penalty = 10, mixture = 0.1) model #> Linear Regression Model Specification (regression) #> #> Main Arguments: #> penalty = 10 #> mixture = 0.1 #> #> Computational engine: lm #> update(model, penalty = 1) #> Linear Regression Model Specification (regression) #> #> Main Arguments: #> penalty = 1 #> mixture = 0.1 #> #> Computational engine: lm #> update(model, penalty = 1, fresh = TRUE) #> Linear Regression Model Specification (regression) #> #> Main Arguments: #> penalty = 1 #> #> Computational engine: lm #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/pls.html","id":null,"dir":"Reference","previous_headings":"","what":"Partial least squares (PLS) — pls","title":"Partial least squares (PLS) — pls","text":"pls() defines partial least squares model uses latent variables model data. 
similar supervised version principal component. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . mixOmics¹² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/pls.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Partial least squares (PLS) — pls","text":"","code":"pls( mode = \"unknown\", predictor_prop = NULL, num_comp = NULL, engine = \"mixOmics\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/pls.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Partial least squares (PLS) — pls","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". predictor_prop maximum proportion original predictors can non-zero coefficients PLS component (via regularization). value used PLS components X. num_comp number PLS components retain. engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/pls.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Partial least squares (PLS) — pls","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 pls(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/pls.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Partial least squares (PLS) — pls","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/poisson_reg.html","id":null,"dir":"Reference","previous_headings":"","what":"Poisson regression models — poisson_reg","title":"Poisson regression models — poisson_reg","text":"poisson_reg() defines generalized linear model count data follow Poisson distribution. function can fit regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . glm¹² gee² glmer² glmnet² h2o² hurdle² stan² stan_glmer² zeroinfl² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/poisson_reg.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Poisson regression models — poisson_reg","text":"","code":"poisson_reg( mode = \"regression\", penalty = NULL, mixture = NULL, engine = \"glm\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/poisson_reg.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Poisson regression models — poisson_reg","text":"mode single character string type model. possible value model \"regression\". penalty non-negative number representing total amount regularization (glmnet ). mixture number zero one (inclusive) giving proportion L1 regularization (.e. lasso) model. mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge. Available glmnet spark . 
engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/poisson_reg.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Poisson regression models — poisson_reg","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 poisson_reg(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/poisson_reg.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Poisson regression models — poisson_reg","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":null,"dir":"Reference","previous_headings":"","what":"Model predictions — predict.model_fit","title":"Model predictions — predict.model_fit","text":"Apply model create different types predictions. predict() can used types models uses \"type\" argument specificity.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Model predictions — predict.model_fit","text":"","code":"# S3 method for model_fit predict(object, new_data, type = NULL, opts = list(), ...) # S3 method for model_fit predict_raw(object, new_data, opts = list(), ...) predict_raw(object, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Model predictions — predict.model_fit","text":"object object class model_fit. 
new_data rectangular data object, data frame. type single character value NULL. Possible values \"numeric\", \"class\", \"prob\", \"conf_int\", \"pred_int\", \"quantile\", \"time\", \"hazard\", \"survival\", \"raw\". NULL, predict() choose appropriate value based model's mode. opts list optional arguments underlying predict function used type = \"raw\". list include options model object new data predicted. ... Additional parsnip-related options, depending value type. Arguments underlying model's prediction function passed (use opts argument instead). Possible arguments : interval: type equal \"survival\" \"quantile\", interval estimates added, available? Options \"none\" \"confidence\". level: type equal \"conf_int\", \"pred_int\", \"survival\", parameter tail area intervals (e.g. confidence level confidence intervals). Default value 0.95. std_error: type equal \"conf_int\" \"pred_int\", add standard error fit prediction (scale linear predictors). Default value FALSE. quantile: type equal quantile, quantiles distribution. Default (1:9)/10. eval_time: type equal \"survival\" \"hazard\", time points survival probability hazard estimated.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Model predictions — predict.model_fit","text":"exception type = \"raw\", result predict.model_fit() tibble many rows rows new_data standardized column names, see : type = \"numeric\", tibble .pred column single outcome .pred_Yname columns multivariate outcome. type = \"class\", tibble .pred_class column. type = \"prob\", tibble .pred_classlevel columns. type = \"conf_int\" type = \"pred_int\", tibble .pred_lower .pred_upper columns attribute confidence level. case intervals can produces class probabilities (non-scalar outputs), columns named .pred_lower_classlevel . type = \"quantile\", tibble .pred column, list-column. 
list element contains tibble columns .pred .quantile (perhaps columns). type = \"time\", tibble .pred_time column. type = \"survival\", tibble .pred column, list-column. list element contains tibble columns .eval_time .pred_survival (perhaps columns). type = \"hazard\", tibble .pred column, list-column. list element contains tibble columns .eval_time .pred_hazard (perhaps columns). Using type = \"raw\" predict.model_fit() return unadulterated results prediction function. case Spark-based models, since table columns contain dots, convention used except 1) dots appear names 2) vectors never returned type-specific prediction functions. model fit failed error captured, predict() function return structure filled missing values. currently work multivariate models.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Model predictions — predict.model_fit","text":"type = NULL, predict() uses type = \"numeric\" regression models, type = \"class\" classification, type = \"time\" censored regression.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":"interval-predictions","dir":"Reference","previous_headings":"","what":"Interval predictions","title":"Model predictions — predict.model_fit","text":"using type = \"conf_int\" type = \"pred_int\", options level std_error can used. latter logical extra column standard error values (available).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":"censored-regression-predictions","dir":"Reference","previous_headings":"","what":"Censored regression predictions","title":"Model predictions — predict.model_fit","text":"censored regression, numeric vector eval_time required survival hazard probabilities requested. time values required unique, finite, non-missing, non-negative. 
predict() functions adjust values fit specification removing offending points (warning). predict.model_fit() require outcome present. performance metrics predicted survival probability, inverse probability censoring weights (IPCW) required (see tidymodels.org reference ). require outcome thus returned predict(). can added via augment.model_fit() new_data contains column outcome Surv object. Also, type = \"linear_pred\", censored regression models default formatted linear predictor increases time. may opposite sign underlying model's predict() method produces. Set increasing = FALSE suppress behavior.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Model predictions — predict.model_fit","text":"https://www.tidymodels.org/learn/statistics/survival-metrics/","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/predict.model_fit.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Model predictions — predict.model_fit","text":"","code":"library(dplyr) lm_model <- linear_reg() %>% set_engine(\"lm\") %>% fit(mpg ~ ., data = mtcars %>% dplyr::slice(11:32)) pred_cars <- mtcars %>% dplyr::slice(1:10) %>% dplyr::select(-mpg) predict(lm_model, pred_cars) #> # A tibble: 10 × 1 #> .pred #> #> 1 23.4 #> 2 23.3 #> 3 27.6 #> 4 21.5 #> 5 17.6 #> 6 21.6 #> 7 13.9 #> 8 21.7 #> 9 25.6 #> 10 17.1 predict( lm_model, pred_cars, type = \"conf_int\", level = 0.90 ) #> # A tibble: 10 × 2 #> .pred_lower .pred_upper #> #> 1 17.9 29.0 #> 2 18.1 28.5 #> 3 24.0 31.3 #> 4 17.5 25.6 #> 5 14.3 20.8 #> 6 17.0 26.2 #> 7 9.65 18.2 #> 8 16.2 27.2 #> 9 14.2 37.0 #> 10 11.5 22.7 predict( lm_model, pred_cars, type = \"raw\", opts = list(type = \"terms\") ) #> cyl disp hp drat #> Mazda RX4 -0.001433177 -0.8113275 0.6303467 -0.06120265 #> Mazda RX4 Wag -0.001433177 -0.8113275 0.6303467 -0.06120265 #> Datsun 710 -0.009315653 
-1.3336453 0.8557288 -0.05014798 #> Hornet 4 Drive -0.001433177 0.1730406 0.6303467 0.12009386 #> Hornet Sportabout 0.006449298 1.1975870 -0.2314083 0.10461733 #> Valiant -0.001433177 -0.1584303 0.6966356 0.19084372 #> Duster 360 0.006449298 1.1975870 -1.1594522 0.09135173 #> Merc 240D -0.009315653 -0.9449204 1.2667197 -0.01477305 #> Merc 230 -0.009315653 -1.0041833 0.8292133 -0.06562451 #> Merc 280 -0.001433177 -0.7349888 0.4579957 -0.06562451 #> wt qsec vs am gear #> Mazda RX4 2.4139815 -1.567729 0.2006406 2.88774 0.02512680 #> Mazda RX4 Wag 1.4488706 -0.736286 0.2006406 2.88774 0.02512680 #> Datsun 710 3.5494061 1.624418 -0.3511210 2.88774 0.02512680 #> Hornet 4 Drive 0.1620561 2.856736 -0.3511210 -2.40645 -0.06700481 #> Hornet Sportabout -0.6895124 -0.736286 0.2006406 -2.40645 -0.06700481 #> Valiant -0.7652074 4.014817 -0.3511210 -2.40645 -0.06700481 #> Duster 360 -1.1815297 -2.488255 0.2006406 -2.40645 -0.06700481 #> Merc 240D 0.2566748 3.688179 -0.3511210 -2.40645 0.02512680 #> Merc 230 0.4080647 7.993866 -0.3511210 -2.40645 0.02512680 #> Merc 280 -0.6895124 1.164155 -0.3511210 -2.40645 0.02512680 #> carb #> Mazda RX4 -0.2497240 #> Mazda RX4 Wag -0.2497240 #> Datsun 710 0.4668753 #> Hornet 4 Drive 0.4668753 #> Hornet Sportabout 0.2280089 #> Valiant 0.4668753 #> Duster 360 -0.2497240 #> Merc 240D 0.2280089 #> Merc 230 0.2280089 #> Merc 280 -0.2497240 #> attr(,\"constant\") #> [1] 19.96364"},{"path":"https://parsnip.tidymodels.org/dev/reference/prepare_data.html","id":null,"dir":"Reference","previous_headings":"","what":"Prepare data based on parsnip encoding information — prepare_data","title":"Prepare data based on parsnip encoding information — prepare_data","text":"Prepare data based parsnip encoding information","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/prepare_data.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Prepare data based on parsnip encoding information — 
prepare_data","text":"","code":"prepare_data(object, new_data)"},{"path":"https://parsnip.tidymodels.org/dev/reference/prepare_data.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Prepare data based on parsnip encoding information — prepare_data","text":"object parsnip model object new_data data frame","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/prepare_data.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Prepare data based on parsnip encoding information — prepare_data","text":"data frame matrix","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/proportional_hazards.html","id":null,"dir":"Reference","previous_headings":"","what":"Proportional hazards regression — proportional_hazards","title":"Proportional hazards regression — proportional_hazards","text":"proportional_hazards() defines model hazard function multiplicative function covariates times baseline hazard. function can fit censored regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . survival¹² glmnet² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/proportional_hazards.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Proportional hazards regression — proportional_hazards","text":"","code":"proportional_hazards( mode = \"censored regression\", engine = \"survival\", penalty = NULL, mixture = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/proportional_hazards.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Proportional hazards regression — proportional_hazards","text":"mode single character string prediction outcome mode. possible value model \"censored regression\". engine single character string specifying computational engine use fitting. 
penalty non-negative number representing total amount regularization (specific engines ). mixture number zero one (inclusive) denoting proportion L1 regularization (.e. lasso) model. mixture = 1 specifies pure lasso model, mixture = 0 specifies ridge regression model, 0 < mixture < 1 specifies elastic net model, interpolating lasso ridge. Available specific engines .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/proportional_hazards.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Proportional hazards regression — proportional_hazards","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like : Since survival models typically involve censoring (require use survival::Surv() objects), fit.model_spec() function require survival model specified via formula interface. Proportional hazards models include Cox model.","code":"value <- 1 proportional_hazards(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/proportional_hazards.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Proportional hazards regression — proportional_hazards","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/proportional_hazards.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Proportional hazards regression — proportional_hazards","text":"","code":"show_engines(\"proportional_hazards\") #> # A tibble: 0 × 2 #> # ℹ 2 variables: engine , mode proportional_hazards(mode = \"censored regression\") #> ! 
parsnip could not locate an implementation for `proportional_hazards` #> censored regression model specifications. #> ℹ The parsnip extension package censored implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. #> Proportional Hazards Model Specification (censored regression) #> #> Computational engine: survival #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/rand_forest.html","id":null,"dir":"Reference","previous_headings":"","what":"Random forest — rand_forest","title":"Random forest — rand_forest","text":"rand_forest() defines model creates large number decision trees, independent others. final prediction uses predictions individual trees combines . function can fit classification, regression, censored regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . ranger¹ aorsf² h2o² partykit² randomForest spark information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/rand_forest.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Random forest — rand_forest","text":"","code":"rand_forest( mode = \"unknown\", engine = \"ranger\", mtry = NULL, trees = NULL, min_n = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/rand_forest.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Random forest — rand_forest","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\", \"censored regression\". engine single character string specifying computational engine use fitting. mtry integer number predictors randomly sampled split creating tree models. trees integer number trees contained ensemble. 
min_n integer minimum number data points node required node split .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/rand_forest.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Random forest — rand_forest","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 rand_forest(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/rand_forest.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Random forest — rand_forest","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/rand_forest.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Random forest — rand_forest","text":"","code":"show_engines(\"rand_forest\") #> # A tibble: 6 × 2 #> engine mode #> #> 1 ranger classification #> 2 ranger regression #> 3 randomForest classification #> 4 randomForest regression #> 5 spark classification #> 6 spark regression rand_forest(mode = \"classification\", trees = 2000) #> Random Forest Model Specification (classification) #> #> Main Arguments: #> trees = 2000 #> #> Computational engine: ranger #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/reexports.html","id":null,"dir":"Reference","previous_headings":"","what":"Objects exported from other packages — reexports","title":"Objects exported from other packages — reexports","text":"objects imported packages. Follow links see documentation. 
generics augment, fit, fit_xy, glance, required_pkgs, tidy, varying_args ggplot2 autoplot hardhat extract_fit_engine, extract_parameter_dials, extract_parameter_set_dials, extract_spec_parsnip, frequency_weights, importance_weights, tune magrittr %>%","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/repair_call.html","id":null,"dir":"Reference","previous_headings":"","what":"Repair a model call object — repair_call","title":"Repair a model call object — repair_call","text":"user passes formula fit() underlying model function uses formula, call object produced fit() may usable functions. example, arguments may still quosures data portion call correspond original data.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/repair_call.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Repair a model call object — repair_call","text":"","code":"repair_call(x, data)"},{"path":"https://parsnip.tidymodels.org/dev/reference/repair_call.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Repair a model call object — repair_call","text":"x fitted parsnip model. error occur underlying model call element. data data object relevant call. cases, data frame given parsnip model fit (.e., training set data). 
name data object inserted call.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/repair_call.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Repair a model call object — repair_call","text":"modified parsnip fitted model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/repair_call.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Repair a model call object — repair_call","text":"repair_call() call can adjust model objects call usable functions methods.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/repair_call.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Repair a model call object — repair_call","text":"","code":"fitted_model <- linear_reg() %>% set_engine(\"lm\", model = TRUE) %>% fit(mpg ~ ., data = mtcars) # In this call, note that `data` is not `mtcars` and the `model = ~TRUE` # indicates that the `model` argument is an `rlang` quosure. fitted_model$fit$call #> stats::lm(formula = mpg ~ ., data = data, model = ~TRUE) # All better: repair_call(fitted_model, mtcars)$fit$call #> stats::lm(formula = mpg ~ ., data = mtcars, model = TRUE)"},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/req_pkgs.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Determine required packages for a model — req_pkgs","text":"","code":"req_pkgs(x, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/req_pkgs.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Determine required packages for a model — req_pkgs","text":"x model specification fit. ... 
used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/req_pkgs.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Determine required packages for a model — req_pkgs","text":"character string package names ().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/req_pkgs.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Determine required packages for a model — req_pkgs","text":"function deprecated favor required_pkgs().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/required_pkgs.model_spec.html","id":null,"dir":"Reference","previous_headings":"","what":"Determine required packages for a model — required_pkgs.model_spec","title":"Determine required packages for a model — required_pkgs.model_spec","text":"Determine required packages model","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/required_pkgs.model_spec.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Determine required packages for a model — required_pkgs.model_spec","text":"","code":"# S3 method for model_spec required_pkgs(x, infra = TRUE, ...) # S3 method for model_fit required_pkgs(x, infra = TRUE, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/required_pkgs.model_spec.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Determine required packages for a model — required_pkgs.model_spec","text":"x model specification fit. infra parsnip included result? ... 
used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/required_pkgs.model_spec.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Determine required packages for a model — required_pkgs.model_spec","text":"character vector","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/required_pkgs.model_spec.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Determine required packages for a model — required_pkgs.model_spec","text":"","code":"should_fail <- try(required_pkgs(linear_reg(engine = NULL)), silent = TRUE) should_fail #> [1] \"Error in required_pkgs(linear_reg(engine = NULL)) : Please set an engine.\\n\" #> attr(,\"class\") #> [1] \"try-error\" #> attr(,\"condition\") #> #> Error in `required_pkgs()`: #> ! Please set an engine. #> --- #> Backtrace: #> ▆ #> 1. └─pkgdown::build_site_github_pages(new_process = FALSE, install = FALSE) #> 2. └─pkgdown::build_site(...) #> 3. └─pkgdown:::build_site_local(...) #> 4. └─pkgdown::build_reference(...) #> 5. └─purrr::map(...) #> 6. └─purrr:::map_(\"list\", .x, .f, ..., .progress = .progress) #> 7. ├─purrr:::with_indexed_errors(...) #> 8. │ └─base::withCallingHandlers(...) #> 9. ├─purrr:::call_with_cleanup(...) #> 10. └─pkgdown (local) .f(.x[[i]], ...) #> 11. ├─base::withCallingHandlers(...) #> 12. └─pkgdown:::data_reference_topic(...) #> 13. └─pkgdown:::run_examples(...) #> 14. └─pkgdown:::highlight_examples(code, topic, env = env) #> 15. └─downlit::evaluate_and_highlight(...) #> 16. └─evaluate::evaluate(code, child_env(env), new_device = TRUE, output_handler = output_handler) #> 17. └─evaluate:::evaluate_call(...) #> 18. ├─evaluate (local) timing_fn(...) #> 19. ├─evaluate (local) handle(...) #> 20. │ └─base::try(f, silent = TRUE) #> 21. │ └─base::tryCatch(...) #> 22. │ └─base (local) tryCatchList(expr, classes, parentenv, handlers) #> 23. │ └─base (local) tryCatchOne(expr, names, parentenv, handlers[[1L]]) #> 24. 
│ └─base (local) doTryCatch(return(expr), name, parentenv, handler) #> 25. ├─base::withCallingHandlers(...) #> 26. ├─base::withVisible(...) #> 27. └─evaluate:::eval_with_user_handlers(expr, envir, enclos, user_handlers) #> 28. └─base::eval(expr, envir, enclos) #> 29. └─base::eval(expr, envir, enclos) #> 30. ├─base::try(required_pkgs(linear_reg(engine = NULL)), silent = TRUE) #> 31. │ └─base::tryCatch(...) #> 32. │ └─base (local) tryCatchList(expr, classes, parentenv, handlers) #> 33. │ └─base (local) tryCatchOne(expr, names, parentenv, handlers[[1L]]) #> 34. │ └─base (local) doTryCatch(return(expr), name, parentenv, handler) #> 35. ├─generics::required_pkgs(linear_reg(engine = NULL)) #> 36. └─parsnip:::required_pkgs.model_spec(linear_reg(engine = NULL)) linear_reg() %>% set_engine(\"glmnet\") %>% required_pkgs() #> [1] \"parsnip\" \"glmnet\" linear_reg() %>% set_engine(\"glmnet\") %>% required_pkgs(infra = FALSE) #> [1] \"glmnet\" linear_reg() %>% set_engine(\"lm\") %>% fit(mpg ~ ., data = mtcars) %>% required_pkgs() #> [1] \"parsnip\" \"stats\""},{"path":"https://parsnip.tidymodels.org/dev/reference/rpart_train.html","id":null,"dir":"Reference","previous_headings":"","what":"Decision trees via rpart — rpart_train","title":"Decision trees via rpart — rpart_train","text":"rpart_train wrapper rpart() tree-based models model arguments main function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/rpart_train.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Decision trees via rpart — rpart_train","text":"","code":"rpart_train( formula, data, weights = NULL, cp = 0.01, minsplit = 20, maxdepth = 30, ... )"},{"path":"https://parsnip.tidymodels.org/dev/reference/rpart_train.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Decision trees via rpart — rpart_train","text":"formula model formula. data data frame. weights Optional case weights. 
cp non-negative number complexity parameter. split decrease overall lack fit factor cp attempted. instance, anova splitting, means overall R-squared must increase cp step. main role parameter save computing time pruning splits obviously worthwhile. Essentially, user informs program split improve fit cp likely pruned cross-validation, hence program need pursue . minsplit integer minimum number observations must exist node order split attempted. maxdepth integer maximum depth node final tree, root node counted depth 0. Values greater 30 rpart give nonsense results 32-bit machines. function truncate maxdepth 30 cases. ... arguments pass either rpart rpart.control.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/rpart_train.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Decision trees via rpart — rpart_train","text":"fitted rpart model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/rule_fit.html","id":null,"dir":"Reference","previous_headings":"","what":"RuleFit models — rule_fit","title":"RuleFit models — rule_fit","text":"rule_fit() defines model derives simple feature rules tree ensemble uses features regularized model. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
xrf¹² h2o² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/rule_fit.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"RuleFit models — rule_fit","text":"","code":"rule_fit( mode = \"unknown\", mtry = NULL, trees = NULL, min_n = NULL, tree_depth = NULL, learn_rate = NULL, loss_reduction = NULL, sample_size = NULL, stop_iter = NULL, penalty = NULL, engine = \"xrf\" )"},{"path":"https://parsnip.tidymodels.org/dev/reference/rule_fit.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"RuleFit models — rule_fit","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". mtry number number (proportion) predictors randomly sampled split creating tree models (specific engines ). trees integer number trees contained ensemble. min_n integer minimum number data points node required node split . tree_depth integer maximum depth tree (.e. number splits) (specific engines ). learn_rate number rate boosting algorithm adapts iteration--iteration (specific engines ). sometimes referred shrinkage parameter. loss_reduction number reduction loss function required split (specific engines ). sample_size number number (proportion) data exposed fitting routine. xgboost, sampling done iteration C5.0 samples training. stop_iter number iterations without improvement stopping (specific engines ). penalty L1 regularization parameter. engine single character string specifying computational engine use fitting.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/rule_fit.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"RuleFit models — rule_fit","text":"RuleFit model creates regression model rules two stages. first stage uses tree-based model used generate set rules can filtered, modified, simplified. 
rules added predictors regularized generalized linear model can also conduct feature selection model training. function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 rule_fit(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/rule_fit.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"RuleFit models — rule_fit","text":"Friedman, J. H., Popescu, B. E. (2008). \"Predictive learning via rule ensembles.\" Annals Applied Statistics, 2(3), 916-954. https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/rule_fit.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"RuleFit models — rule_fit","text":"","code":"show_engines(\"rule_fit\") #> # A tibble: 0 × 2 #> # ℹ 2 variables: engine , mode rule_fit() #> ! parsnip could not locate an implementation for `rule_fit` model #> specifications. #> ℹ The parsnip extension packages agua and rules implement support for #> this specification. #> ℹ Please install (if needed) and load to continue. 
#> RuleFit Model Specification (unknown mode) #> #> Computational engine: xrf #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/set_args.html","id":null,"dir":"Reference","previous_headings":"","what":"Change elements of a model specification — set_args","title":"Change elements of a model specification — set_args","text":"set_args() can used modify arguments model specification set_mode() used change model's mode.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_args.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Change elements of a model specification — set_args","text":"","code":"set_args(object, ...) set_mode(object, mode)"},{"path":"https://parsnip.tidymodels.org/dev/reference/set_args.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Change elements of a model specification — set_args","text":"object model specification. ... One named model arguments. mode character string model type (e.g. 
\"classification\" \"regression\")","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_args.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Change elements of a model specification — set_args","text":"updated model object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_args.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Change elements of a model specification — set_args","text":"set_args() replace existing values arguments.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_args.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Change elements of a model specification — set_args","text":"","code":"rand_forest() #> Random Forest Model Specification (unknown mode) #> #> Computational engine: ranger #> rand_forest() %>% set_args(mtry = 3, importance = TRUE) %>% set_mode(\"regression\") #> Random Forest Model Specification (regression) #> #> Main Arguments: #> mtry = 3 #> #> Engine-Specific Arguments: #> importance = TRUE #> #> Computational engine: ranger #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/set_engine.html","id":null,"dir":"Reference","previous_headings":"","what":"Declare a computational engine and specific arguments — set_engine","title":"Declare a computational engine and specific arguments — set_engine","text":"set_engine() used specify package system used fit model, along arguments specific software.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_engine.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Declare a computational engine and specific arguments — set_engine","text":"","code":"set_engine(object, engine, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/set_engine.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Declare a computational 
engine and specific arguments — set_engine","text":"object model specification. engine character string software used fit model. highly dependent type model (e.g. linear regression, random forest, etc.). ... optional arguments associated chosen computational engine. captured quosures can tuned tune().","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_engine.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Declare a computational engine and specific arguments — set_engine","text":"updated model specification.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_engine.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Declare a computational engine and specific arguments — set_engine","text":"parsnip, model type differentiates basic modeling approaches, random forests, logistic regression, linear support vector machines, etc., mode denotes kind modeling context used (commonly, classification regression), computational engine indicates model fit, specific R package implementation even methods outside R like Keras Stan. Use show_engines() get list possible engines model interest. Modeling functions parsnip separate model arguments two categories: Main arguments commonly used tend available across engines. names standardized work different engines consistent way, can use parsnip main argument trees, instead heterogeneous arguments parameter ranger randomForest packages (num.trees ntree, respectively). Set model type function, like rand_forest(trees = 2000). Engine arguments either specific particular engine used rarely; change argument names underlying engine. ... 
argument set_engine() allows engine-specific argument passed directly engine fitting function, like set_engine(\"ranger\", importance = \"permutation\").","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_engine.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Declare a computational engine and specific arguments — set_engine","text":"","code":"# First, set main arguments using the standardized names logistic_reg(penalty = 0.01, mixture = 1/3) %>% # Now specify how you want to fit the model with another argument set_engine(\"glmnet\", nlambda = 10) %>% translate() #> Logistic Regression Model Specification (classification) #> #> Main Arguments: #> penalty = 0.01 #> mixture = 1/3 #> #> Engine-Specific Arguments: #> nlambda = 10 #> #> Computational engine: glmnet #> #> Model fit template: #> glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), #> alpha = 1/3, nlambda = 10, family = \"binomial\") # Many models have possible engine-specific arguments decision_tree(tree_depth = 5) %>% set_engine(\"rpart\", parms = list(prior = c(.65,.35))) %>% set_mode(\"classification\") %>% translate() #> Decision Tree Model Specification (classification) #> #> Main Arguments: #> tree_depth = 5 #> #> Engine-Specific Arguments: #> parms = list(prior = c(0.65, 0.35)) #> #> Computational engine: rpart #> #> Model fit template: #> rpart::rpart(formula = missing_arg(), data = missing_arg(), weights = missing_arg(), #> maxdepth = 5, parms = list(prior = c(0.65, 0.35)))"},{"path":"https://parsnip.tidymodels.org/dev/reference/set_new_model.html","id":null,"dir":"Reference","previous_headings":"","what":"Tools to Register Models — set_new_model","title":"Tools to Register Models — set_new_model","text":"functions similar constructors can used validate conflicts underlying model structures used 
package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_new_model.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Tools to Register Models — set_new_model","text":"","code":"set_new_model(model) set_model_mode(model, mode) set_model_engine(model, mode, eng) set_model_arg(model, eng, parsnip, original, func, has_submodel) set_dependency(model, eng, pkg = \"parsnip\", mode = NULL) get_dependency(model) set_fit(model, mode, eng, value) get_fit(model) set_pred(model, mode, eng, type, value) get_pred_type(model, type) show_model_info(model) pred_value_template(pre = NULL, post = NULL, func, ...) set_encoding(model, mode, eng, options) get_encoding(model)"},{"path":"https://parsnip.tidymodels.org/dev/reference/set_new_model.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Tools to Register Models — set_new_model","text":"model single character string model type (e.g. \"rand_forest\", etc). mode single character string model mode (e.g. \"regression\"). eng single character string model engine. parsnip single character string \"harmonized\" argument name parsnip exposes. original single character string argument name underlying model function uses. func named character vector describes call function. func elements pkg fun. former optional recommended latter required. example, c(pkg = \"stats\", fun = \"lm\") used invoke usual linear regression function. cases, helpful use c(fun = \"predict\") using package's predict method. has_submodel single logical whether argument can make predictions multiple submodels . pkg options character string package name. value list conforms fit_obj pred_obj description , depending context. type single character value type prediction. Possible values : class, conf_int, numeric, pred_int, prob, quantile, raw. pre, post Optional functions pre- post-processing prediction results. ... Optional arguments passed args slot prediction objects. 
options list options engine-specific preprocessing encodings. See Details .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_new_model.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Tools to Register Models — set_new_model","text":"functions available users add models engines (package otherwise) can accessed using parsnip. thoroughly documented package web site (see references ). short, parsnip stores environment object contains information code models used (e.g. fitting, predicting, etc). functions can used add models environment well helper functions can used makes sure model data right format. check_model_exists() checks model value ensures model already registered. check_model_doesnt_exist() checks model value also checks see novel environment. options engine-specific encodings dictate predictors handled. options ensure data parsnip gives underlying model allows model fit similar possible produced directly. example, fit() used fit model formula interface, typically predictor preprocessing must conducted. glmnet good example . four options can used encodings: predictor_indicators describes whether create indicator/dummy variables factor predictors. three options: \"none\" (expand factor predictors), \"traditional\" (apply standard model.matrix() encodings), \"one_hot\" (create complete set including baseline level factors). encoding affects cases fit.model_spec() used underlying model x/y interface. Another option compute_intercept; controls whether model.matrix() include intercept formula. affects inclusion intercept column. intercept, model.matrix() computes dummy variables one factor levels. Without intercept, model.matrix() computes full set indicators first factor variable, incomplete set remainder. Next, option remove_intercept remove intercept column model.matrix() finished. can useful model function (e.g. lm()) automatically generates intercept. 
Finally, allow_sparse_x specifies whether model function can natively accommodate sparse matrix representation predictors fitting tuning.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_new_model.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Tools to Register Models — set_new_model","text":"\"build parsnip model\" https://www.tidymodels.org/learn/develop/models/","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_new_model.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Tools to Register Models — set_new_model","text":"","code":"# set_new_model(\"shallow_learning_model\") # Show the information about a model: show_model_info(\"rand_forest\") #> Information for `rand_forest` #> modes: unknown, classification, regression, censored regression #> #> engines: #> classification: randomForest, ranger¹, spark #> regression: randomForest, ranger¹, spark #> #> ¹The model can use case weights. 
#> #> arguments: #> ranger: #> mtry --> mtry #> trees --> num.trees #> min_n --> min.node.size #> randomForest: #> mtry --> mtry #> trees --> ntree #> min_n --> nodesize #> spark: #> mtry --> feature_subset_strategy #> trees --> num_trees #> min_n --> min_instances_per_node #> #> fit modules: #> engine mode #> ranger classification #> ranger regression #> randomForest classification #> randomForest regression #> spark classification #> spark regression #> #> prediction modules: #> mode engine methods #> classification randomForest class, prob, raw #> classification ranger class, conf_int, prob, raw #> classification spark class, prob #> regression randomForest numeric, raw #> regression ranger conf_int, numeric, raw #> regression spark numeric #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/set_tf_seed.html","id":null,"dir":"Reference","previous_headings":"","what":"Set seed in R and TensorFlow at the same time — set_tf_seed","title":"Set seed in R and TensorFlow at the same time — set_tf_seed","text":"Keras models requires seeds set R TensorFlow achieve reproducible results. 
function sets seeds time using version appropriate functions.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/set_tf_seed.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Set seed in R and TensorFlow at the same time — set_tf_seed","text":"","code":"set_tf_seed(seed)"},{"path":"https://parsnip.tidymodels.org/dev/reference/set_tf_seed.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Set seed in R and TensorFlow at the same time — set_tf_seed","text":"seed 1 integer value.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/show_call.html","id":null,"dir":"Reference","previous_headings":"","what":"Print the model call — show_call","title":"Print the model call — show_call","text":"Print model call","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/show_call.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Print the model call — show_call","text":"","code":"show_call(object)"},{"path":"https://parsnip.tidymodels.org/dev/reference/show_call.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Print the model call — show_call","text":"object \"model_spec\" object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/show_call.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Print the model call — show_call","text":"character string.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/show_engines.html","id":null,"dir":"Reference","previous_headings":"","what":"Display currently available engines for a model — show_engines","title":"Display currently available engines for a model — show_engines","text":"possible engines model can depend packages loaded. parsnip extension add engines existing models. 
example, poissonreg package adds additional engines poisson_reg() model available unless poissonreg loaded.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/show_engines.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Display currently available engines for a model — show_engines","text":"","code":"show_engines(x)"},{"path":"https://parsnip.tidymodels.org/dev/reference/show_engines.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Display currently available engines for a model — show_engines","text":"x name parsnip model (e.g., \"linear_reg\", \"mars\", etc.)","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/show_engines.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Display currently available engines for a model — show_engines","text":"tibble.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/show_engines.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Display currently available engines for a model — show_engines","text":"","code":"show_engines(\"linear_reg\") #> # A tibble: 7 × 2 #> engine mode #> #> 1 lm regression #> 2 glm regression #> 3 glmnet regression #> 4 stan regression #> 5 spark regression #> 6 keras regression #> 7 brulee regression"},{"path":"https://parsnip.tidymodels.org/dev/reference/stan_conf_int.html","id":null,"dir":"Reference","previous_headings":"","what":"Wrapper for stan confidence intervals — stan_conf_int","title":"Wrapper for stan confidence intervals — stan_conf_int","text":"Wrapper stan confidence intervals","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/stan_conf_int.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Wrapper for stan confidence intervals — stan_conf_int","text":"","code":"stan_conf_int(object, 
newdata)"},{"path":"https://parsnip.tidymodels.org/dev/reference/stan_conf_int.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Wrapper for stan confidence intervals — stan_conf_int","text":"object stan model fit newdata data set.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/surv_reg.html","id":null,"dir":"Reference","previous_headings":"","what":"Parametric survival regression — surv_reg","title":"Parametric survival regression — surv_reg","text":"function deprecated favor survival_reg() uses \"censored regression\" mode. surv_reg() defines parametric survival model. information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/surv_reg.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Parametric survival regression — surv_reg","text":"","code":"surv_reg(mode = \"regression\", engine = \"survival\", dist = NULL)"},{"path":"https://parsnip.tidymodels.org/dev/reference/surv_reg.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Parametric survival regression — surv_reg","text":"mode single character string prediction outcome mode. possible value model \"regression\". engine single character string specifying computational engine use fitting. dist character string probability distribution outcome. default \"weibull\".","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/surv_reg.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Parametric survival regression — surv_reg","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like : Since survival models typically involve censoring (require use survival::Surv() objects), fit.model_spec() function require survival model specified via formula interface.","code":"value <- 1 surv_reg(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/surv_reg.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Parametric survival regression — surv_reg","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/survival_reg.html","id":null,"dir":"Reference","previous_headings":"","what":"Parametric survival regression — survival_reg","title":"Parametric survival regression — survival_reg","text":"survival_reg() defines parametric survival model. function can fit censored regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . survival¹² flexsurv² flexsurvspline² information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/survival_reg.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Parametric survival regression — survival_reg","text":"","code":"survival_reg(mode = \"censored regression\", engine = \"survival\", dist = NULL)"},{"path":"https://parsnip.tidymodels.org/dev/reference/survival_reg.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Parametric survival regression — survival_reg","text":"mode single character string prediction outcome mode. possible value model \"censored regression\". engine single character string specifying computational engine use fitting. dist character string probability distribution outcome. 
default \"weibull\".","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/survival_reg.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Parametric survival regression — survival_reg","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like : Since survival models typically involve censoring (require use survival::Surv() objects), fit.model_spec() function require survival model specified via formula interface.","code":"value <- 1 survival_reg(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/survival_reg.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Parametric survival regression — survival_reg","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/survival_reg.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Parametric survival regression — survival_reg","text":"","code":"show_engines(\"survival_reg\") #> # A tibble: 0 × 2 #> # ℹ 2 variables: engine , mode survival_reg(mode = \"censored regression\", dist = \"weibull\") #> ! parsnip could not locate an implementation for `survival_reg` censored #> regression model specifications. #> ℹ The parsnip extension package censored implements support for this #> specification. #> ℹ Please install (if needed) and load to continue. 
#> Parametric Survival Regression Model Specification (censored regression) #> #> Main Arguments: #> dist = weibull #> #> Computational engine: survival #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_linear.html","id":null,"dir":"Reference","previous_headings":"","what":"Linear support vector machines — svm_linear","title":"Linear support vector machines — svm_linear","text":"svm_linear() defines support vector machine model. classification, model tries maximize width margin classes (using linear class boundary). regression, model optimizes robust loss function affected large model residuals uses linear fit. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . LiblineaR¹ kernlab information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_linear.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Linear support vector machines — svm_linear","text":"","code":"svm_linear(mode = \"unknown\", engine = \"LiblineaR\", cost = NULL, margin = NULL)"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_linear.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Linear support vector machines — svm_linear","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". engine single character string specifying computational engine use fitting. cost positive number cost predicting sample within wrong side margin margin positive number epsilon SVM insensitive loss function (regression )","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_linear.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Linear support vector machines — svm_linear","text":"function defines type model fit. 
engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. pass values programmatically, use injection operator like :","code":"value <- 1 svm_linear(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_linear.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Linear support vector machines — svm_linear","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_linear.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Linear support vector machines — svm_linear","text":"","code":"show_engines(\"svm_linear\") #> # A tibble: 4 × 2 #> engine mode #> #> 1 LiblineaR classification #> 2 LiblineaR regression #> 3 kernlab classification #> 4 kernlab regression svm_linear(mode = \"classification\") #> Linear Support Vector Machine Model Specification (classification) #> #> Computational engine: LiblineaR #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_poly.html","id":null,"dir":"Reference","previous_headings":"","what":"Polynomial support vector machines — svm_poly","title":"Polynomial support vector machines — svm_poly","text":"svm_poly() defines support vector machine model. classification, model tries maximize width margin classes using polynomial class boundary. regression, model optimizes robust loss function affected large model residuals uses polynomial functions predictors. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
kernlab¹ information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_poly.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Polynomial support vector machines — svm_poly","text":"","code":"svm_poly( mode = \"unknown\", engine = \"kernlab\", cost = NULL, degree = NULL, scale_factor = NULL, margin = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_poly.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Polynomial support vector machines — svm_poly","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". engine single character string specifying computational engine use fitting. cost positive number cost predicting sample within wrong side margin degree positive number polynomial degree. scale_factor positive number polynomial scaling factor. margin positive number epsilon SVM insensitive loss function (regression )","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_poly.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Polynomial support vector machines — svm_poly","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 svm_poly(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_poly.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Polynomial support vector machines — svm_poly","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_poly.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Polynomial support vector machines — svm_poly","text":"","code":"show_engines(\"svm_poly\") #> # A tibble: 2 × 2 #> engine mode #> #> 1 kernlab classification #> 2 kernlab regression svm_poly(mode = \"classification\", degree = 1.2) #> Polynomial Support Vector Machine Model Specification (classification) #> #> Main Arguments: #> degree = 1.2 #> #> Computational engine: kernlab #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_rbf.html","id":null,"dir":"Reference","previous_headings":"","what":"Radial basis function support vector machines — svm_rbf","title":"Radial basis function support vector machines — svm_rbf","text":"svm_rbf() defines support vector machine model. classification, model tries maximize width margin classes using nonlinear class boundary. regression, model optimizes robust loss function affected large model residuals uses nonlinear functions predictors. function can fit classification regression models. different ways fit model, method estimation chosen setting model engine. engine-specific pages model listed . 
kernlab¹ information parsnip used modeling https://www.tidymodels.org/.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_rbf.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Radial basis function support vector machines — svm_rbf","text":"","code":"svm_rbf( mode = \"unknown\", engine = \"kernlab\", cost = NULL, rbf_sigma = NULL, margin = NULL )"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_rbf.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Radial basis function support vector machines — svm_rbf","text":"mode single character string prediction outcome mode. Possible values model \"unknown\", \"regression\", \"classification\". engine single character string specifying computational engine use fitting. Possible engines listed . default model \"kernlab\". cost positive number cost predicting sample within wrong side margin rbf_sigma positive number radial basis function. margin positive number epsilon SVM insensitive loss function (regression )","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_rbf.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Radial basis function support vector machines — svm_rbf","text":"function defines type model fit. engine specified, method fit model also defined. See set_engine() setting engine, including set engine arguments. model trained fit fit() function used data. arguments function mode engine captured quosures. 
pass values programmatically, use injection operator like :","code":"value <- 1 svm_rbf(argument = !!value)"},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_rbf.html","id":"references","dir":"Reference","previous_headings":"","what":"References","title":"Radial basis function support vector machines — svm_rbf","text":"https://www.tidymodels.org, Tidy Modeling R, searchable table parsnip models","code":""},{"path":[]},{"path":"https://parsnip.tidymodels.org/dev/reference/svm_rbf.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Radial basis function support vector machines — svm_rbf","text":"","code":"show_engines(\"svm_rbf\") #> # A tibble: 4 × 2 #> engine mode #> #> 1 kernlab classification #> 2 kernlab regression #> 3 liquidSVM classification #> 4 liquidSVM regression svm_rbf(mode = \"classification\", rbf_sigma = 0.2) #> Radial Basis Function Support Vector Machine Model Specification (classification) #> #> Main Arguments: #> rbf_sigma = 0.2 #> #> Computational engine: kernlab #>"},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy._LiblineaR.html","id":null,"dir":"Reference","previous_headings":"","what":"tidy methods for LiblineaR models — tidy._LiblineaR","title":"tidy methods for LiblineaR models — tidy._LiblineaR","text":"tidy() methods various LiblineaR models return coefficients parsnip model fit.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy._LiblineaR.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"tidy methods for LiblineaR models — tidy._LiblineaR","text":"","code":"# S3 method for `_LiblineaR` tidy(x, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy._LiblineaR.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"tidy methods for LiblineaR models — tidy._LiblineaR","text":"x fitted parsnip model used LiblineaR engine. ... 
used","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy._LiblineaR.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"tidy methods for LiblineaR models — tidy._LiblineaR","text":"tibble columns term estimate.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy._elnet.html","id":null,"dir":"Reference","previous_headings":"","what":"tidy methods for glmnet models — tidy._elnet","title":"tidy methods for glmnet models — tidy._elnet","text":"tidy() methods various glmnet models return coefficients specific penalty value used parsnip model fit.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy._elnet.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"tidy methods for glmnet models — tidy._elnet","text":"","code":"# S3 method for `_elnet` tidy(x, penalty = NULL, ...) # S3 method for `_lognet` tidy(x, penalty = NULL, ...) # S3 method for `_multnet` tidy(x, penalty = NULL, ...) # S3 method for `_fishnet` tidy(x, penalty = NULL, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy._elnet.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"tidy methods for glmnet models — tidy._elnet","text":"x fitted parsnip model used glmnet engine. penalty single numeric value. none given, value specified model specification used. ... used","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy._elnet.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"tidy methods for glmnet models — tidy._elnet","text":"tibble columns term, estimate, penalty. 
multinomial mode used, additional class column included.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.model_fit.html","id":null,"dir":"Reference","previous_headings":"","what":"Turn a parsnip model object into a tidy tibble — tidy.model_fit","title":"Turn a parsnip model object into a tidy tibble — tidy.model_fit","text":"method tidies model parsnip model object, exists.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.model_fit.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Turn a parsnip model object into a tidy tibble — tidy.model_fit","text":"","code":"# S3 method for model_fit tidy(x, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.model_fit.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Turn a parsnip model object into a tidy tibble — tidy.model_fit","text":"x object converted tidy tibble::tibble(). ... Additional arguments tidying method.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.model_fit.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Turn a parsnip model object into a tidy tibble — tidy.model_fit","text":"tibble","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.nullmodel.html","id":null,"dir":"Reference","previous_headings":"","what":"Tidy method for null models — tidy.nullmodel","title":"Tidy method for null models — tidy.nullmodel","text":"Return results nullmodel tibble","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.nullmodel.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Tidy method for null models — tidy.nullmodel","text":"","code":"# S3 method for nullmodel tidy(x, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.nullmodel.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Tidy method for null 
models — tidy.nullmodel","text":"x nullmodel object. ... used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.nullmodel.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Tidy method for null models — tidy.nullmodel","text":"tibble column value.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/tidy.nullmodel.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Tidy method for null models — tidy.nullmodel","text":"","code":"nullmodel(mtcars[,-1], mtcars$mpg) %>% tidy() #> # A tibble: 1 × 1 #> value #> #> 1 20.1"},{"path":"https://parsnip.tidymodels.org/dev/reference/translate.html","id":null,"dir":"Reference","previous_headings":"","what":"Resolve a Model Specification for a Computational Engine — translate","title":"Resolve a Model Specification for a Computational Engine — translate","text":"translate() translate model specification code object specific particular engine (e.g. R package). translates generic parameters counterparts.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/translate.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Resolve a Model Specification for a Computational Engine — translate","text":"","code":"translate(x, ...) # S3 method for default translate(x, engine = x$engine, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/translate.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Resolve a Model Specification for a Computational Engine — translate","text":"x model specification. ... currently used. 
engine computational engine model (see ?set_engine).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/translate.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Resolve a Model Specification for a Computational Engine — translate","text":"translate() produces template call lacks specific argument values (data, etc). filled fit() called specifics data model. call may also include tune() arguments specification. handle tune() arguments, need use tune package. information see https://www.tidymodels.org/start/tuning/ contain resolved argument names specific model fitting function/engine. function can useful need understand parsnip goes generic model specific model fitting function. Note: function used internally users use understand underlying syntax . used modify model specification.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/translate.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Resolve a Model Specification for a Computational Engine — translate","text":"","code":"lm_spec <- linear_reg(penalty = 0.01) # `penalty` is tranlsated to `lambda` translate(lm_spec, engine = \"glmnet\") #> Linear Regression Model Specification (regression) #> #> Main Arguments: #> penalty = 0.01 #> #> Computational engine: glmnet #> #> Model fit template: #> glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), #> family = \"gaussian\") # `penalty` not applicable for this model. 
translate(lm_spec, engine = \"lm\") #> Linear Regression Model Specification (regression) #> #> Main Arguments: #> penalty = 0.01 #> #> Computational engine: lm #> #> Model fit template: #> stats::lm(formula = missing_arg(), data = missing_arg(), weights = missing_arg()) # `penalty` is tranlsated to `reg_param` translate(lm_spec, engine = \"spark\") #> Linear Regression Model Specification (regression) #> #> Main Arguments: #> penalty = 0.01 #> #> Computational engine: spark #> #> Model fit template: #> sparklyr::ml_linear_regression(x = missing_arg(), formula = missing_arg(), #> weights = missing_arg(), reg_param = 0.01) # with a placeholder for an unknown argument value: translate(linear_reg(penalty = tune(), mixture = tune()), engine = \"glmnet\") #> Linear Regression Model Specification (regression) #> #> Main Arguments: #> penalty = tune() #> mixture = tune() #> #> Computational engine: glmnet #> #> Model fit template: #> glmnet::glmnet(x = missing_arg(), y = missing_arg(), weights = missing_arg(), #> alpha = tune(), family = \"gaussian\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/type_sum.model_spec.html","id":null,"dir":"Reference","previous_headings":"","what":"Succinct summary of parsnip object — type_sum.model_spec","title":"Succinct summary of parsnip object — type_sum.model_spec","text":"type_sum controls objects shown inside tibble columns.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/type_sum.model_spec.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Succinct summary of parsnip object — type_sum.model_spec","text":"","code":"# S3 method for model_spec type_sum(x) # S3 method for model_fit type_sum(x)"},{"path":"https://parsnip.tidymodels.org/dev/reference/type_sum.model_spec.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Succinct summary of parsnip object — type_sum.model_spec","text":"x model_spec model_fit object 
summarise.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/type_sum.model_spec.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Succinct summary of parsnip object — type_sum.model_spec","text":"character value.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/type_sum.model_spec.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Succinct summary of parsnip object — type_sum.model_spec","text":"model_spec objects, summary \"spec[?]\" \"spec[+]\". former indicates either model mode declared specification tune() parameters. Otherwise, latter shown. fitted models, either \"fit[x]\" \"fit[+]\" used \"x\" implies model fit failed way.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/update_model_info_file.html","id":null,"dir":"Reference","previous_headings":"","what":"Save information about models — update_model_info_file","title":"Save information about models — update_model_info_file","text":"function writes tab delimited file package capture information known models. information includes packages tidymodels GitHub repository well packages known work well tidymodels packages (e.g. parsnip also tune, etc.). may model definitions extension packages included . 
data used document engines model function man page.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/update_model_info_file.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Save information about models — update_model_info_file","text":"","code":"update_model_info_file(path = \"inst/models.tsv\")"},{"path":"https://parsnip.tidymodels.org/dev/reference/update_model_info_file.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Save information about models — update_model_info_file","text":"path character string location tab delimited file.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/update_model_info_file.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Save information about models — update_model_info_file","text":"See model implementation guidelines best practices modeling modeling packages. highly recommended known parsnip extension packages loaded. 
unexported parsnip function extensions() list .","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/varying.html","id":null,"dir":"Reference","previous_headings":"","what":"A placeholder function for argument values — varying","title":"A placeholder function for argument values — varying","text":"varying() used parameter specified later date.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/varying.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"A placeholder function for argument values — varying","text":"","code":"varying()"},{"path":"https://parsnip.tidymodels.org/dev/reference/varying_args.html","id":null,"dir":"Reference","previous_headings":"","what":"Determine varying arguments — varying_args.model_spec","title":"Determine varying arguments — varying_args.model_spec","text":"varying_args() takes model specification recipe returns tibble information possible varying arguments whether actually varying. id column determined differently depending whether model_spec recipe used. model_spec, first class used. recipe, unique step id used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/varying_args.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Determine varying arguments — varying_args.model_spec","text":"","code":"# S3 method for model_spec varying_args(object, full = TRUE, ...) # S3 method for recipe varying_args(object, full = TRUE, ...) # S3 method for step varying_args(object, full = TRUE, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/varying_args.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Determine varying arguments — varying_args.model_spec","text":"object model_spec recipe. full single logical. possible varying parameters returned? FALSE, parameters actually varying returned. ... 
currently used.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/varying_args.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Determine varying arguments — varying_args.model_spec","text":"tibble columns parameter name (name), whether contains varying value (varying), id object (id), class used call method (type).","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/varying_args.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Determine varying arguments — varying_args.model_spec","text":"","code":"# List all possible varying args for the random forest spec rand_forest() %>% varying_args() #> Warning: `varying_args()` was deprecated in parsnip 0.1.8. #> ℹ Please use `tune_args()` instead. #> # A tibble: 3 × 4 #> name varying id type #> #> 1 mtry FALSE rand_forest model_spec #> 2 trees FALSE rand_forest model_spec #> 3 min_n FALSE rand_forest model_spec # mtry is now recognized as varying rand_forest(mtry = varying()) %>% varying_args() #> # A tibble: 3 × 4 #> name varying id type #> #> 1 mtry TRUE rand_forest model_spec #> 2 trees FALSE rand_forest model_spec #> 3 min_n FALSE rand_forest model_spec # Even engine specific arguments can vary rand_forest() %>% set_engine(\"ranger\", sample.fraction = varying()) %>% varying_args() #> # A tibble: 4 × 4 #> name varying id type #> #> 1 mtry FALSE rand_forest model_spec #> 2 trees FALSE rand_forest model_spec #> 3 min_n FALSE rand_forest model_spec #> 4 sample.fraction TRUE rand_forest model_spec # List only the arguments that actually vary rand_forest() %>% set_engine(\"ranger\", sample.fraction = varying()) %>% varying_args(full = FALSE) #> # A tibble: 1 × 4 #> name varying id type #> #> 1 sample.fraction TRUE rand_forest model_spec rand_forest() %>% set_engine( \"randomForest\", strata = Class, sampsize = varying() ) %>% varying_args() #> # A tibble: 5 × 4 #> name varying id type #> #> 1 mtry FALSE rand_forest 
model_spec #> 2 trees FALSE rand_forest model_spec #> 3 min_n FALSE rand_forest model_spec #> 4 strata FALSE rand_forest model_spec #> 5 sampsize TRUE rand_forest model_spec"},{"path":"https://parsnip.tidymodels.org/dev/reference/xgb_train.html","id":null,"dir":"Reference","previous_headings":"","what":"Boosted trees via xgboost — xgb_train","title":"Boosted trees via xgboost — xgb_train","text":"xgb_train() xgb_predict() wrappers xgboost tree-based models model arguments main function.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/xgb_train.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Boosted trees via xgboost — xgb_train","text":"","code":"xgb_train( x, y, weights = NULL, max_depth = 6, nrounds = 15, eta = 0.3, colsample_bynode = NULL, colsample_bytree = NULL, min_child_weight = 1, gamma = 0, subsample = 1, validation = 0, early_stop = NULL, counts = TRUE, event_level = c(\"first\", \"second\"), ... ) xgb_predict(object, new_data, ...)"},{"path":"https://parsnip.tidymodels.org/dev/reference/xgb_train.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Boosted trees via xgboost — xgb_train","text":"x data frame matrix predictors y vector (factor numeric) matrix (numeric) outcome data. max_depth integer maximum depth tree. nrounds integer number boosting iterations. eta numeric value zero one control learning rate. colsample_bynode Subsampling proportion columns node within tree. See counts argument . default uses columns. colsample_bytree Subsampling proportion columns tree. See counts argument . default uses columns. min_child_weight numeric value minimum sum instance weights needed child continue split. gamma number minimum loss reduction required make partition leaf node tree subsample Subsampling proportion rows. default, training data used. validation proportion data used performance assessment potential early stopping. early_stop integer NULL. 
NULL, number training iterations without improvement stopping. validation used, performance base validation set; otherwise, training set used. counts logical. FALSE, colsample_bynode colsample_bytree assumed proportions proportion columns affects (instead counts). event_level binary classification, single string either \"first\" \"second\" pass along describing level outcome considered \"event\". ... options pass xgb.train() xgboost's method predict(). new_data rectangular data object, data frame.","code":""},{"path":"https://parsnip.tidymodels.org/dev/reference/xgb_train.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Boosted trees via xgboost — xgb_train","text":"fitted xgboost object.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-development-version","dir":"Changelog","previous_headings":"","what":"parsnip (development version)","title":"parsnip (development version)","text":"Fixed bug fitting model types \"spark\" engine (#1045). Fixed issues metadata \"brulee\" engine several arguments mistakenly protected. (#1050, #1054) .filter_eval_time() moved survival standalone file. Improved errors documentation related special terms formulas. See ?model_formula learn . (#770, #1014) Improved errors cases outcome column mis-specified. (#1003) Fixed documentation mlp(engine = \"brulee\"): default values learn_rate epochs swapped (#1018). new_data argument predict() method censoring_model_reverse_km objects deprecated (#965). computing censoring weights, resulting vectors longer named (#1023). Fixed bug integration workflows using model formula formula preprocessor result double intercept (#1033). predict() method censoring_model_reverse_km objects now checks ... 
empty (#1029).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-111","dir":"Changelog","previous_headings":"","what":"parsnip 1.1.1","title":"parsnip 1.1.1","text":"CRAN release: 2023-08-17 Fixed bug prediction rank deficient lm() models produced .pred_res instead .pred. (#985) Fixed bug sparse data coerced non-sparse format predict(). BART models dbarts engine, predict() can now also return standard error confidence prediction intervals (#976). augment() now works censored regression models. censored regression helper functions exported: .extract_surv_status() .extract_surv_time() (#973, #980). Fixed bug boost_tree() models couldn’t fit 1 predictor validation argument used. (#994)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-110","dir":"Changelog","previous_headings":"","what":"parsnip 1.1.0","title":"parsnip 1.1.0","text":"CRAN release: 2023-04-12 release parsnip contains number new features bug fixes, accompanied several optimizations substantially decrease time fit() predict() package.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"improvements-to-glmnet-engine-interfaces-1-1-0","dir":"Changelog","previous_headings":"","what":"Improvements to \"glmnet\" engine interfaces","title":"parsnip 1.1.0","text":"glmnet models fitted base-R family objects now supported linear_reg(), logistic_reg(), multinomial_reg() (#890). multi_predict() methods linear_reg(), logistic_reg(), multinom_reg() models fitted \"glmnet\" engine now check type better error accordingly (#900). .organize_glmnet_pred() now expects predictions single penalty value (#876).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"survival-analysis-1-1-0","dir":"Changelog","previous_headings":"","what":"Survival analysis","title":"parsnip 1.1.0","text":"time argument predict_survival() predict_hazard() deprecated favor new eval_time argument (#936). 
Added several internal functions (help work Surv objects) standalone file can used packages via usethis::use_standalone(\"tidymodels/parsnip\"). changes provide tooling downstream packages handle inverse probability censoring weights (#893, #897, #937). internal method generating inverse probability censoring weights (IPCW) Graf et al (1999) available via .censoring_weights_graf().","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"bug-fixes-1-1-0","dir":"Changelog","previous_headings":"","what":"Bug fixes","title":"parsnip 1.1.0","text":"Made fit() behave consistently respect missingness classification setting. Previously, fit() erroneously raised error class outcome complete cases, now always passes along complete cases handled modeling function (#888). Fixed bug model fits engine = \"earth\" fail package’s namespace hadn’t attached (#251). Fixed bug model fits factor predictors engine = \"kknn\" fail package’s namespace hadn’t attached (#264). Fixed bug prediction boosted tree model fitted \"xgboost\" using custom objective function (#875).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-1-1-0","dir":"Changelog","previous_headings":"","what":"Other changes","title":"parsnip 1.1.0","text":"Implemented number optimizations parsnip’s backend substantially decrease evaluation time fit() predict() (#901, #902, #910, #921, #929, #923, #931, #932, #933). logistic_reg() now warn fit() outcome two levels (#545). Rather implemented method, check new_data argument mistakenly passed newdata multi_predict() now happens generic. Packages re-exporting multi_predict() generic implementing now-duplicate checks may see new failures can remove analogous checks. check already existed predict() methods (via predict.model_fit()) parsnip multi_predict() methods (#525). Functions now indicate class outcome outcome wrong class (#887). minimum version R now 3.5 (#926). 
Moved forward deprecation req_pkgs() favor required_pkgs(). function now error (#871). Transitioned soft-deprecations least year old warn-deprecations. changes apply fit_control(), surv_reg(), varying(), varying_args(), \"liquidSVM\" engine. Various bug fixes improvements documentation.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-104","dir":"Changelog","previous_headings":"","what":"parsnip 1.0.4","title":"parsnip 1.0.4","text":"CRAN release: 2023-02-22 censored regression models, “reverse Kaplan-Meier” curve computed censoring distribution. can used evaluating type model (#855). model specification methods generics::tune_args() generics::tunable() now registered unconditionally (tidymodels/workflows#192).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-103","dir":"Changelog","previous_headings":"","what":"parsnip 1.0.3","title":"parsnip 1.0.3","text":"CRAN release: 2022-11-11 Adds documentation tuning infrastructure new flexsurvspline engine survival_reg() model specification censored package (@mattwarkentin, #831). matrix interface fitting fit_xy() now works \"censored regression\" mode (#829). num_leaves argument boost_tree()s lightgbm engine (via bonsai package) now tunable. change data checking code resulted 3-fold speed-parsnip (#835)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-102","dir":"Changelog","previous_headings":"","what":"parsnip 1.0.2","title":"parsnip 1.0.2","text":"CRAN release: 2022-10-01 bagged neural network model added (bag_mlp()). Engine implementations live baguette package. Fixed installation failures due undocumented knitr installation dependency (#785). fit_xy() now fails model mode unknown. brulee engine-specific tuning parameters updated. changes can used dials version > 1.0.0. fit() fit_xy() doesn’t error anymore control argument isn’t control_parsnip() object. 
work long object passed control includes elements control_parsnip(). Improved prompts related missing (loaded) extension packages well better handling model mode conflicts.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-101","dir":"Changelog","previous_headings":"","what":"parsnip 1.0.1","title":"parsnip 1.0.1","text":"CRAN release: 2022-08-18 Enabled passing additional engine arguments xgboost boost_tree() engine. supply engine-specific arguments documented xgboost::xgb.train() arguments passed via params, supply list elements directly named arguments set_engine(). Read ?details_boost_tree_xgboost (#787).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-100","dir":"Changelog","previous_headings":"","what":"parsnip 1.0.0","title":"parsnip 1.0.0","text":"CRAN release: 2022-06-16","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"model-specification-changes-1-0-0","dir":"Changelog","previous_headings":"","what":"Model Specification Changes","title":"parsnip 1.0.0","text":"Enable use case weights models support . show_model_info() now indicates models can utilize case weights. Model type functions now message informatively needed parsnip extension package loaded (#731). Refactored internals model specification printing functions. changes non-breaking extension packages, new print_model_spec() helper exported use extensions desired (#739).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"bug-fixes-1-0-0","dir":"Changelog","previous_headings":"","what":"Bug fixes","title":"parsnip 1.0.0","text":"Fixed bug previously set engine arguments propagate update() methods despite fresh = TRUE (#704). Fixed bug error thrown arguments model functions namespaced (#745). predict(type = \"prob\") now provide error outcome variable level called \"class\" (#720). 
inconsistency probability type predictions two-class GAM models fixed (#708) Fixed translated printing null_model() (#752)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-1-0-0","dir":"Changelog","previous_headings":"","what":"Other changes","title":"parsnip 1.0.0","text":"Added glm_grouped() function convert long data grouped format required glm() logistic regression. xgb_train() now allows case weights Added ctree_train() cforest_train() wrappers functions partykit package. Engines added parsnip extension packages. Exported xgb_predict() wraps xgboost’s predict() method use parsnip extension packages (#688). Added developer function, .model_param_name_key translates names tuning parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-021","dir":"Changelog","previous_headings":"","what":"parsnip 0.2.1","title":"parsnip 0.2.1","text":"CRAN release: 2022-03-17 Fixed major bug spark models induced previous version (#671). Updated parsnip add-new models engines. Updated parameter ranges tunable() methods added missing engine argument brulee models. Added information install mixOmics package PLS models (#680)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-020","dir":"Changelog","previous_headings":"","what":"parsnip 0.2.0","title":"parsnip 0.2.0","text":"CRAN release: 2022-03-09","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"model-specification-changes-0-2-0","dir":"Changelog","previous_headings":"","what":"Model Specification Changes","title":"parsnip 0.2.0","text":"Bayesian additive regression trees (BART) added via bart() function. Added \"glm\" engine linear_reg() numeric outcomes (#624). 
Added brulee engines linear_reg(), logistic_reg(), multinom_reg() mlp().","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"bug-fixes-0-2-0","dir":"Changelog","previous_headings":"","what":"Bug fixes","title":"parsnip 0.2.0","text":"bug class predictions two-class GAM models fixed (#541) Fixed bug logistic_reg() LiblineaR engine (#552). list column produced creating survival probability predictions now always called .pred (.pred_survival used inside list column). Fixed outcome type checking affecting subset regression models (#625). Prediction using multinom_reg() nnet engine single row longer fails (#612).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-0-2-0","dir":"Changelog","previous_headings":"","what":"Other Changes","title":"parsnip 0.2.0","text":"xy interface used underlying model expects use matrix, better warning issued predictors contain non-numeric columns (including dates). fit time calculated verbosity argument control_parsnip() 2L greater. Also, call system.time() now uses gcFirst = FALSE. (#611) fit_control() soft-deprecated favor control_parsnip(). New extract_parameter_set_dials() method extract parameter sets model specs. New extract_parameter_dials() method extract single parameter model specs. Argument interval added prediction: types \"survival\" \"quantile\", estimates confidence prediction interval can added available (#615). set_dependency() now allows developers create package requirements specific model’s mode (#604). varying() soft-deprecated favor tune(). varying_args() soft-deprecated favor tune_args(). autoplot() method added glmnet objects, showing coefficient paths versus penalty values (#642). parsnip now robust working keras tensorflow larger range versions (#596). 
xgboost engines now use new iterationrange parameter instead deprecated ntreelimit (#656).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"developer-0-2-0","dir":"Changelog","previous_headings":"","what":"Developer","title":"parsnip 0.2.0","text":"Models information can re-registered long information registered . helpful packages add new engines use devtools::load_all() (#653).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-017","dir":"Changelog","previous_headings":"","what":"parsnip 0.1.7","title":"parsnip 0.1.7","text":"CRAN release: 2021-07-21","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"model-specification-changes-0-1-7","dir":"Changelog","previous_headings":"","what":"Model Specification Changes","title":"parsnip 0.1.7","text":"model function (gen_additive_mod()) added generalized additive models. model now default engine used model defined. default model listed help documents. also adds functionality declare engine model specification function. set_engine() still required engine-specific arguments need added. (#513) parsnip now checks valid combination engine mode (#529) default engine multinom_reg() changed nnet.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-0-1-7","dir":"Changelog","previous_headings":"","what":"Other Changes","title":"parsnip 0.1.7","text":"helper functions .convert_form_to_xy_fit(), .convert_form_to_xy_new(), .convert_xy_to_form_fit(), .convert_xy_to_form_new() converting formula matrix interface now exported developer use (#508). Fix bug augment() non-predictor, non-outcome variables included data (#510). New article “Fitting Predicting parsnip” contains examples various combinations model type engine. 
( #527)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-016","dir":"Changelog","previous_headings":"","what":"parsnip 0.1.6","title":"parsnip 0.1.6","text":"CRAN release: 2021-05-27","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"model-specification-changes-0-1-6","dir":"Changelog","previous_headings":"","what":"Model Specification Changes","title":"parsnip 0.1.6","text":"new linear SVM model svm_linear() now available LiblineaR engine (#424) kernlab engine (#438), LiblineaR engine available logistic_reg() well (#429). models can use sparse matrices via fit_xy() (#447) tidy method (#474). models glmnet engines: single value required penalty (either single numeric value value tune()) (#481). special argument called path_values can used set lambda path specific set numbers (independent value penalty). pure ridge regression models (.e., mixture = 1) generate incorrect values path include zero. See issue #431 discussion (#486). liquidSVM engine svm_rbf() deprecated due package’s removal CRAN. (#425) xgboost engine boosted trees translating mtry xgboost’s colsample_bytree. now map mtry colsample_bynode since consistent random forest works. colsample_bytree can still optimized passing engine argument. colsample_bynode added xgboost parsnip package code written. (#495) xgboost, mtry colsample_bytree can passed integer counts proportions, subsample validation always proportions. xgb_train() now new option counts (TRUE FALSE) states scale mtry colsample_bytree used. (#461)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-0-1-6","dir":"Changelog","previous_headings":"","what":"Other Changes","title":"parsnip 0.1.6","text":"Re-licensed package GPL-2 MIT. See consent copyright holders . set_mode() now checks mode compatible model class, similar new_model_spec() (@jtlandis, #467). set_mode() set_engine() now error NULL missing arguments (#503). 
Re-organized model documentation: update methods moved model help files (#479). model/engine combination help page. model help page dynamic bulleted list engines links individual help pages. generics::required_pkgs() extended parsnip objects. Prediction functions now give consistent error user uses unavailable value type (#489) augment() method changed avoid failing model enable class probabilities. method now returns tibbles despite input data class (#487) (#478) xgboost engines now respect event_level option predictions (#460).","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-015","dir":"Changelog","previous_headings":"","what":"parsnip 0.1.5","title":"parsnip 0.1.5","text":"CRAN release: 2021-01-19 RStudio add-available makes writing multiple parsnip model specifications source window. can accessed via IDE addin menus calling parsnip_addin(). xgboost models, users can now pass objective set_engine(\"xgboost\"). (#403) Changes test cases CRAN get xgboost work Solaris configuration. now augument() method fitted models. See augment.model_fit. (#401) Column names x now required fit_xy() used. (#398) now event_level argument xgboost engine. (#420) New mode “censored regression” new prediction types “linear_pred”, “time”, “survival”, “hazard”. (#396) Censored regression models use fit_xy() (use fit()). (#442)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-014","dir":"Changelog","previous_headings":"","what":"parsnip 0.1.4","title":"parsnip 0.1.4","text":"CRAN release: 2020-10-27 show_engines() provide information current set model. three models (glmnet, xgboost, ranger), enable sparse matrix use via fit_xy() (#373). added protections added function arguments dependent data dimensions (e.g., mtry, neighbors, min_n, etc). 
(#184) Infrastructure improved running parsnip models parallel using PSOCK clusters Windows.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-013","dir":"Changelog","previous_headings":"","what":"parsnip 0.1.3","title":"parsnip 0.1.3","text":"CRAN release: 2020-08-04 glance() method model_fit objects added (#325) Specific tidy() methods glmnet models fit via parsnip created coefficients specific fitted parsnip model returned.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"fixes-0-1-3","dir":"Changelog","previous_headings":"","what":"Fixes","title":"parsnip 0.1.3","text":"glmnet models fitting two intercepts (#349) various update() methods now work engine-specific parameters.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-012","dir":"Changelog","previous_headings":"","what":"parsnip 0.1.2","title":"parsnip 0.1.2","text":"CRAN release: 2020-07-03","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"breaking-changes-0-1-2","dir":"Changelog","previous_headings":"","what":"Breaking Changes","title":"parsnip 0.1.2","text":"parsnip now options set specific types predictor encodings different models. example, ranger models run using parsnip workflows thing creating indicator variables. encodings can overridden using blueprint options workflows. consequence, possible get different model fit previous versions parsnip. details specific encoding changes . (#326)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-0-1-2","dir":"Changelog","previous_headings":"","what":"Other Changes","title":"parsnip 0.1.2","text":"tidyr >= 1.0.0 now required. SVM models produced kernlab now use formula method (see breaking change notice ). change due ksvm() made indicator variables factor predictors (one-hot encodings). Since ordinary formula method , data passed -ksvm() results closer one get ksmv() called directly. 
MARS models produced earth now use formula method. xgboost, one-hot encoding used indicator variables created. --hood changes made non-standard data arguments modeling packages can accommodated. (#315)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"new-features-0-1-2","dir":"Changelog","previous_headings":"","what":"New Features","title":"parsnip 0.1.2","text":"new main argument added boost_tree() called stop_iter early stopping. xgb_train() function gained arguments early stopping percentage data leave validation set. fit() used underlying model uses formula, actual formula pass model (instead placeholder). makes model call better. function named repair_call() added. can help change underlying models call object better reflect obtained model function used directly (instead via parsnip). useful user chooses formula interface model uses formula interface. also limited use recipes used construct feature set workflows tune. predict() function now checks see required modeling packages installed. packages loaded (attached). (#249) (#308) (tidymodels/workflows#45) function req_pkgs() user interface determining required packages. 
(#308)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-011","dir":"Changelog","previous_headings":"","what":"parsnip 0.1.1","title":"parsnip 0.1.1","text":"CRAN release: 2020-05-06","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"new-features-0-1-1","dir":"Changelog","previous_headings":"","what":"New Features","title":"parsnip 0.1.1","text":"liquidSVM added engine svm_rbf() (#300)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"fixes-0-1-1","dir":"Changelog","previous_headings":"","what":"Fixes","title":"parsnip 0.1.1","text":"error message missing packages fixed (#289 #292)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-0-1-1","dir":"Changelog","previous_headings":"","what":"Other Changes","title":"parsnip 0.1.1","text":"S3 dispatch tidy() broken R 4.0.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-005","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.5","title":"parsnip 0.0.5","text":"CRAN release: 2020-01-07","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"fixes-0-0-5","dir":"Changelog","previous_headings":"","what":"Fixes","title":"parsnip 0.0.5","text":"bug (#206 #234) fixed caused error predicting multinomial glmnet model.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-0-0-5","dir":"Changelog","previous_headings":"","what":"Other Changes","title":"parsnip 0.0.5","text":"glmnet removed dependency since new version depends 3.6.0 greater. Keeping constrain parsnip requirement. glmnet tests run locally. set internal functions now exported. 
helpful creating new package registers new model specifications.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"new-features-0-0-5","dir":"Changelog","previous_headings":"","what":"New Features","title":"parsnip 0.0.5","text":"nnet added engine multinom_reg() #209","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"breaking-changes-0-0-5","dir":"Changelog","previous_headings":"","what":"Breaking Changes","title":"parsnip 0.0.5","text":"mis-mapped parameters (going parsnip underlying model function) spark boosted trees keras models. See 897c927.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-004","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.4","title":"parsnip 0.0.4","text":"CRAN release: 2019-11-02","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"new-features-0-0-4","dir":"Changelog","previous_headings":"","what":"New Features","title":"parsnip 0.0.4","text":"time elapsed model fitting stored $elapsed slot parsnip model object, printed model object printed. default parameter ranges updated SVM, KNN, MARS models. model udpate() methods gained parameters argument cases parameters contained tibble list. fit_control() soft-deprecated favor control_parsnip().","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"fixes-0-0-4","dir":"Changelog","previous_headings":"","what":"Fixes","title":"parsnip 0.0.4","text":"bug fixed standardizing output column types multi_predict predict multinom_reg. bug fixed related using data descriptors fit_xy(). bug fixed related column names generated multi_predict(). top-level tibble always column named .pred list column contains tibbles across sub-models. column names sub-model tibbles names consistent predict() (previously incorrect). See 43c15db. 
bug fixed standardizing column names nnet class probability predictions.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-0031","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.3.1","title":"parsnip 0.0.3.1","text":"CRAN release: 2019-08-06 Test case update due CRAN running extra tests (#202)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-003","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.3","title":"parsnip 0.0.3","text":"CRAN release: 2019-07-31 Unplanned release based CRAN requirements Solaris.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"breaking-changes-0-0-3","dir":"Changelog","previous_headings":"","what":"Breaking Changes","title":"parsnip 0.0.3","text":"method parsnip stores model information changed. custom models previous versions need use new method registering models. methods detailed ?get_model_env package vignette adding models. mode needs declared models can used one mode prior fitting /translation. surv_reg(), engine uses survival package now called survival instead survreg. glmnet models, full regularization path always fit regardless value given penalty. Previously, model fit passing penalty glmnet’s lambda argument model make predictions specific values. (#195)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"new-features-0-0-3","dir":"Changelog","previous_headings":"","what":"New Features","title":"parsnip 0.0.3","text":"add_rowindex() can create column called .row data frame. computational engine explicitly set, default used. default documented corresponding model page. warning issued fit time unless verbosity zero. nearest_neighbor() gained multi_predict method. multi_predict() documentation little better organized. suite internal functions added help upcoming model tuning features. 
parsnip object always saved name(s) outcome variable(s) proper naming predicted values.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-002","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.2","title":"parsnip 0.0.2","text":"CRAN release: 2019-03-22 Small release driven changes sample() current r-devel.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"new-features-0-0-2","dir":"Changelog","previous_headings":"","what":"New Features","title":"parsnip 0.0.2","text":"“null model” now available fits predictor-free model (using mean outcome regression mode classification). fit_xy() can take single column data frame matrix y without error","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"other-changes-0-0-2","dir":"Changelog","previous_headings":"","what":"Other Changes","title":"parsnip 0.0.2","text":"varying_args() now full argument control whether full set possible varying arguments returned (opposed arguments actually varying). fit_control() returns S3 method. classification models, error occurs outcome data encoded factors (#115). prediction modules (e.g. predict_class, predict_numeric, etc) de-exported. internal functions used users users using . event time data set (check_times) included time (seconds) run R CMD check using “r-devel-windows-ix86+x86_64` flavor. Packages errored censored.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"bug-fixes-0-0-2","dir":"Changelog","previous_headings":"","what":"Bug Fixes","title":"parsnip 0.0.2","text":"varying_args() now uses version generics package. means first argument, x, renamed object align generics. recipes step method varying_args(), now error checking catch user tries specify argument varying varying (example, id) (#132). find_varying(), internal function detecting varying arguments, now returns correct results size 0 argument provided. 
can also now detect varying arguments nested deeply call (#131, #134). multinomial regression, .pred_ prefix now added prediction column names (#107). multinomial regression using glmnet, multi_predict() now pulls correct default penalty (#108). Confidence prediction intervals logistic regression computed intervals single level. now computed. (#156)","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-001","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.1","title":"parsnip 0.0.1","text":"CRAN release: 2018-11-12 First CRAN release","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-0009005","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.0.9005","title":"parsnip 0.0.0.9005","text":"engine, associated arguments, now specified using set_engine(). engine argument","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-0009004","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.0.9004","title":"parsnip 0.0.0.9004","text":"Arguments modeling functions now captured quosures. others replaced ... Data descriptor names changed now functions. descriptor definitions “cols” “preds” switched.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-0009003","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.0.9003","title":"parsnip 0.0.0.9003","text":"regularization changed penalty models consistent change. mode chosen model specification, assigned time fit. 51 underlying modeling packages now loaded namespace. exceptions noted documentation model. example, predict methods, earth package need attached fully operational.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-0009002","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.0.9002","title":"parsnip 0.0.0.9002","text":"consistent snake_case, newdata changed new_data. 
predict_raw method added.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-0009001","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.0.9001","title":"parsnip 0.0.0.9001","text":"package dependency suffered new change.","code":""},{"path":"https://parsnip.tidymodels.org/dev/news/index.html","id":"parsnip-0009000","dir":"Changelog","previous_headings":"","what":"parsnip 0.0.0.9000","title":"parsnip 0.0.0.9000","text":"fit interface previously used cover x/y interface well formula interface. Now, fit() formula interface fit_xy() x/y interface. Added NEWS.md file track changes package. predict methods overhauled consistent. MARS added.","code":""}]