Using Bayesian Optimization to Improve Hyperparameter Search in TPOT
@InProceedings{kenny:2024:GECCO,
  author =       "Angus Kenny and Tapabrata Ray and Steffen Limmer and
                  Hemant Kumar Singh and Tobias Rodemann and
                  Markus Olhofer",
  title =        "Using Bayesian Optimization to Improve Hyperparameter
                  Search in {TPOT}",
  booktitle =    "Proceedings of the 2024 Genetic and Evolutionary
                  Computation Conference",
  year =         "2024",
  editor =       "Jean-Baptiste Mouret and Kai Qin and Julia Handl and
                  Xiaodong Li and Markus Wagner and Mario Garza-Fabre and
                  Kate Smith-Miles and Richard Allmendinger and
                  Ying Bi and Grant Dick and Amir H Gandomi and
                  Marcella Scoczynski Ribeiro Martins and Hirad Assimi and
                  Nadarajen Veerapen and Yuan Sun and
                  Mario Andres Munyoz and Ahmed Kheiri and Nguyen Su and
                  Dhananjay Thiruvady and Andy Song and Frank Neumann and
                  Carla Silva",
  pages =        "340--348",
  address =      "Melbourne, Australia",
  series =       "GECCO '24",
  month =        "14-18 " # jul,
  organisation = "SIGEVO",
  publisher =    "Association for Computing Machinery",
  publisher_address = "New York, NY, USA",
  keywords =     "genetic algorithms, genetic programming, Evolutionary
                  Machine Learning",
  isbn13 =       "979-8-4007-0494-9",
  DOI =          "doi:10.1145/3638529.3654061",
  size =         "9 pages",
  abstract =     "Automated machine learning (AutoML) has emerged as a
                  pivotal tool for applying machine learning (ML) models
                  to real-world problems. Tree-based pipeline
                  optimization tool (TPOT) is an AutoML framework known
                  for effectively solving complex tasks. TPOT's search
                  involves two fundamental objectives: finding optimal
                  pipeline structures (i.e., combinations of ML
                  operators) and identifying suitable hyperparameters for
                  these structures. While its use of genetic programming
                  enables TPOT to excel in structural search, its
                  hyperparameter search, involving discretization and
                  random selection from extensive potential value ranges,
                  can be computationally inefficient. This paper presents
                  a novel methodology that heavily restricts the initial
                  hyperparameter search space, directing TPOT's focus
                  towards structural exploration. As the search evolves,
                  Bayesian optimization (BO) is used to refine the
                  hyperparameter space based on data from previous
                  pipeline evaluations. This method leads to a more
                  targeted search, crucial in situations with limited
                  computational resources. Two variants of this approach
                  are proposed and compared with standard TPOT across six
                  datasets, with up to 20 features and 20,000 samples.
                  The results show the proposed method is competitive
                  with canonical TPOT, and outperforms it in some cases.
                  The study also provides new insights into the dynamics
                  of pipeline structure and hyperparameter search within
                  TPOT.",
  notes =        "GECCO-2024 EML. A Recombination of the 33rd
                  International Conference on Genetic Algorithms (ICGA)
                  and the 29th Annual Genetic Programming Conference
                  (GP)",
}
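
The abstract describes a two-stage idea: heavily restrict TPOT's hyperparameter ranges so the genetic programming search concentrates on pipeline structure, then use Bayesian optimization on data from previous pipeline evaluations to refine the hyperparameters. The sketch below illustrates that general idea only and is not the authors' implementation. It assumes the classic TPOT (0.x) config_dict API and uses scikit-optimize's gp_minimize as a stand-in BO step; the dataset, operators, and value ranges are illustrative.

# Hedged sketch of the restricted-search-plus-BO idea; not the paper's code.
# Assumes the classic TPOT (0.x) API (config_dict argument) and scikit-optimize.
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import cross_val_score, train_test_split
from sklearn.ensemble import RandomForestClassifier
from tpot import TPOTClassifier
from skopt import gp_minimize
from skopt.space import Integer

X, y = load_breast_cancer(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# Step 1: heavily restricted hyperparameter ranges, so TPOT's genetic
# programming mostly explores pipeline *structure* rather than parameter values.
restricted_config = {
    "sklearn.ensemble.RandomForestClassifier": {
        "n_estimators": [100],      # single fixed value
        "max_depth": [None],
    },
    "sklearn.linear_model.LogisticRegression": {
        "C": [1.0],
    },
    "sklearn.preprocessing.StandardScaler": {},
}

tpot = TPOTClassifier(
    generations=5,
    population_size=20,
    config_dict=restricted_config,
    cv=5,
    random_state=0,
    verbosity=2,
)
tpot.fit(X_train, y_train)

# Step 2: Bayesian optimization refines hyperparameters; here it simply tunes
# a random forest, standing in for the best structure found by TPOT.
space = [Integer(50, 500, name="n_estimators"),
         Integer(2, 20, name="max_depth")]

def objective(params):
    n_estimators, max_depth = params
    clf = RandomForestClassifier(
        n_estimators=n_estimators, max_depth=max_depth, random_state=0
    )
    # Negative mean CV accuracy, since gp_minimize minimizes.
    return -cross_val_score(clf, X_train, y_train, cv=5).mean()

result = gp_minimize(objective, space, n_calls=25, random_state=0)
print("BO-refined hyperparameters:", result.x)

In the paper the BO step refines the evolving hyperparameter search space itself, using data from earlier pipeline evaluations, rather than tuning one fixed structure; the sketch collapses that to a single post-hoc tuning step for brevity.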