DALex: Lexicase-Like Selection via Diverse Aggregation
Created by W. B. Langdon from
gp-bibliography.bib Revision: 1.7954
@InProceedings{Ni:2024:EuroGP,
  author =       "Ni, Andrew and Ding, Li and Spector, Lee",
  editor =       "Giacobini, Mario and Xue, Bing and Manzoni, Luca",
  title =        "{DALex}: Lexicase-Like Selection via Diverse
                 Aggregation",
  booktitle =    "EuroGP 2024: Proceedings of the 27th European
                 Conference on Genetic Programming",
  year =         "2024",
  volume =       "14631",
  series =       "LNCS",
  publisher =    "Springer",
  address =      "Aberystwyth",
  month =        "3-5 " # apr,
  organization = "EvoStar, Species",
  keywords =     "genetic algorithms, genetic programming",
  pages =        "90--107",
  abstract =     "Lexicase selection has been shown to provide
                 advantages over other selection algorithms in several
                 areas of evolutionary computation and machine learning.
                 In its standard form, lexicase selection filters a
                 population or other collection based on randomly
                 ordered training cases that are considered one at a
                 time. This iterated filtering process can be
                 time-consuming, particularly in settings with large
                 numbers of training cases, including many symbolic
                 regression and deep learning applications. In this
                 paper, we propose a new method that is nearly
                 equivalent to lexicase selection in terms of the
                 individuals that it selects, but which does so in
                 significantly less time. The new method, called DALex
                 (for Diversely Aggregated Lexicase selection), selects
                 the best individual with respect to a randomly weighted
                 sum of training case errors. This allows us to
                 formulate the core computation required for selection
                 as matrix multiplication instead of recursive loops of
                 comparisons, which in turn allows us to take advantage
                 of optimized and parallel algorithms designed for
                 matrix multiplication for speedup. Furthermore, we show
                 that we can interpolate between the behaviour of
                 lexicase selection and its relaxed variants, such as
                 epsilon and batch lexicase selection, by adjusting a
                 single hyperparameter, named particularity pressure,
                 which represents the importance granted to each
                 individual training case. Results on program synthesis,
                 deep learning, symbolic regression, and learning
                 classifier systems demonstrate that DALex achieves
                 significant speedups over lexicase selection and its
                 relaxed variants while maintaining almost identical
                 problem-solving performance. Under a fixed
                 computational budget, these savings free up resources
                 that can be directed towards increasing population size
                 or the number of generations, enabling the potential
                 for solving more difficult problems.",
  isbn13 =       "978-3-031-56957-9",
  DOI =          "10.1007/978-3-031-56957-9_6",
  notes =        "Part of \cite{Giacobini:2024:GP} EuroGP'2024 held in
                 conjunction with EvoCOP2024, EvoMusArt2024 and
                 EvoApplications2024",
}
Genetic Programming entries for
Andrew Ni
Li Ding
Lee Spector
Citations