Neuromemetic Evolutionary Optimization
Created by W.Langdon from
gp-bibliography.bib Revision:1.8051
@InProceedings{Liskowski:2020:PPSN,
  author =       "Pawel Liskowski and Krzysztof Krawiec and
                 Nihat Engin Toklu",
  title =        "Neuromemetic Evolutionary Optimization",
  booktitle =    "16th International Conference on Parallel Problem
                 Solving from Nature, Part I",
  year =         "2020",
  editor =       "Thomas Baeck and Mike Preuss and Andre Deutz and
                 Hao Wang and Carola Doerr and Michael Emmerich and
                 Heike Trautmann",
  volume =       "12269",
  series =       "LNCS",
  pages =        "623--636",
  address =      "Leiden, The Netherlands",
  month =        "7--9 " # sep,
  publisher =    "Springer",
  keywords =     "genetic algorithms, genetic programming, ANN,
                 Optimization, Neural networks, Program synthesis",
  isbn13 =       "978-3-030-58111-4",
  URL =          "https://doi.org/10.1007/978-3-030-58112-1_43",
  DOI =          "10.1007/978-3-030-58112-1_43",
  abstract =     "Discrete and combinatorial optimization can be
                 notoriously difficult due to complex and rugged
                 characteristics of the objective function. We address
                 this challenge by mapping the search process to a
                 continuous space using recurrent neural networks.
                 Alongside with an evolutionary run, we learn three
                 mappings: from the original search space to a
                 continuous Cartesian latent space, from that latent
                 space back to the search space, and from the latent
                 space to the search objective. We elicit gradient from
                 that last network and use it to perform moves in the
                 latent space, and apply this Neuromemetic Evolutionary
                 Optimization (NEO) to evolutionary synthesis of
                 programs. Evaluation on a range of benchmarks suggests
                 that NEO significantly outperforms conventional genetic
                 programming.",
  notes =        "PPSN XVI PPSN2020",
}
Genetic Programming entries for
Pawel Liskowski
Krzysztof Krawiec
Nihat Engin Toklu
Citations