Genetic Programming Symbolic Classification: A Study
Created by W. Langdon from
gp-bibliography.bib Revision: 1.8129
@InProceedings{Korns:2017:GPTP,
  author       = "Michael F. Korns",
  title        = "Genetic Programming Symbolic Classification: A Study",
  booktitle    = "Genetic Programming Theory and Practice XV",
  editor       = "Wolfgang Banzhaf and Randal S. Olson and
                  William Tozier and Rick Riolo",
  year         = "2017",
  series       = "Genetic and Evolutionary Computation",
  pages        = "39--54",
  address      = "University of Michigan in Ann Arbor, USA",
  month        = may # " 18--20",
  organization = "the Center for the Study of Complex Systems",
  publisher    = "Springer",
  keywords     = "genetic algorithms, genetic programming",
  isbn13       = "978-3-319-90511-2",
  URL          = "https://link.springer.com/chapter/10.1007/978-3-319-90512-9_3",
  DOI          = "10.1007/978-3-319-90512-9_3",
  abstract     = "While Symbolic Regression (SR) is a well-known
                  offshoot of Genetic Programming, Symbolic
                  Classification (SC), by comparison, has received only
                  meagre attention. Clearly, regression is only half of
                  the solution. Classification also plays an important
                  role in any well rounded predictive analysis tool kit.
                  In several recent papers, SR algorithms are developed
                  which move SR into the ranks of extreme accuracy. In an
                  additional set of papers algorithms are developed
                  designed to push SC to the level of basic
                  classification accuracy competitive with existing
                  commercially available classification tools. This paper
                  is a simple study of four proposed SC algorithms and
                  five well-known commercially available classification
                  algorithms to determine just where SC now ranks in
                  competitive comparison. The four SC algorithms are:
                  simple genetic programming using argmax referred to
                  herein as (AMAXSC); the M2GP algorithm; the MDC
                  algorithm, and Linear Discriminant Analysis (LDA). The
                  five commercially available classification algorithms
                  are available in the KNIME system, and are as follows:
                  Decision Tree Learner (DTL); Gradient Boosted Trees
                  Learner (GBTL); Multiple Layer Perceptron Learner
                  (MLP); Random Forest Learner (RFL); and Tree Ensemble
                  Learner (TEL). A set of ten artificial classification
                  problems are constructed with no noise. The simple
                  formulas for these ten artificial problems are listed
                  herein. The problems vary from linear to nonlinear
                  multimodal and from 25 to 1000 columns. All problems
                  have 5000 training points and a separate 5000 testing
                  points. The scores, on the out of sample testing data,
                  for each of the nine classification algorithms are
                  published herein.",
  notes        = "GPTP 2017, Part of \cite{Banzhaf:2017:GPTP} published
                  after the workshop in 2018",
}
Genetic Programming entries for
Michael F. Korns
Citations