%gecco2023.awk $Revision: 1.25 $ , `GECCO' gecco2023_acm.bib 09 Sep 2023 @inproceedings(de-jong:2023:GECCO, author = {Kenneth {De Jong}}, title = {Evolutionary Computation Evolving}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {machine learning, evolutionary algorithms, optimization, evolutionary computation, parallelism}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3600058}, size = {1 pages}, abstract = {I have had the privilege of involvement in this field from its early days. The result is a rather unique and comprehensive perspective on its development and growth. In this talk I use that perspective to highlight some important milestones, discuss some current issues and suggest some directions for the future.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(gomes:2023:GECCO, author = {Carla Pedro Gomes}, title = {{AI} for Scientific Discovery and a Sustainable Future}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {2}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {dynamic programming, deep learning, computational sustainability, deep reasoning networks, multi-target regression, fast polynomial time approximation schemes, unsupervised and supervised learning, multi label classification, multi-objective pareto optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3603396}, size = {1 pages}, abstract = {Artificial Intelligence (AI) is a rapidly progressing field, achieving remarkable breakthroughs in areas ranging from computer vision and machine translation to world champion-level Go gameplay, autonomous vehicles, and Chat-GPT. The continuously expanding capabilities of AI present promising opportunities for advancements in various domains. I will discuss our AI research directed at accelerating scientific discovery for a sustainable future. Specifically, I'll delve into our work in the emerging interdisciplinary field of Computational Sustainability, which focuses on developing computational methods to tackle pressing sustainability challenges. I will illustrate examples of computational sustainability challenges, including biodiversity conservation, strategic planning for hydropower dams in the Amazon basin, and the discovery of renewable energy materials. 
I will highlight cross-computational themes and AI challenges, emphasizing the potential for groundbreaking advancements in our pursuit of a sustainable future.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(poli:2023:GECCO, author = {Riccardo Poli}, title = {{Super-Human} and {Super-AI} Cognitive Augmentation of Human and {Human-AI} Teams Assisted by Brain Computer Interfaces}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {3}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {cognitive augmentation, decision making, human-machine teams, brain-computer interfaces}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3603554}, size = {1 pages}, abstract = {In the last 10 years a major strand of my research within the Essex Brain-Computer Interfaces and Neural Engineering (BCI-NE) laboratory has focused on the idea of combining brain signals (and other physiological and behavioral information) across multiple people to achieve a form of emergent, perceptual and, more generally cognitive, group augmentation, which is more than the sum of the parts. Over this period (in projects funded mostly by the UK Ministry of Defense and also US DOD) we have developed a technology which has delivered significant (and in fact in some cases remarkable) improvements over the group performance achieved by more traditional methods of integrating information applying it successfully to progressively more and more real-world applications. Our efforts have particularly focused on the area of decision making. Decisions (for example made by government, military or hospital management) are often made with limited amounts of information, or indeed too much information for any single person to take in, hence involving a high degree of uncertainty. Yet, such decisions can be highly critical in nature, with mistakes possibly resulting in extremely adverse outcomes, including loss of lives. So, any improvements in accuracy or speed of decisions in such conditions is vitally important. In the last 5 years we have also started to study hybrid human-AI decision-making groups by the inclusion of one or more AI-based teammates which act as peers to the humans. 
We found that when the conditions are right (more on this in the talk), human-AI groups produce super-human and super-AI performance. In this presentation, I will review BCIs and our approach, and will discuss some of the applications we explored including the identification of visual targets in cluttered environments, the comprehension of military radio communication, face recognition, military reconnaissance missions, military outposts and strategic resource allocation in a pandemic. I will finally look at potential future developments.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(almansoori:2023:GECCO, author = {Ahmed Almansoori and Muhanad Alkilabi and Elio Tuci}, title = {On the Evolution of Mechanisms for {Three-Option} Collective {Decision-Making} in a Swarm of Simulated Robots}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {4--12}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {collective perception, swarm robotics, evolutionary robotics, collective decision-making}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590385}, size = {9 pages}, abstract = {To act cohesively as a group, robot swarms must be able to make decisions collectively. Collective decision-making refers to a process in which once a group decision is reached, it cannot be attributed to any single individual. Although extensive research has been conducted in swarm robotics using hand-coded design techniques to develop individual mechanisms for collective decision-making, the proposed mechanisms are generally limited in terms of robustness, scalability, and adaptability. In this paper, we employ evolutionary computation techniques to synthesise neural network-based decision-modules underpinning the individual opinion selection in robots. We describe the group dynamics underlying the decision process that leads to consensus in a three-option perceptual discrimination task. We test the robustness, scalability and adaptability of the decision-module in a variety of conditions. 
We show that the decision-making mechanisms underpinned by the evolved decision-module are more effective in supporting the collective decision-making process than the hand-coded voter and majority models, both in terms of accuracy and with respect to time to convergence to consensus.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(blackwell:2023:GECCO, author = {Tim Blackwell}, title = {The Barrier Tree Benchmark: Many Basins and Double Funnels}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {13--20}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {optimisation, algorithm benchmarking, swarm intelligence}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590478}, size = {8 pages}, abstract = {The Barrier Tree Benchmark (BTB) is a principled generator of continuous real-valued landscapes: problems of known topography/critical point structure can be systematically designed and deployed in algorithm comparison studies. A previous BTB study focused on a single funnel and a double basin. This work demonstrates algorithm performance on BTB instances with many basins, and on double funnels. A methodology for principled algorithm comparison on families of problems of similar complexity and structure is proposed. It is hoped that the BTB will address a parameter tuning pathology of current problem benchmarks, namely, that common optimisation algorithms require widely different control parameter settings for optimal performance on differing problem classes. 
This pathology is traced to the irregular and arbitrary composition of standard benchmarks.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(feola:2023:GECCO, author = {Luigi Feola and Antoine Sion and Vito Trianni and Andreagiovanni Reina and Elio Tuci}, title = {Aggregation Through Adaptive Random Walks in a Minimalist Robot Swarm}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {21--29}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {iterated racing, swarm robotics, minimal computing, aggregation, random walks, heterogeneous swarm}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590485}, size = {9 pages}, abstract = {In swarm robotics, random walks have proven to be efficient behaviours to explore unknown environments. By adapting the parameters of the random walk to environmental and social contingencies, it is possible to obtain interesting collective behaviours. In this paper, we introduce two novel aggregation behaviours based on different parameterisations of random walks tuned through numerical optimisation. Cue-based aggregation allows the swarm to reach the centre of an arena relying only on local discrete sampling, but does not guarantee the formation of a dense cluster. Neighbour-based aggregation instead allows the swarm to cluster in a single location based on the local detection of neighbours, but ignores the environmental cue. We then investigate a heterogeneous swarm made up of the two robot types. Results show that a trade-off can be found in terms of robot proportions to achieve cue-based aggregation while keeping the majority of the swarm in a single dense cluster.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(france:2023:GECCO, author = {Kordel K. France and John W. 
Sheppard}, title = {Factored Particle Swarm Optimization for Policy {Co-Training} in Reinforcement Learning}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {30--38}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {co-training, reinforcement learning, factored evolutionary algorithms}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590376}, size = {9 pages}, abstract = {Uncertainty of the environment limits the circumstances with which any optimization problem can provide meaningful information. Multiple optimizers can combat this problem by communicating different information through cooperative coevolution. In reinforcement learning (RL), uncertainty can be reduced by applying learned policies collaboratively with another agent. Here, we propose policy Co-training with Factored Evolutionary Algorithms (CoFEA) to evolve an optimal policy for such scenarios. We hypothesize that self-paced co-training can allow factored particle swarms with imperfect knowledge to consolidate knowledge from each of their imperfect policies in order to approximate a single optimal policy. Additionally, we show how the performance of co-training swarms of RL agents can be maximized through the specific use of Expected SARSA as the policy learner. We evaluate CoFEA against comparable RL algorithms and attempt to establish limits for which our procedure does and does not provide benefit. Our results indicate that Particle Swarm Optimization (PSO) is effective in training multiple agents under uncertainty and that FEA reduces swarm and policy updates. This paper contributes to the field of cooperative co-evolutionary algorithms by proposing a method by which factored evolutionary techniques can significantly improve how multiple RL agents collaborate under extreme uncertainty to solve complex tasks faster than a single agent can under identical conditions.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(kebari:2023:GECCO, author = {Maryam Kebari and Annie S. Wu and H. 
David Mathias}, title = {{PID-Inspired} Modifications in Response Threshold Models in Swarm Intelligent Systems}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {39--46}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {multi-agent system, swarm intelligence}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590442}, size = {8 pages}, abstract = {In this study, we investigate the effectiveness of using the PID (Proportional - Integral - Derivative) control loop factors for modifying response thresholds in a decentralized, non-communicating, threshold-based swarm. Each agent in our swarm has a set of four thresholds, each corresponding to a task the agent is capable of performing. The agent will act on a particular task if the stimulus is higher than its corresponding threshold. The ability to modify their thresholds allows the agents to specialize dynamically in response to task demands. Current approaches to dynamic thresholds typically use a learning and forgetting process to adjust thresholds. These methods are able to effectively specialize once, but can have difficulty re-specializing if the task demands change. Our approach, inspired by the PID control loop, alters the threshold values based on the current task demand value, the change in task demand, and the cumulative sum of previous task demands. We show that our PID-inspired method is scalable and outperforms fixed and current learning and forgetting response thresholds with non-changing, constant, and abrupt changes in task demand. 
This superior performance is due to the ability of our method to re-specialize repeatedly in response to changing task demands.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(liu:2023:GECCO, author = {Yi Liu and Jiang Qiu and Emma Hart and Yilan Yu and Zhongxue Gan and Wei Li}, title = {{Learning-Based} Neural Ant Colony Optimization}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {47--55}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {swarm intelligence, deep learning, ant colony optimization, intelligent ant}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590483}, size = {9 pages}, abstract = {In this paper, we propose a new ant colony optimization algorithm, called learning-based neural ant colony optimization (LN-ACO), which incorporates an "intelligent ant". This intelligent ant contains a convolutional neural network pre-trained on a large set of instances which is able to predict the selection probabilities of the set of possible choices at each step of the algorithm. The intelligent ant is capable of generating a solution based on knowledge learned during training, but also guides other 'traditional' ants in improving their choices during the search. As the search progresses, the intelligent ant is also influenced by the pheromones accumulated by the colony, leading to better solutions. The key idea is that if tasks or instances share common features either in terms of their search landscape or solutions, then information learned by solving one instance can be applied to substantially accelerate the search on another. We evaluate the proposed algorithm on two public datasets and one real-world test set in the path planning domain. The results demonstrate that LN-ACO is competitive in its search capability compared to other ACO methods, with a significant improvement in convergence speed.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(mattson:2023:GECCO, author = {Connor Mattson and Daniel S. 
Brown}, title = {Leveraging Human Feedback to Evolve and Discover Novel Emergent Behaviors in Robot Swarms}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {56--64}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {similarity learning, emergent behaviors, novelty search, swarms}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590443}, size = {9 pages}, abstract = {Robot swarms often exhibit emergent behaviors that are fascinating to observe; however, it is often difficult to predict what swarm behaviors can emerge under a given set of agent capabilities. We seek to efficiently leverage human input to automatically discover a taxonomy of collective behaviors that can emerge from a particular multi-agent system, without requiring the human to know beforehand what behaviors are interesting or even possible. Our proposed approach adapts to user preferences by learning a similarity space over swarm collective behaviors using self-supervised learning and human-in-the-loop queries. We combine our learned similarity metric with novelty search and clustering to explore and categorize the space of possible swarm behaviors. We also propose several general-purpose heuristics that improve the efficiency of our novelty search by prioritizing robot controllers that are likely to lead to interesting emergent behaviors. We test our approach in simulation on two robot capability models and show that our methods consistently discover a richer set of emergent behaviors than prior work. Code, videos, and datasets are available at https://sites.google.com/view/evolving-novel-swarms.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(mkhatshwa:2023:GECCO, author = {Sindiso Mkhatshwa and Geoff Nitschke}, title = {The Impact of Morphological Diversity in Robot Swarms}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {65--74}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {morphology-behavior diversity, evolutionary swarm robotics}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590347}, size = {10 pages}, abstract = {In nature, morphological diversity enhances functional diversity, however, there is little swarm (collective) robotics research on the impact of morphological and behavioral (body-brain) diversity that emerges in response to changing environments. 
This study investigates the impact of increasingly complex task environments on the artificial evolution of body-brain diversity in simulated robot swarms. We investigate whether increasing task environment complexity (collective behavior tasks requiring increasing degrees of cooperative behavior) mandates concurrent increases in behavioral, morphological, or coupled increases in body-brain diversity in robotic swarms. Experiments compared three variants of collective behavior evolution across increasingly complex task environments: two behavioral diversity maintenance variants and body-brain diversity maintenance. Results indicate that body-brain diversity maintenance yielded a significantly higher behavioral and morphological diversity in evolved swarms overall, which was beneficial in the most complex task environment.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(simionato:2023:GECCO, author = {Giada Simionato and Federico A. Galatolo and Mario G.C.A. Cimino}, title = {Swarms of Artificial Platelets for Emergent Hole Detection and Healing in Wireless Sensor Networks}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {75--83}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {artificial platelets, hole detection, swarm intelligence, emergent cooperation, hole healing}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590468}, size = {9 pages}, abstract = {Most of the applications of wireless sensor networks require the continuous coverage of a region of interest. The irregular deployment of the nodes, or their failure, could result in holes in the coverage, thus jeopardizing such requirement. Methods to recover the sensing capabilities usually demand the availability of redundant full-fledged nodes, whose relocation should heal the holes. These solutions, however, do not consider the high cost of obtaining redundant, typically complex, devices, nor that they could in turn fail. In this work, we propose a bio-inspired and emergent approach toward hole detection and healing using a swarm of resource-constrained agents with reduced sensing capabilities, whose behavior draws inspiration from the concepts underlying blood coagulation. The swarm follows three rules: activation, adhesion, and cohesion, adapted from the behavior exhibited by platelets during the human healing process. Relying only on local and relative information, the mobile agents can detect the holes border and place themselves in locally optimal positions to temporarily restore the service. To validate the algorithm, we have developed a distributed, multi-process simulator. Experimental results show that the proposed method efficiently detects and heals the holes, outperforming two state-of-the-art solutions. 
It also demonstrates good robustness and flexibility to agent failure.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(singh:2023:GECCO, author = {Emilio Singh and Nelishia Pillay}, title = {A Study of {Ant-Based} Pheromone Spaces for Generation Perturbative {Hyper-Heuristics}}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {84--92}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {discrete optimization, generation perturbative hyper-heuristics, ant algorithms}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590367}, size = {9 pages}, abstract = {Recent work has shown the potential of ant algorithms for generation constructive hyper-heuristics. This paper extends the previous research by presenting a novel ant algorithm that is used to drive the heuristic search for a generation perturbative hyper-heuristic, the other type of generation hyper-heuristic. The ant-based generation perturbative hyper-heuristic is presented and compared against existing heuristics in two combinatorial domains, the movie scene scheduling and capacitated vehicle routing problems, to assess the heuristic generation efficacy. The comparison is further extended by assessing the effect of different pheromone maps (1D, 2D and 3D) on the ant-based hyper-heuristic, an important factor in the previous study. The results showed that, in both domains, the hyper-heuristic was able to generate perturbative heuristics that were competitive or better than the existing heuristics. 
Furthermore, the type of pheromone map was relevant to the hyper-heuristic performance as the 3D map performed best for the first domain and the 1D map for the second, confirming the trend shown in previous research, as well as the validity of ant algorithms for generation perturbative hyper-heuristics.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(sun:2023:GECCO, author = {Youwei Sun and Chaoli Sun}, title = {Particle Swarm Optimization with Ring Topology for {Multi-Modal} {Multi-Objective} Problems}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {93--101}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {special crowding distance, particle swarm optimization, weighted indicator, multi-modal multi-objective optimization problems, ring topology}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590382}, size = {9 pages}, abstract = {Multi-modal multi-objective optimization problems (MMOPs) are ubiquitous in real-world applications, in which multiple objective functions are conflicting and need to be optimized simultaneously. Furthermore, multiple Pareto optimal solutions of MMOPs are mapped to the same point on the Pareto front. Canonical multi-objective optimization algorithms show poor performance for MMOPs due to the lack of diversity maintenance in the decision space. In this paper, a particle swarm optimization with ring topology, denoted as PSO-RT, is proposed for solving MMOPs. The population is firstly divided into several sub-populations based on a dynamic radius to form a ring topology. Then, each solution will update its position by learning from one of its personal best positions and the best position of its neighbors. The personal best position to be learned is selected based on the special crowding distance to improve the exploration capability, and the best position of its neighbors is selected according to the weighted indicator to speed up convergence. The performance of PSO-RT is evaluated on 22 test problems. 
Experimental results show that our proposed PSO-RT can obtain competitive or better results than some state-of-the-art algorithms proposed for MMOPs in terms of Pareto sets proximity (PSP) and inverted generational distance (IGDX) metrics.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(xiao:2023:GECCO, author = {Tan-Lin Xiao and Qiang Yang and Xu-Dong Gao and Yuan-Yuan Ma and Zhen-Yu Lu and Sang-Woon Jeon and Jun Zhang}, title = {Variation Encoded {Large-Scale} Swarm Optimizers for Path Planning of Unmanned Aerial Vehicle}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {102--110}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {variation encoding, particle swarm optimization, path planning, large-scale swarm optimizers, unmanned aerial vehicle}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590357}, size = {9 pages}, abstract = {Different from existing studies where low-dimensional optimizers are utilized to optimize the path of an unmanned aerial vehicle (UAV), this paper attempts to employ large-scale swarm optimizers to solve the path planning problem of UAV, such that the path can be subtler and smoother. To this end, a variation encoding scheme is devised to encode particles. Specifically, each dimension of a particle is encoded by a triad consisting of the relative movements of UAV along the three coordinate axes. With this encoding scheme, a large number of anchor points can be optimized to form the path and repetitive anchor points can be avoided. Subsequently, this paper embeds this encoding scheme into four representative and well-performed large-scale swarm optimizers, namely the stochastic dominant learning swarm optimizer (SDLSO), the level-based learning swarm optimizer (LLSO), the competitive swarm optimizer (CSO), and the social learning particle swarm optimizer (SL-PSO), to optimize the path of UAV. Experiments have been conducted on 16 scenes with 4 different numbers of peaks in the landscapes. 
Experimental results have demonstrated that the devised encoding scheme is effective to cooperate with the four large-scale swarm optimizers to solve the path planning problem of UAV and SDLSO achieves the best performance.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(zhang:2023:GECCO, author = {Chi Zhang and Jian-Yu Li and Chun-Hua Chen and Yun Li and Zhi-Hui Zhan}, title = {{Region-Based} Evaluation Particle Swarm Optimization with Dual Solution Libraries for {Real-Time} Traffic Signal Timing Optimization}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {111--118}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {particle swarm optimization (PSO), solution library, traffic signal timing optimization (TSTO), region-based evaluation}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590350}, size = {8 pages}, abstract = {Traffic signal timing optimization (TSTO) is a significant topic in the smart city. However, there are two challenges when solving TSTO. Firstly, it often uses time-consuming simulation software to evaluate candidate solutions, therefore it is an expensive optimization problem. Secondly, as the traffic flow changes rapidly in TSTO, providing a timing scheme to respond to the change immediately is difficult. To address the above challenges, we propose a region-based evaluation particle swarm optimization algorithm (REPSO) with dual solution libraries, which has three novel designs. First, two solution libraries are built for undersaturated and oversaturated traffic flow states, respectively, which can be used to fast provide a signal timing scheme for a traffic flow in real-time. Second, a knowledge-assisted initialization strategy is proposed and adopted to assist the initialization of new solutions based on the knowledge in the two solution libraries. Third, a region-based evaluation strategy is proposed to reduce the number of fitness evaluations, which can also greatly reduce the construction time of the solution libraries. 
The performance of REPSO is validated by comparing with six signal timing methods in both undersaturated and oversaturated traffic flow states, showing the better general performance of REPSO.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(boige:2023:GECCO, author = {Rapha\"{e}l Boige and Guillaume Richard and J\'{e}r\'{e}mie Dona and Thomas Pierrot and Antoine Cully}, title = {{Gradient-Informed} Quality Diversity for the Illumination of Discrete Spaces}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {119--128}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590407}, size = {10 pages}, abstract = {Quality Diversity (QD) algorithms have been proposed to search for a large collection of both diverse and high-performing solutions instead of a single set of local optima. While early QD algorithms view the objective and descriptor functions as black-box functions, novel tools have been introduced to use gradient information to accelerate the search and improve overall performance of those algorithms over continuous input spaces. However a broad range of applications involve discrete spaces, such as drug discovery or image generation. Exploring those spaces is challenging as they are combinatorially large and gradients cannot be used in the same manner as in continuous spaces. We introduce MAP-Elites with a Gradient-Informed Discrete Emitter (ME-GIDE), which extends QD optimisation with differentiable functions over discrete search spaces. ME-GIDE leverages the gradient information of the objective and descriptor functions with respect to its discrete inputs to propose gradient-informed updates that guide the search towards a diverse set of high quality solutions. 
We evaluate our method on challenging benchmarks including protein design and discrete latent space illumination and find that our method outperforms state-of-the-art QD algorithms in all benchmarks.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(cochevelou:2023:GECCO, author = {Fran\c{c}ois Cochevelou and David Bonner and Martin-Pierre Schmidt}, title = {Differentiable {Soft-Robot} Generation}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {129--137}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {soft-robot, differentiable simulation, material point method, topology optimisation}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590408}, size = {9 pages}, abstract = {Soft robots have multiple potential applications for artificial life, ergonomics and human interaction but they also present many design and control challenges. One of these challenges is morphogenesis adapted to common tasks such as gripping and walking. We propose a method based on the Material Point Method (MPM) and numerical optimization to co-optimize effectively the morphology and muscle actuation of a 3D soft robot in three different scenarios: walking, walking up stairs and avoiding obstacles. Thanks to efficient optimization techniques, we allow virtual soft robot morphogenesis with at least four orders of magnitude more variables and fewer iterations as compared to previous work based on genetic algorithms. We demonstrate generation of a soft robot at a resolution of 100\^{}3 and 2 million parameters. We extend our approach to generate rigid structure and create a multi-rigidity robot. 
Finally, we introduce a concept of energy fraction to limit muscle mass.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(faldor:2023:GECCO, author = {Maxence Faldor and F\'{e}lix Chalumeau and Manon Flageat and Antoine Cully}, title = {{MAP-Elites} with {Descriptor-Conditioned} Gradients and Archive Distillation into a Single Policy}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {138--146}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {reinforcement learning, neuroevolution, MAP-elites, policy gradient, quality-diversity}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590503}, size = {9 pages}, abstract = {Quality-Diversity algorithms, such as MAP-Elites, are a branch of Evolutionary Computation generating collections of diverse and high-performing solutions, that have been successfully applied to a variety of domains and particularly in evolutionary robotics. However, MAP-Elites performs a divergent search based on random mutations originating from Genetic Algorithms, and thus, is limited to evolving populations of low-dimensional solutions. PGA-MAP-Elites overcomes this limitation by integrating a gradient-based variation operator inspired by Deep Reinforcement Learning which enables the evolution of large neural networks. Although high-performing in many environments, PGA-MAP-Elites fails on several tasks where the convergent search of the gradient-based operator does not direct mutations towards archive-improving solutions. In this work, we present two contributions: (1) we enhance the Policy Gradient variation operator with a descriptor-conditioned critic that improves the archive across the entire descriptor space, (2) we exploit the actor-critic training to learn a descriptor-conditioned policy at no additional cost, distilling the knowledge of the archive into one single versatile policy that can execute the entire range of behaviors contained in the archive. 
Our algorithm, DCG-MAP-Elites, improves the QD score over PGA-MAP-Elites by 82\% on average, on a set of challenging locomotion tasks.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(grasso:2023:GECCO, author = {Caitlin Grasso and Josh Bongard}, title = {Selection for {Short-Term} Empowerment Accelerates the Evolution of Homeostatic Neural Cellular Automata}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {147--155}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {neural cellular automata, empowerment, morphogenesis}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590469}, size = {9 pages}, abstract = {Empowerment---a domain independent, information-theoretic metric---has previously been shown to assist in the evolutionary search for neural cellular automata (NCA) capable of homeostasis when employed as a fitness function [11, 17]. In our previous study, we successfully extended empowerment, defined as maximum time-lagged mutual information between agents' actions and future sensations, to a distributed sensorimotor system embodied as an NCA. However, the time-delay between actions and their corresponding sensations was arbitrarily chosen. Here, we expand upon previous work by exploring how the time scale at which empowerment operates impacts its efficacy as an auxiliary objective to accelerate the discovery of homeostatic NCAs. We show that shorter time delays result in marked improvements over empowerment with longer delays, when compared to evolutionary selecting only for homeostasis. Moreover, we evaluate stability and adaptability of evolved NCAs, both hallmarks of living systems that are of interest to replicate in artificial ones. We find that short-term empowered NCA are more stable and are capable of generalizing better to unseen homeostatic challenges. 
Taken together, these findings motivate the use of empowerment during the evolution of other artifacts, and suggest how it should be incorporated to accelerate evolution of desired behaviors for them.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(grillotti:2023:GECCO, author = {Luca Grillotti and Manon Flageat and Bryan Lim and Antoine Cully}, title = {Don't Bet on Luck Alone: Enhancing Behavioral Reproducibility of {Quality-Diversity} Solutions in Uncertain Domains}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {156--164}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {uncertain domains, neuroevolution, quality-diversity}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590498}, size = {9 pages}, abstract = {Quality-Diversity (QD) algorithms are designed to generate collections of high-performing solutions while maximizing their diversity in a given descriptor space. However, in the presence of unpredictable noise, the fitness and descriptor of the same solution can differ significantly from one evaluation to another, leading to uncertainty in the estimation of such values. Given the elitist nature of QD algorithms, they commonly end up with many degenerate solutions in such noisy settings. In this work, we introduce Archive Reproducibility Improvement Algorithm (ARIA); a plug-and-play approach that improves the reproducibility of the solutions present in an archive. We propose it as a separate optimization module, relying on natural evolution strategies, that can be executed on top of any QD algorithm. Our module mutates solutions to (1) optimize their probability of belonging to their niche, and (2) maximize their fitness. The performance of our method is evaluated on various tasks, including a classical optimization problem and two high-dimensional control tasks in simulated robotic environments. 
We show that our algorithm enhances the quality and descriptor space coverage of any given archive by at least 50\%.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(janmohamed:2023:GECCO, author = {Hannah Janmohamed and Thomas Pierrot and Antoine Cully}, title = {Improving the Data Efficiency of {Multi-Objective} {Quality-Diversity} through Gradient Assistance and Crowding Exploration}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {165--173}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {MAP-elites, quality-diversity, neuroevolution, multi-objective optimisation}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590470}, size = {9 pages}, abstract = {Quality-Diversity (QD) algorithms have recently gained traction as optimisation methods due to their effectiveness at escaping local optima and capability of generating wide-ranging and high-performing solutions. Recently, Multi-Objective MAP-Elites (MOME) extended the QD paradigm to the multi-objective setting by maintaining a Pareto front in each cell of a MAP-Elites grid. MOME achieved a global performance that competed with NSGA-II and SPEA2, two well-established multi-objective evolutionary algorithms, while also acquiring a diverse repertoire of solutions. However, MOME is limited by non-directed genetic search mechanisms which struggle in high-dimensional search spaces. In this work, we present Multi-Objective MAP-Elites with Policy-Gradient Assistance and Crowding-based Exploration (MOME-PGX): a new QD algorithm that extends MOME to improve its data efficiency and performance. MOME-PGX uses gradient-based optimisation to efficiently drive solutions towards higher performance. It also introduces crowding-based mechanisms to create an improved exploration strategy and to encourage greater uniformity across Pareto fronts. We evaluate MOME-PGX in four simulated robot locomotion tasks and demonstrate that it converges faster and to a higher performance than all other baselines. 
We show that MOME-PGX is between 4.3 and 42 times more data-efficient than MOME and doubles the performance of MOME, NSGA-II and SPEA2 in challenging environments.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(mertan:2023:GECCO, author = {Alican Mertan and Nick Cheney}, title = {Modular Controllers Facilitate the {Co-Optimization} of Morphology and Control in Soft Robots}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {174--183}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary robotics, soft robotics, brain-body co-optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590416}, size = {10 pages}, abstract = {Soft robotics is a rapidly growing area of robotics research that would benefit greatly from design automation, given the challenges of manually engineering complex, compliant, and generally non-intuitive robot body plans and behaviors. It has been suggested that a major hurdle currently limiting soft robot brain-body co-optimization is the fragile specialization between a robot's controller and the particular body plan it controls, resulting in premature convergence. Here we posit that modular controllers are more robust to changes to a robot's body plan. We demonstrate a decreased reduction in locomotion performance after morphological mutations to soft robots with modular controllers, relative to those with similar global controllers - leading to fitter offspring. Moreover, we show that the increased transferability of modular controllers to similar body plans enables more effective brain-body co-optimization of soft robots, resulting in an increased rate of positive morphological mutations and higher overall performance of evolved robots. We hope that this work helps provide specific methods to improve soft robot design automation in this particular setting, while also providing evidence to support our understanding of the challenges of brain-body co-optimization more generally. 
}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(nadizar:2023:GECCO, author = {Giorgia Nadizar and Eric Medvet and Kathryn Walker and Sebastian Risi}, title = {A {Fully-Distributed} {Shape-Aware} Neural Controller for Modular Robots}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {184--192}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {neural cellular automata, embodied intelligence, evolutionary robotics, collective intelligence, neuroevolution}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590419}, size = {9 pages}, abstract = {Modular robots are promising for their versatility and large design freedom. Modularity can also enable automatic assembly and reconfiguration, be it autonomous or via external machinery. However, these procedures are error-prone and often result in misassemblings. This, in turn, can cause catastrophic effects on the robot functionality, as the controller deployed in each module is optimized for a different robot shape than the actual one. In this work, we address such shortcoming by proposing a shape-aware modular controller, operating with (1) a self-discovery phase, in which each module controller identifies the shape it is assembled in, followed by (2) a parameter selection phase, where the controller selects its parameters according to the inferred shape. We deploy a self-classifying neural cellular automaton for phase (1), and we leverage evolutionary optimization for implementing a library of controller parameters for phase (2). We test the validity of the proposed method considering voxel-based soft robots, a class of modular soft robots, and the task of locomotion. 
Our findings confirm the effectiveness of such a controller paradigm, and also show that it can be used to partially overcome unforeseen damages or assembly mistakes.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(parsa:2023:GECCO, author = {Atoosa Parsa and Sven Witthaus and Nidhi Pashine and Corey O'Hern and Rebecca Kramer-Bottiglio and Josh Bongard}, title = {Universal Mechanical Polycomputation in Granular Matter}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {193--201}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {granular metamaterials, unconventional computing, mechanical computing}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590520}, size = {9 pages}, abstract = {Unconventional computing devices are increasingly of interest as they can operate in environments hostile to silicon-based electronics, or compute in ways that traditional electronics cannot. Mechanical computers, wherein information processing is a material property emerging from the interaction of components with the environment, are one such class of devices. This information processing can be manifested in various physical substrates, one of which is granular matter. In a granular assembly, vibration can be treated as the information-bearing mode. This can be exploited to realize "polycomputing": materials can be evolved such that a single grain within them can report the result of multiple logical operations simultaneously at different frequencies, without recourse to quantum effects. Here, we demonstrate the evolution of a material in which one grain acts simultaneously as two different NAND gates at two different frequencies. NAND gates are of interest as any logical operations can be built from them. Moreover, they are nonlinear thus demonstrating a step toward general-purpose, computationally dense mechanical computers. Polycomputation was found to be distributed across each evolved material, suggesting the material's robustness. 
With recent advances in material sciences, hardware realization of these materials may eventually provide devices that challenge the computational density of traditional computers.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(pierro:2023:GECCO, author = {Alessandro Pierro and Kristine Heiney and Shamit Shrivastava and Giulia Marcucci and Stefano Nichele}, title = {Optimization of a Hydrodynamic Computational Reservoir through Evolution}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {202--210}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {pattern recognition and classification, evolvable hardware, complex systems}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590355}, size = {9 pages}, abstract = {As demand for computational resources reaches unprecedented levels, research is expanding into the use of complex material substrates for computing. In this study, we interface with a model of a hydrodynamic system, under development by a startup, as a computational reservoir and optimize its properties using an evolution in materio approach. Input data are encoded as waves applied to our shallow water reservoir, and the readout wave height is obtained at a fixed detection point. We optimized the readout times and how inputs are mapped to the wave amplitude or frequency using an evolutionary search algorithm, with the objective of maximizing the system's ability to linearly separate observations in the training data by maximizing the readout matrix determinant. Applying evolutionary methods to this reservoir system substantially improved separability on an XNOR task, in comparison to implementations with hand-selected parameters. We also applied our approach to a regression task and show that our approach improves out-of-sample accuracy. 
Results from this study will inform how we interface with the physical reservoir in future work, and we will use these methods to continue to optimize other aspects of the physical implementation of this system as a computational reservoir.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(pigozzi:2023:GECCO, author = {Federico Pigozzi and Stephanie Woodman and Eric Medvet and Rebecca Kramer-Bottiglio and Josh Bongard}, title = {Morphology Choice Affects the Evolution of Affordance Detection in Robots}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {211--219}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {affordance detection, embodied cognition, soft robots}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590505}, size = {9 pages}, abstract = {A vital component of intelligent action is affordance detection: understanding what actions external objects afford the viewer. This requires the agent to understand the physical nature of the object being viewed, its own physical nature, and the potential relationships possible when they interact. Although robotics researchers have investigated affordance detection, the way in which the morphology of the robot facilitates, obstructs, or otherwise influences the robot's ability to detect affordances has yet to be studied. We do so here and find that a robot with an appropriate morphology can evolve to predict whether it will fit through an aperture with just minimal tactile feedback. We also find that some robot morphologies facilitate the evolution of more accurate affordance detection, while others do not if all have the same evolutionary optimization budget. This work demonstrates that sensation, thought, and action are necessary but not sufficient for understanding how affordance detection may evolve in organisms or robots: morphology must also be taken into account.
It also suggests that, in the future, we may optimize morphology along with control in order to facilitate affordance detection in robots, and thus improve their reliable and safe action in the world.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(tjanaka:2023:GECCO, author = {Bryon Tjanaka and Matthew C Fontaine and David H Lee and Yulun Zhang and Nivedit Reddy Balam and Nathaniel Dennler and Sujay S Garlanka and Nikitas Dimitri Klapsis and Stefanos Nikolaidis}, title = {Pyribs: A {Bare-Bones} Python Library for Quality Diversity Optimization}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {220--229}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {quality diversity, framework, software library}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590374}, size = {10 pages}, abstract = {Recent years have seen a rise in the popularity of quality diversity (QD) optimization, a branch of optimization that seeks to find a collection of diverse, high-performing solutions to a given problem. To grow further, we believe the QD community faces two challenges: developing a framework to represent the field's growing array of algorithms, and implementing that framework in software that supports a range of researchers and practitioners. To address these challenges, we have developed pyribs, a library built on a highly modular conceptual QD framework. By replacing components in the conceptual framework, and hence in pyribs, users can compose algorithms from across the QD literature; equally important, they can identify unexplored algorithm variations. Furthermore, pyribs makes this framework simple, flexible, and accessible, with a user-friendly API supported by extensive documentation and tutorials. This paper overviews the creation of pyribs, focusing on the conceptual framework that it implements and the design principles that have guided the library's development. Pyribs is available at https://pyribs.org}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(ben-jedidia:2023:GECCO, author = {Firas {Ben Jedidia} and Benjamin Doerr and Martin S. 
Krejca}, title = {{Estimation-of-Distribution} Algorithms for {Multi-Valued} Decision Variables}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {230--238}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {estimation-of-distribution algorithms, genetic drift, leadingones benchmark, univariate marginal distribution algorithm, evolutionary algorithms}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590523}, size = {9 pages}, abstract = {With apparently all research on estimation-of-distribution algorithms (EDAs) concentrated on pseudo-Boolean optimization and permutation problems, we undertake the first steps towards using EDAs for problems in which the decision variables can take more than two values, but which are not permutation problems. To this aim, we propose a natural way to extend the known univariate EDAs to such variables. Different from a na\"{\i}ve reduction to the binary case, it avoids additional constraints. Since understanding genetic drift is crucial for an optimal parameter choice, we extend the known quantitative analysis of genetic drift to EDAs for multi-valued variables. Roughly speaking, when the variables take r different values, the time for genetic drift to become critical is r times shorter than in the binary case. Consequently, the update strength of the probabilistic model has to be chosen r times lower now. To investigate how desired model updates take place in this framework, we undertake a mathematical runtime analysis on the r-valued LeadingOnes problem. 
We prove that with the right parameters, the multi-valued UMDA solves this problem efficiently in $O(r \log(r)^2 n^2 \log(n))$ function evaluations. Overall, our work shows that EDAs can be adjusted to multi-valued problems and gives advice on how to set their parameters.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(benavides:2023:GECCO, author = {Xabier Benavides and Josu Ceberio and Leticia Hernando and Jose Antonio Lozano}, title = {New Knowledge about the Elementary Landscape Decomposition for Solving the Quadratic Assignment Problem}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {239--247}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {quadratic assignment problem, elementary landscapes}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590369}, size = {9 pages}, abstract = {Previous works have shown that studying the characteristics of the Quadratic Assignment Problem (QAP) is a crucial step in gaining knowledge that can be used to design tailored meta-heuristic algorithms. One way to analyze the characteristics of the QAP is to decompose its objective function into a linear combination of orthogonal sub-functions that can be independently studied. In particular, this work focuses on a decomposition approach that has attracted considerable attention: the Elementary Landscape Decomposition (ELD). The main drawback of the ELD is that it does not allow an understandable characterization of what is being measured by each component of the decomposition. Thus, it turns out difficult to design new efficient meta-heuristic algorithms for the QAP based on the ELD. To address this issue, in this work, we delve deeper into the ELD by means of an additional decomposition of its elementary components.
Conducted experiments show that the performed analysis may be used to explain the behaviour of ELD-based methods, providing critical information about their potential applications.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(bossek:2023:GECCO, author = {Jakob Bossek and Aneta Neumann and Frank Neumann}, title = {On the Impact of Basic Mutation Operators and Populations within Evolutionary Algorithms for the Dynamic Weighted Traveling Salesperson Problem}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {248--256}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary algorithms, dynamic optimization, weighted traveling salesperson problem, re-optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590384}, size = {9 pages}, abstract = {Evolutionary algorithms have been shown to obtain good solutions for complex optimization problems in static and dynamic environments. It is important to understand the behaviour of evolutionary algorithms for complex optimization problems that also involve dynamic and/or stochastic components in a systematic way in order to further increase their applicability to real-world problems. We investigate the node weighted traveling salesperson problem (W-TSP), which provides an abstraction of a wide range of weighted TSP problems, in dynamic settings. In the dynamic setting of the problem, items that have to be collected as part of a TSP tour change over time. We first present a dynamic setup for the dynamic W-TSP parameterized by different types of changes that are applied to the set of items to be collected when traversing the tour. Our first experimental investigations study the impact of such changes on resulting optimized tours in order to provide structural insights of optimization solutions. 
Afterwards, we investigate simple mutation-based evolutionary algorithms and study the impact of the mutation operators and the use of populations when dealing with the dynamic changes to the node weights of the problem.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(canonne:2023:GECCO, author = {Lorenzo Canonne and Bilel Derbel and Francisco Chicano and Gabriela Ochoa}, title = {To Combine or Not to Combine Graybox Crossover and Local Search?}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {257--265}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {graybox optimization, fitness landscape analysis}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590466}, size = {9 pages}, abstract = {Specialized graybox local search and crossover have been successfully combined within the framework of the so-called Drils (Deterministic recombination and iterated local search) algorithm. As for any evolutionary algorithm, the initial design framework, and the underlying high-level choices and parameters, are crucially important. The Drils algorithm is no exception, and recent enhanced variants exist in the literature. In this paper, we aim at: (i) improving the performance of the latest variants of Drils, and (ii) providing a better principled understanding of graybox search behavior and dynamics. On the basis of a preliminary analysis using Local Optima Networks of small-size NKQ-landscapes, we first highlight the difference between using local search with and without crossover. We then propose to pipeline these two techniques in a simple two-phase like iterated local search scheme which is shown to provide substantial improvements over the latest Drils+ variant for large-size NKQ-landscapes.
We further report a dedicated analysis in an attempt to provide new insights into the impact of local search and crossover on the phenotype and the genotype of the local optima encountered in the search trajectory.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(cavalcanti-costa:2023:GECCO, author = {Joao Guilherme {Cavalcanti Costa} and Yi Mei and Mengjie Zhang}, title = {Learning to Select Initialisation Heuristic for Vehicle Routing Problems}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {266--274}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {vehicle routing problem, initialisation heuristic, machine learning}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590397}, size = {9 pages}, abstract = {The Vehicle Routing Problem (VRP) is a complex problem that comes with a great number of applications in logistics and supply chains. It is non-trivial to select the optimal VRP techniques to solve these applications, especially since there are several possible scenarios. As there is no way to predict how each algorithm would perform until it is (at least partially) deployed, it would make sense in selecting the ones that have higher adaptability to the given environment. In this paper, we consider this idea on the initialisation part of a local search-based metaheuristic. We argue that a proper initialisation is important for obtaining better VRP solutions and apply several machine learning techniques aiming to learn how to use distinct features from four commonly used construction heuristics solutions, predicting the scenarios in which they are the most effective. We also provide relevant discussions on the effects of the initial solution on a local search context. 
Results show that the proposed method can help select the best or an improving method for the majority of the instances considered, especially for large-scale VRP instances.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(chicano:2023:GECCO, author = {Francisco Chicano and Bilel Derbel and S\'{e}bastien Verel}, title = {Fourier {Transform-Based} Surrogates for Permutation Problems}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {275--283}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {group representation theory, surrogate models, permutation problems}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590425}, size = {9 pages}, abstract = {In the context of pseudo-Boolean optimization, surrogate functions based on the Walsh-Hadamard transform have been recently proposed with great success. It has been shown that lower-order components of the Walsh-Hadamard transform have usually a larger influence on the value of the objective function. Thus, creating a surrogate model using the lower-order components of the transform can provide a good approximation to the objective function. The Walsh-Hadamard transform in pseudo-Boolean optimization is a particularization in the binary representation of a Fourier transform over a finite group, precisely defined in the framework of group representation theory. Using this more general definition, it is possible to define a Fourier transform for the functions over permutations. We propose in this paper the use of surrogate functions based on the Fourier transforms over the permutation space. We check how similar the proposed surrogate models are to the original objective function and we also apply regression to learn a surrogate model based on the Fourier transform. 
The experimental setting includes two permutation problems for which the exact Fourier transform is unknown based on the problem parameters: the Asteroid Routing Problem and the Single Machine Total Weighted Tardiness.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(chicano:2023:GECCO2, author = {Francisco Chicano and Gabriela Ochoa and Bilel Derbel and Lorenzo Canonne}, title = {Local Optima Markov Chain: A New Tool for {Landscape-Aware} Analysis of Algorithm Dynamics}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {284--292}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {local optima markov chain, hill climbing, iterated local search, local optima networks, partition crossover}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590422}, size = {9 pages}, abstract = {Landscape analysis is a very useful tool in optimization to understand the structure of the search space of a problem when there is some kind of distance or neighborhood defined over the solutions. Local Optima Networks (LON) have been proposed to serve as a summary of the landscape of a problem. LONs are graphs where the nodes are the local optima of the search space according to a particular neighborhood and edges join local optima when one can be reached from the other using some kind of perturbation followed by hill climbing. In this paper we enhance local optima networks to include precise information on the transition probabilities among local optima, yielding a Markov Chain for the visited local optima during the search. The new analysis tool, called Local Optima Markov Chain (LOMA), is built on top of the static landscape information depending on the problem and includes information about algorithm dynamics. 
We show how LOMAs can be used to compute metrics that are out of the reach of other landscape-aware tools, thus offering more information to understand algorithm dynamics.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(gro\ss{}mann:2023:GECCO, author = {Ernestine Gro\ss{}mann and Sebastian Lamm and Christian Schulz and Darren Strash}, title = {Finding {Near-Optimal} Weight Independent Sets at Scale}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {293--302}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590353}, size = {10 pages}, abstract = {Computing maximum weight independent sets in graphs is an important NP-hard optimization problem. The problem is particularly difficult to solve in large graphs for which data reduction techniques do not work well. To be more precise, state-of-the-art branch-and-reduce algorithms can solve many large-scale graphs if reductions are applicable. Otherwise, their performance quickly degrades due to branching requiring exponential time. In this paper, we develop an advanced memetic algorithm to tackle the problem, which incorporates recent data reduction techniques to compute near-optimal weighted independent sets in huge sparse networks. More precisely, we use a memetic approach to recursively choose vertices that are likely to be in a large-weight independent set. We include these vertices into the solution, and further reduce the graph. We show that identifying and removing vertices likely to be in large-weight independent sets opens up the reduction space and speeds up the computation of large-weight independent sets remarkably. Our experimental evaluation indicates that we are able to outperform state-of-the-art algorithms. For example, our two algorithm configurations compute the best results among all competing algorithms for 205 out of 207 instances. 
Thus, it can be seen as a useful tool when large-weight independent sets need to be computed in practice.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(li:2023:GECCO, author = {Miqing Li and Xiaofeng Han and Xiaochen Chu}, title = {{MOEAs} Are Stuck in a Different Area at a Time}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {303--311}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary algorithms, distance metric, combinatorial optimisation, multi-objective optimisation, search space}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590447}, size = {9 pages}, abstract = {In this paper, we show that when dealing with multi-objective combinatorial optimisation problems, the search, in different executions of a multi-objective evolutionary algorithm (MOEA), e.g., NSGA-II, tends to stagnate in different areas in the search space. In other words, the final populations obtained by an MOEA under multiple executions, which can be very close in the objective space, are located far away from each other in the search space. This phenomenon becomes more apparent with the increase of some type of problem complexity (e.g., the ruggedness level of problem landscape). Interestingly, the phenomenon only happens to combinatorial optimisation problems, but not to continuous ones. In this study, we consider three well-established MOEAs (NSGA-II, SMS-EMOA and MOEA/D) on two representative combinatorial optimisation problems (NK-landscape and TSP) and on two commonly used continuous problem suites (DTLZ and WFG).
Experimental results show a clear difference between multi-objective combinatorial and continuous problems and suggest a need of more efforts to be put on developing effective MOEAs for combinatorial problems.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(marrero:2023:GECCO, author = {Alejandro Marrero and Eduardo Segredo and Emma Hart and Jakob Bossek and Aneta Neumann}, title = {Generating Diverse and Discriminatory Knapsack Instances by Searching for Novelty in Variable Dimensions of {Feature-Space}}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {312--320}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary computation, instance-space analysis, instance generation, novelty search, knapsack problem}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590504}, size = {9 pages}, abstract = {Generating new instances via evolutionary methods is commonly used to create new benchmarking data-sets, with a focus on attempting to cover an instance-space as completely as possible. Recent approaches have exploited Quality-Diversity methods to evolve sets of instances that are both diverse and discriminatory with respect to a portfolio of solvers, but these methods can be challenging when attempting to find diversity in a high-dimensional feature-space. We address this issue by training a model based on Principal Component Analysis on existing instances to create a low-dimension projection of the high-dimension feature-vectors, and then apply Novelty Search directly in the new low-dimension space. We conduct experiments to evolve diverse and discriminatory instances of Knapsack Problems, comparing the use of Novelty Search in the original feature-space to using Novelty Search in a low-dimensional projection, and repeat over a given set of dimensions. We find that the methods are complementary: if treated as an ensemble, they collectively provide increased coverage of the space. 
Specifically, searching for novelty in a low-dimension space contributes 56\% of the filled regions of the space, while searching directly in the feature-space covers the remaining 44\%.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(mischek:2023:GECCO, author = {Florian Mischek and Nysret Musliu}, title = {Leveraging {Problem-Independent} {Hyper-Heuristics} for {Real-World} Test Laboratory Scheduling}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {321--329}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {test laboratory scheduling, HyFlex, hyper-heuristics}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590354}, size = {9 pages}, abstract = {The area of project scheduling problems has seen a tremendous amount of different problem variations. Traditionally, each problem variant requires custom solution approaches in order to produce high-quality solutions. Developing and tuning these methods is an expensive process that may have to be repeated as soon as the requirements or problem structures change. On the other hand, research into hyper-heuristics has produced general heuristic problem-solving techniques that were developed to achieve good results on multiple diverse problem domains. They work with a set of comparatively simple low-level heuristics and dynamically adapt themselves to each new problem variant. In this paper, we investigate hyper-heuristic approaches for a real-world industrial test laboratory scheduling problem and develop a new problem domain for the HyFlex hyper-heuristic framework. We propose a diverse portfolio of low-level heuristics that can be dynamically selected during the search process by hyper-heuristics to solve the problem. 
We evaluate and compare the performance of several problem-independent hyper-heuristics on this domain and show that they are able to match, and sometimes even exceed, the performance of state-of-the-art solution techniques that were developed and tuned specifically for this problem.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(nepomuceno:2023:GECCO, author = {Napole\~{a}o Nepomuceno and Ricardo Saboia and Andr\'{e} Coelho}, title = {A {MILP-Based} Very {Large-Scale} Neighborhood Search for the Heterogeneous Vehicle Routing Problem with Simultaneous Pickup and Delivery}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {330--338}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {very large-scale neighborhood search, matheuristics, vehicle routing problem}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3591054}, size = {9 pages}, abstract = {The Vehicle Routing Problem with Simultaneous Pickup and Delivery (VRPSPD) is a challenging NP-hard optimization problem, in which each of several customers demands both delivery and pickup operations and must be visited once by a single vehicle from a fleet. In this article, we present a Very Large-Scale Neighborhood Search (VLSN) to tackle the VRPSPD with heterogeneous fleets, whereby different large neighborhood structures are employed within a simple descent method. 
We use benchmark instances from literature and present very competitive results, improving several of the best-known solutions and consistently outperforming current state-of-the-art algorithms.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(pei:2023:GECCO, author = {Jiyuan Pei and Hao Tong and Jialin Liu and Yi Mei and Xin Yao}, title = {Local Optima Correlation Assisted Adaptive Operator Selection}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {339--347}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {capacitated vehicle routing problem, experience-based optimisation, local search, combinatorial optimisation, adaptive operator selection, metaheuristics}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590399}, size = {9 pages}, abstract = {For solving combinatorial optimisation problems with metaheuristics, different search operators are applied for sampling new solutions in the neighbourhood of a given solution. It is important to understand the relationship between operators for various purposes, e.g., adaptively deciding when to use which operator to find optimal solutions efficiently. However, it is difficult to theoretically analyse this relationship, especially in the complex solution space of combinatorial optimisation problems. In this paper, we propose to empirically analyse the relationship between operators in terms of the correlation between their local optima and develop a measure for quantifying their relationship. The comprehensive analyses on a wide range of capacitated vehicle routing problem benchmark instances show that there is a consistent pattern in the correlation between commonly used operators. Based on this newly proposed local optima correlation metric, we propose a novel approach for adaptively selecting among the operators during the search process. The core intention is to improve search efficiency by preventing wasting computational resources on exploring neighbourhoods where the local optima have already been reached. Experiments on randomly generated instances and commonly used benchmark datasets are conducted. 
Results show that the proposed approach outperforms commonly used adaptive operator selection methods.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(phan:2023:GECCO, author = {Quan Minh Phan and Ngoc Hoang Luong}, title = {Pareto Local Search is Competitive with Evolutionary Algorithms for {Multi-Objective} Neural Architecture Search}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {348--356}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {local optima networks, neural architecture search, evolutionary algorithms, multi-objective optimization, local search}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590395}, size = {9 pages}, abstract = {Neural architecture search (NAS) involves automatically searching for promising deep neural network structures in certain architecture spaces. Depending on the number of criteria being concerned, NAS can be formulated as single-objective optimization problems (SONAS) or multi-objective optimization problems (MONAS). Evolutionary algorithms (EAs) are common approaches for NAS due to their effectiveness in solving challenging combinatorial problems. Recent studies, however, have analyzed SONAS landscapes and indicated that, although NAS problems are multi-modal, local search algorithms with simple perturbation operators can escape local optima to reach global optima without much difficulty. Such investigations for MONAS remain under-explored. In this paper, we employ local optima networks (LONs) for visually structuring the MONAS landscape with a simple local search procedure. Via detailed analyses, we then design LOMONAS, a dedicated Pareto local search algorithm for MONAS. The experimental results on four NAS benchmarks (MacroNAS, NAS-Bench-101, NAS-Bench-201, and NAS-Bench-ASR) exhibit the superior performance of LOMONAS compared to two widely-used multi-objective EAs (MOEAs), NSGA-II and MOEA/D.
The findings indicate that Pareto local search algorithms are competitive with MOEAs in solving MONAS problems.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(ramirez-sanchez:2023:GECCO, author = {Jairo Enrique {Ram\'{\i}rez S\'{a}nchez} and Camilo {Chac\'{o}n Sartori} and Christian Blum}, title = {{Q-Learning} Ant Colony Optimization Supported by Deep Learning for Target Set Selection}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {357--366}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {deep learning, ant colony optimization, target set selection, social networks}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590396}, size = {10 pages}, abstract = {The use of machine learning techniques within metaheuristics is a rapidly growing field of research. In this paper, we show how a deep learning framework can be beneficially used to improve an ant colony optimization algorithm. In particular, problem information obtained via deep learning is combined in our algorithm by means of Q-learning with the usual pheromone and greedy information. Our algorithm is applied to the Target Set Selection (TSS) problem, which is an NP-hard combinatorial optimization problem with applications, for example, in social networks. The specific problem variant considered in this paper asks for finding a smallest subset of the nodes of a given graph such that their influence can be spread to all other nodes of the graph via a diffusion process. The experimental results show, first, that the pure ant colony optimization approach can already compete with the state of the art. 
Second, the obtained results indicate that the hybrid algorithm variant outperforms the pure ant colony optimization approach especially in the context of large problem instances.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(santucci:2023:GECCO, author = {Valentino Santucci and Josu Ceberio}, title = {Doubly Stochastic Matrix Models for Estimation of Distribution Algorithms}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {367--374}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {permutation matching problems, doubly stochastic matrices, estimation of distribution algorithms, evolutionary algorithms}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590371}, size = {8 pages}, abstract = {Problems with solutions represented by permutations are very prominent in combinatorial optimization. Thus, in recent decades, a number of evolutionary algorithms have been proposed to solve them, and among them, those based on probability models have received much attention. In that sense, most efforts have focused on introducing algorithms that are suited for solving ordering/ranking nature problems. However, when it comes to proposing probability-based evolutionary algorithms for assignment problems, the works have not gone beyond proposing simple and in most cases univariate models. In this paper, we explore the use of Doubly Stochastic Matrices (DSM) for optimizing matching and assignment nature permutation problems. To that end, we explore some learning and sampling methods to efficiently incorporate DSMs within the picture of evolutionary algorithms. Specifically, we adopt the framework of estimation of distribution algorithms and compare DSMs to some existing proposals for permutation problems. Conducted preliminary experiments on instances of the quadratic assignment problem validate this line of research and show that DSMs may obtain very competitive results, while computational cost issues still need to be further investigated.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(torralbo:2023:GECCO, author = {Manuel Torralbo and Leticia Hernando and Ernesto Contreras-Torres and Jose A. 
Lozano}, title = {On the Use of Second Order Neighbors to Escape from Local Optima}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {375--383}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {second order neighborhood, escaping local optima, quadratic assignment problem, travelling salesperson problem, linear ordering problem, permutation-based combinatorial optimization problems}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590473}, size = {9 pages}, abstract = {Designing efficient local search based algorithms requires to consider the specific properties of the problems. We introduce a simple and efficient strategy, the Extended Reach, that escapes from local optima obtained from a best improvement local search and apply it to the linear ordering problem (LOP), the traveling salesperson problem (TSP) and the quadratic assignment problem (QAP). This strategy is based on two landscape properties observed in the literature. First, it considers that a local optimum is usually located in the frontier of its own attraction basin, and thus, it is enough to inspect the second order neighbors to reach a (better) solution inside an attraction basin of a better local optimum. Second, taking into account that for the LOP and specific neighborhoods it is possible to discard solutions without the need of being evaluated, we extend this result to the TSP with the 2-opt neighborhood to avoid the unnecessary evaluation of solutions. Efficient ways of evaluating the second order neighbors are also presented, based on the cost differences, reducing significantly the computation cost. 
Experimental results on random and benchmark instances show that our strategy, indeed, escapes from local optima despite its simplicity.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(zhu:2023:GECCO, author = {Luyao Zhu and Fangfang Zhang and Xiaodong Zhu and Ke Chen and Mengjie Zhang}, title = {{Sample-Aware} {Surrogate-Assisted} Genetic Programming for Scheduling Heuristics Learning in Dynamic Flexible Job Shop Scheduling}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {384--392}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, automated scheduling heuristics design, dynamic flexible job shop scheduling, surrogate samples}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590440}, size = {9 pages}, abstract = {Genetic programming (GP) has been successfully introduced to learn scheduling heuristics for dynamic flexible job shop scheduling (DFJSS) automatically. However, the evaluations of GP individuals are normally time-consuming, especially with long DFJSS simulations. Taking k-nearest neighbour with phenotypic characterisations of GP individuals as a surrogate approach, has been successfully used to preselect GP offspring to the next generation for effectiveness improvement. However, this approach is not straightforward to improve the training efficiency, which is normally the primary goal of surrogate. In addition, there is no study on which GP individuals (samples) are good for building surrogate models. To this end, first, this paper proposes a surrogate-assisted GP algorithm to reduce the training time of learning scheduling heuristics for DFJSS. Second, this paper further proposes an effective sampling strategy for surrogate-assisted GP. The results show that our proposed algorithm can achieve comparable performance with only about a third of training time of traditional GP. With the same training time, the proposed algorithm can significantly improve the quality of learned scheduling heuristics in all examined scenarios. 
Furthermore, the evolved scheduling heuristics by the proposed sample-aware surrogate-assisted GP are more interpretable with smaller rule sizes than traditional GP.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(andersen:2023:GECCO, author = {Hayden Andersen and Andrew Lensen and Will Browne and Yi Mei}, title = {Producing Diverse Rashomon Sets of Counterfactual Explanations with Niching Particle Swarm Optimization Algorithms}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {393--401}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {machine learning, explainable AI, particle swarm optimisation}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590444}, size = {9 pages}, abstract = {Counterfactual explanation is a popular eXplainable AI technique, that gives contrastive explanations to answer potential "what-if" questions about the workings of machine learning models. However, research into how explanations are understood by human beings has shown that an optimal explanation should be both selected and social, providing multiple varying explanations for the same event that allow a user to select specific explanations based on prior beliefs and cognitive biases. In order to provide such explanations, a Rashomon set of explanations can be created: a set of explanations utilising different features in the data. Current work to generate counterfactual explanations does not take this need into account, only focusing on producing a single optimal counterfactual.This work presents a novel method for generating a diverse Rashomon set of counterfactual explanations using the final population from a Particle Swarm Optimisation (PSO) algorithm. It explores a selection of PSO niching algorithms for PSO and evaluates the best algorithm to produce these sets. 
Finally, the ability of this method to be implemented and trusted by users is discussed.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(aydeniz:2023:GECCO, author = {Ayhan Alp Aydeniz and Robert Loftin and Kagan Tumer}, title = {Novelty Seeking Multiagent Evolutionary Reinforcement Learning}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {402--410}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {fitness shaping, multiagent learning, evolutionary RL, exploration}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590428}, size = {9 pages}, abstract = {Coevolving teams of agents promises effective solutions for many coordination tasks such as search and rescue missions or deep ocean exploration. Good team performance in such domains generally relies on agents discovering complex joint policies, which is particularly difficult when the fitness functions are sparse (where many joint policies return the same or even zero fitness values). In this paper, we introduce Novelty Seeking Multiagent Evolutionary Reinforcement Learning (NS-MERL), which enables agents to more efficiently explore their joint strategy space. The key insight of NS-MERL is to promote good exploratory behaviors for individual agents using a dense, novelty-based fitness function. Though the overall team-level performance is still evaluated via a sparse fitness function, agents using NS-MERL more efficiently explore their joint action space and more readily discover good joint policies. 
Our results in complex coordination tasks show that teams of agents trained with NS-MERL perform significantly better than agents trained solely with task-specific fitnesses.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(caetano:2023:GECCO, author = {Victor Caetano and Matheus C\^{a}ndido Teixeira and Gisele Lobo Pappa}, title = {Symbolic Regression Trees as Embedded Representations}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {411--419}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, embedded representations, semantics, transformers}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590423}, size = {9 pages}, abstract = {Representation learning is an area responsible for learning data representations that make it easier for machine learning algorithms to extract useful information from them. Deep learning currently has the most effective methods for this task and can learn distributed representations - also known as embeddings - able to represent different properties of the data and their relationship. In this direction, this paper introduces a new way to look at tree-like GP individuals for symbolic regression. Given a set of predefined operators and a sufficiently large number of solutions sampled from the space, we train a transformer to learn an encoding/decoding function. By transforming a tree representation into a distributed representation, we are able to measure distances between trees in a much more efficient way and, more importantly, generate the potential for these representations to capture semantics. We show that the embedding-based distance yields results very similar to those of a tree-edit distance, which reflects the trees' syntactic similarity.
Although the model as it stands is not able to capture semantics yet, we show its potential by using the generated tree-representation model in a simple task: measuring distances between trees in a fitness-sharing scenario.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(chen:2023:GECCO, author = {Qi Chen and Bing Xue and Wolfgang Banzhaf and Mengjie Zhang}, title = {Relieving Genetic Programming from Coefficient Learning for Symbolic Regression via Correlation and Linear Scaling}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {420--428}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, fitness function, correlation, linear scaling, symbolic regression}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3595918}, size = {9 pages}, abstract = {The difficulty of learning optimal coefficients in regression models using only genetic operators has long been a challenge in genetic programming for symbolic regression. As a simple but effective remedy it has been proposed to perform linear scaling of model outputs prior to a fitness evaluation. Recently, the use of a correlation coefficient-based fitness function with a post-processing linear scaling step for model alignment has been shown to outperform error-based fitness functions in generating symbolic regression models. In this study, we compare the impact of four evaluation strategies on relieving genetic programming (GP) from learning coefficients in symbolic regression and focusing on learning the more crucial model structure. The results from 12 datasets, including ten real-world tasks and two synthetic datasets, confirm that all these strategies assist GP to varying degrees in learning coefficients. 
Among them, correlation fitness with one-time linear scaling as post-processing, being the most efficient while bringing notable performance benefits, is the recommended strategy to relieve GP from learning coefficients.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(cook:2023:GECCO, author = {Joshua Cook and Kagan Tumer and Tristan Scheiner}, title = {Leveraging Fitness Critics To Learn Robust Teamwork}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {429--437}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {multiagent learning, fitness approximation, evolutionary machine learning}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590497}, size = {9 pages}, abstract = {Co-evolutionary algorithms have successfully trained agent teams for tasks such as autonomous exploration or robot soccer. However, such approaches generally seek a single strong team, whereas many real-world applications require agents to effectively cooperate across multiple teams. To adapt to different teammates, agents need to learn more general teamwork skills rather than a single team-specific role. Previous work primarily frames this as a fitness-shaping problem, providing high-quality but expensive evaluation methods to isolate an agent's contribution. In this work, we introduce Learned Evaluations for Robust Teaming (LERT), an approach that provides a local evaluation that leverages state trajectories of agents to better quantify their impact across multiple teams. The key insight of this work is that agent state trajectories and previous experiences carry sufficient information to map agent abilities to team performance. As a result, LERT cooperatively co-evolves agents to work together across arbitrary teams.
While only using local information and significantly fewer team evaluations, LERT performs as well as, if not better than, current methods.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(demir:2023:GECCO, author = {Kaan Demir and Bach Nguyen and Bing Xue and Mengjie Zhang}, title = {{Co-Operative} {Co-Evolutionary} {Many-Objective} Embedded {Multi-Label} Feature Selection with {Decomposition-Based} {PSO}}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {438--446}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {multi-label classification, decomposition, many-objective, embedded feature selection}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590373}, size = {9 pages}, abstract = {Multi-label classification is an emerging machine-learning problem involving the prediction of a set of class labels based on the instance's features. In real-world problems, there are hundreds or thousands of features, many of which are irrelevant or redundant, resulting in a large search space for feature selection, i.e. to find small and discriminative feature subsets that improve the classification performance. Feature selection for multi-label classification is a many-objective optimisation problem with more than three main conflicting objectives, one of which is to reduce the number of selected features. There are many metrics for measuring multi-label classification performance, each of which can conflict with one another depending on the task. Hence, multi-label feature selection is a many-objective optimisation problem when three or more classification metrics and the number of selected features are optimised. In this paper, we propose to combine multi-label feature selection with evolutionary many-objective optimisation to address the above challenges and handle the trade-offs between multiple classification metrics and the number of selected features, using a decomposition-based algorithm.
The results demonstrate that our proposed method is capable of finding discriminative and small feature subsets that can significantly improve the classification performances in comparison with other many-objective feature selection approaches.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(dixit:2023:GECCO, author = {Gaurav Dixit and Kagan Tumer}, title = {Learning Synergies for {Multi-Objective} Optimization in Asymmetric Multiagent Systems}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {447--455}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {team composition, multiagent multi-objective optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590524}, size = {9 pages}, abstract = {Agents in a multiagent system must learn diverse policies that allow them to express complex inter-agent relationships required to optimize a single team objective. Multiagent Quality Diversity methods partially address this by transforming the agents' large joint policy space to a tractable sub-space that can produce synergistic agent policies. However, a majority of real-world problems are inherently multi-objective and require asymmetric agents (agents with different capabilities and objectives) to learn policies that represent diverse trade-offs between agent-specific and team objectives. This work introduces Multi-objective Asymmetric Island Model (MO-AIM), a multi-objective multiagent learning framework for the discovery of generalizable agent synergies and trade-offs that is based on adapting the population dynamics over a spectrum of tasks. The key insight is that the competitive pressure arising from the changing populations on the team tasks forces agents to acquire robust synergies required to balance their individual and team objectives in response to the nature of their teams and task dynamics. 
Results on several variations of a multi-objective habitat problem highlight the potential of MO-AIM in producing teams with diverse specializations and trade-offs that readily adapt to unseen tasks.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(fontaine:2023:GECCO, author = {Matthew Fontaine and Stefanos Nikolaidis}, title = {Covariance Matrix Adaptation {MAP-Annealing}}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {456--465}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590389}, size = {10 pages}, abstract = {Single-objective optimization algorithms search for the single highest-quality solution with respect to an objective. Quality diversity (QD) optimization algorithms, such as Covariance Matrix Adaptation MAP-Elites (CMA-ME), search for a collection of solutions that are both high-quality with respect to an objective and diverse with respect to specified measure functions. However, CMA-ME suffers from three major limitations highlighted by the QD community: prematurely abandoning the objective in favor of exploration, struggling to explore flat objectives, and having poor performance for low-resolution archives. We propose a new quality diversity algorithm, Covariance Matrix Adaptation MAP-Annealing (CMA-MAE), that addresses all three limitations. We provide theoretical justifications for the new algorithm with respect to each limitation. 
Our theory informs our experiments, which support the theory and show that CMA-MAE achieves state-of-the-art performance and robustness.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(fontbonne:2023:GECCO, author = {Nicolas Fontbonne and Nicolas Maudet and Nicolas Bredeche}, title = {Adaptive Team Cooperative {Co-Evolution} for a {Multi-Rover} Distribution Problem}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {466--475}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {marginal contribution, fitness evaluation, multi-agent reinforcement learning, multi-armed bandits, cooperative coevolution EA, evolutionary robotics, multi-agent systems, multi-robots}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590500}, size = {10 pages}, abstract = {This paper deals with policy learning for a team of heterogeneous robotic agents when the whole team shares a single reward. We address the problem of providing an accurate estimation of the contribution of each agent in tasks where coordination between agents requires joint policy updates of two (or more) agents. This is typically the case when two agents must simultaneously modify their behaviors to perform a joint action that leads to a performance gain for the whole team. We propose a cooperative co-evolutionary algorithm extended with a multi-armed bandit algorithm that dynamically adjusts the number of agents that should update their policies simultaneously, aiming both for performance and learning speed. We use a realistic robotic multi-rover task where agents must physically distribute themselves on points of interest of different natures to complete the task. Results show that the algorithm is able to select the best group size for policy updates that reflects the task's coordination requirements. 
Surprisingly, we also reveal that coupling between agents' actions in a realistic setup can emerge from interactions at the phenotypical level, hinting at subtle interactions during learning between the control parameter space and the behavioral space.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(frati:2023:GECCO, author = {Lapo Frati and Neil Traft and Nick Cheney}, title = {{OmnImage:} Evolving 1k Image Cliques for {Few-Shot} Learning}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {476--484}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {few-shot learning, computer vision, evolution}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590430}, size = {9 pages}, abstract = {Few-shot learning datasets contain a large number of classes with only a few examples in each. Existing datasets may contain thousands of classes, but very simple images (e.g. handwritten characters) such that a naive baseline can perform very well. Or they may be so complex, with large within-class variation and distracting background, that they are too difficult to enable meaningful learning with so few examples. To construct a customizable dataset of consistent natural images, we assemble a new dataset with each class containing a small subset from each of the 1000 classes of ImageNet-1k. To select subsets with clear within-class consistency we use an evolutionary approach to minimize the pairwise cosine-distance between features generated by a pre-trained VGG model. We train a classifier on these evolved image cliques and find that our evolved dataset provides a greater challenge than handwritten digits, but not the extreme difficulty of a non-evolved subset of ImageNet. We find that pre-training our classifier on these evolved prototypical classes significantly improves performance on classifying random subsets of ImageNet (relative to pre-training on similar random-class subsets), and conjecture that these prototypical classes may be beneficial for seeding concept learning.
The dataset and code are publicly available at: https://github.com/lfrati/OmnImage}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(guha:2023:GECCO, author = {Ritam Guha and Wei Ao and Stephen Kelly and Vishnu Boddeti and Erik Goodman and Wolfgang Banzhaf and Kalyanmoy Deb}, title = {{MOAZ:} A {Multi-Objective} {AutoML-Zero} Framework}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {485--492}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary algorithms, AutoML, multi-objective search}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590391}, size = {8 pages}, abstract = {Automated machine learning (AutoML) greatly eases human efforts in architecture engineering. However, mainstream AutoML methods like neural architecture search (NAS) are customized for well-designed search spaces wherein promising architectures are densely distributed. In contrast, AutoML-Zero builds machine-learning algorithms using basic primitives and can explore novel architectures beyond human knowledge. AutoML-Zero shows the potential to deploy machine learning systems without relying on either feature engineering or architectural engineering. In its current form, it only optimizes a single objective like accuracy and has no mechanism to ensure that the constraints of real-world applications are satisfied. We propose a multi-objective variant of AutoML-Zero called MOAZ, which distributes solutions on a Pareto front by trading off accuracy against the computational complexity of the machine learning algorithm. In addition to generating different Pareto-optimal solutions, MOAZ can effectively explore the sparse search space to improve search efficiency. Experimental results on linear regression tasks show MOAZ reduces the median complexity by 87.4\% compared to AutoML-Zero while accelerating the median target performance achievement speed by 82\%.
In addition, our preliminary results on non-linear regression tasks show the potential for further improvements in search accuracy and for reducing the need for human intervention in AutoML.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(kang:2023:GECCO, author = {Myeongjong Kang}, title = {Positive Definite Nonparametric Regression Using an Evolutionary Algorithm with Application to Covariance Function Estimation}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {493--501}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {probabilistic model-building genetic algorithm, kernel smoothing, shape constrained regression, covariance estimation, variogram analysis}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590363}, size = {9 pages}, abstract = {We propose a novel nonparametric regression framework subject to the positive definiteness constraint. It offers a highly modular approach for estimating covariance functions of stationary processes. Our method can impose positive definiteness, as well as isotropy and monotonicity, on the estimators, and its hyperparameters can be decided using cross validation. We define our estimators by taking integral transforms of kernel-based distribution surrogates. We then use the iterated density estimation evolutionary algorithm, a variant of estimation of distribution algorithms, to fit the estimators. We also extend our method to estimate covariance functions for point-referenced data. Compared to alternative approaches, our method provides more reliable estimates for long-range dependence. Several numerical studies are performed to demonstrate the efficacy and performance of our method. 
Also, we illustrate our method using precipitation data from the Spatial Interpolation Comparison 97 project.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(kenny:2023:GECCO, author = {Angus Kenny and Tapabrata Ray and Steffen Limmer and Hemant Kumar Singh and Tobias Rodemann and Markus Olhofer}, title = {Hybridizing {TPOT} with Bayesian Optimization}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {502--510}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590364}, size = {9 pages}, abstract = {Tree-based pipeline optimization tool (TPOT) is used to automatically construct and optimize machine learning pipelines for classification or regression tasks. The pipelines are represented as trees comprising multiple data transformation and machine learning operators --- each using discrete hyper-parameter spaces --- and optimized with genetic programming. During the evolution process, TPOT evaluates numerous pipelines which can be challenging when computing budget is limited. In this study, we integrate TPOT with Bayesian Optimization (BO) to extend its ability to search across continuous hyper-parameter spaces, and attempt to improve its performance when there is a limited computational budget. Multiple hybrid variants are proposed and systematically evaluated, including (a) sequential/periodic use of BO and (b) use of discrete/continuous search spaces for BO. The performance of these variants is assessed using 6 data sets with up to 20 features and 20,000 samples. Furthermore, an adaptive variant was designed where the choice of whether to apply TPOT or BO is made automatically in each generation. 
While the variants did not produce results that are significantly better than "standard" TPOT, the study uncovered important insights into the behavior and limitations of TPOT itself, which are valuable in designing improved variants.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(la-cava:2023:GECCO, author = {William G {La Cava}}, title = {Optimizing Fairness Tradeoffs in Machine Learning with Multiobjective {Meta-Models}}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {511--519}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {machine learning, multiobjective optimization, fairness}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590487}, size = {9 pages}, abstract = {Improving the fairness of machine learning models is a nuanced task that requires decision makers to reason about multiple, conflicting criteria. The majority of fair machine learning methods transform the error-fairness trade-off into a single objective problem with a parameter controlling the relative importance of error versus fairness. We propose instead to directly optimize the error-fairness tradeoff by using multi-objective optimization. We present a flexible framework for defining the fair machine learning task as a weighted classification problem with multiple cost functions. This framework is agnostic to the underlying prediction model as well as the metrics. We use multiobjective optimization to define the sample weights used in model training for a given machine learner, and adapt the weights to optimize multiple metrics of fairness and accuracy across a set of tasks. To reduce the number of optimized parameters, and to constrain their complexity with respect to population subgroups, we propose a novel meta-model approach that learns to map protected attributes to sample weights, rather than optimizing those weights directly.
On a set of real-world problems, this approach outperforms current state-of-the-art methods by finding solution sets with preferable error/fairness trade-offs.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(moscato:2023:GECCO, author = {Pablo Moscato and Andrew Ciezak and Nasimul Noman}, title = {Dynamic Depth for Better Generalization in Continued Fraction Regression}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {520--528}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {memetic algorithm, symbolic regression, generalization, continued fraction}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590461}, size = {9 pages}, abstract = {A continued fraction expansion represents a real number as an expression obtained by iteratively extracting the integer part and inverting the fractional remainder. Continued Fraction Regression (CFR) is a method for approximating unknown target functions from data. The key idea is representing the target function as an analytic continued fraction expansion. This is achieved through an optimization approach, which searches the set of possible fractions to find the best approximating fraction for the given data. This research investigates the relationship between truncated fraction depth, accuracy, complexity, and training time in the CFR method for challenging regression problems. Specifically, low-sample synthetic datasets with Gaussian noise are considered, which we use as a proxy for low-sample dynamical systems with underlying models obscured by measurement errors. We propose and assess the performance of three depth-regulating CFR approaches against six modern symbolic regression methods. We reinforce the strong generalization capacity of the CFR method while reducing model complexity and execution time. Our method achieves the most 1st place rankings in testing against its competitors on 21 Nguyen datasets.
It is never worse than 3rd for any dataset while taking at most 4\% of the training time of its closest competitor.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(nikolikj:2023:GECCO, author = {Ana Nikolikj and Sa\v{s}o D\v{z}eroski and Mario Andr\'{e}s Mu\~{n}oz and Carola Doerr and Peter Koro\v{s}ec and Tome Eftimov}, title = {Algorithm Instance Footprint: Separating Easily Solvable and Challenging Problem Instances}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {529--537}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {supervised machine learning, explainability, single-objective optimization, latent representations, algorithm behavior}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590424}, size = {9 pages}, abstract = {In black-box optimization, it is essential to understand why an algorithm instance works on a set of problem instances while failing on others and provide explanations of its behavior. We propose a methodology for formulating an algorithm instance footprint that consists of a set of problem instances that are easy to solve and a set of problem instances that are difficult to solve for an algorithm instance. This behavior of the algorithm instance is further linked to the landscape properties of the problem instances to provide explanations of which properties make some problem instances easy or challenging. The proposed methodology uses meta-representations that embed the landscape properties of the problem instances and the performance of the algorithm into the same vector space. These meta-representations are obtained by training a supervised machine learning regression model for algorithm performance prediction and applying model explainability techniques to assess the importance of the landscape features to the performance predictions.
Next, deterministic clustering of the meta-representations demonstrates that using them captures algorithm performance across the space and detects regions of poor and good algorithm performance, together with an explanation of which landscape properties are leading to it.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(schneider:2023:GECCO, author = {Lennart Schneider and Bernd Bischl and Janek Thomas}, title = {{Multi-Objective} Optimization of Performance and Interpretability of Tabular Supervised Machine Learning Models}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {538--547}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {tabular data, supervised learning, interpretability, evolutionary computation, group structure, multi-objective, performance}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590380}, size = {10 pages}, abstract = {We present a model-agnostic framework for jointly optimizing the predictive performance and interpretability of supervised machine learning models for tabular data. Interpretability is quantified via three measures: feature sparsity, interaction sparsity of features, and sparsity of non-monotone feature effects. By treating hyperparameter optimization of a machine learning algorithm as a multi-objective optimization problem, our framework allows for generating diverse models that trade off high performance and ease of interpretability in a single optimization run. Efficient optimization is achieved via augmentation of the search space of the learning algorithm by incorporating feature selection, interaction and monotonicity constraints into the hyperparameter search space. We demonstrate that the optimization problem effectively translates to finding the Pareto optimal set of groups of selected features that are allowed to interact in a model, along with finding their optimal monotonicity constraints and optimal hyperparameters of the learning algorithm itself. We then introduce a novel evolutionary algorithm that can operate efficiently on this augmented search space. 
In benchmark experiments, we show that our framework is capable of finding diverse models that are highly competitive or outperform state-of-the-art XGBoost or Explainable Boosting Machine models, both with respect to performance and interpretability.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(shiraishi:2023:GECCO, author = {Hiroki Shiraishi and Yohei Hayamizu and Tomonori Hashiyama}, title = {{Fuzzy-UCS} Revisited: {Self-Adaptation} of Rule Representations in {Michigan-Style} Learning {Fuzzy-Classifier} Systems}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {548--557}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {fuzzy-UCS, self-adaptation, knowledge representation, supervised learning, learning fuzzy-classifier systems}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590360}, size = {10 pages}, abstract = {This paper focuses on the impact of rule representation in Michigan-style Learning Fuzzy-Classifier Systems (LFCSs) on their classification performance. A good representation of the rules in an LFCS is crucial for improving its performance. However, conventional rule representations frequently struggle to address problems with unknown data characteristics. To address this issue, this paper proposes a supervised LFCS (i.e., Fuzzy-UCS) with a self-adaptive rule representation mechanism, entitled Adaptive-UCS. Adaptive-UCS incorporates a fuzzy indicator as a new rule parameter that sets the membership function of a rule as either rectangular (i.e., crisp) or triangular (i.e., fuzzy) shapes. The fuzzy indicator is optimized with evolutionary operators, allowing the system to search for an optimal rule representation. Results from extensive experiments conducted on continuous space problems demonstrate that Adaptive-UCS outperforms other UCSs with conventional crisp-hyperrectangular and fuzzy-hypertrapezoidal rule representations in classification accuracy.
Additionally, Adaptive-UCS exhibits robustness in the case of noisy inputs and real-world problems with inherent uncertainty, such as missing values, leading to stable classification performance.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(tiso:2023:GECCO, author = {Stefano Tiso and Pedro Carvalho and Nuno Louren\c{c}o and Penousal Machado}, title = {Biological Insights on {Grammar-Structured} Mutations Improve Fitness and Diversity}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {558--567}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, mutation, optimizers, grammar-guided genetic programming}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590472}, size = {10 pages}, abstract = {Grammar-Guided Genetic Programming (GGGP) employs a variety of concepts from evolutionary theory to autonomously design solutions for a given task. Recent insights from evolutionary biology can lead to further improvements in GGGP algorithms. In this paper, we propose a new mutation approach called Facilitated Mutation (FM) that is based on the theory of Facilitated Variation. We evaluate the performance of FM on the evolution of neural network optimizers for image classification, a relevant task in Evolutionary Computation, with important implications for the field of Machine Learning. We compare FM and FM combined with crossover (FMX) against a typical mutation approach to assess the benefits of the approach. We find that FMX provides statistical improvements in key metrics, creating a superior optimizer overall (+0.5\% average test accuracy), improving the average quality of solutions (+53\% average population fitness), and discovering more diverse high-quality behaviors (+523 high-quality solutions discovered on average). Additionally, FM and FMX reduce the number of fitness evaluations in an evolutionary run, reducing computational costs. 
FM's implementation cost is minimal and the approach is theoretically applicable to any algorithm where genes are associated with a grammar non-terminal, making it applicable in many existing GGGP systems.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(toutouh:2023:GECCO, author = {Jamal Toutouh and Subhash Nalluru and Erik Hemberg and Una-May O'Reilly}, title = {{Semi-Supervised} Learning with Coevolutionary Generative Adversarial Networks}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {568--576}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590426}, size = {9 pages}, abstract = {It can be expensive to label images for classification. Good classifiers can be trained, and high-quality images generated, from unlabeled data with Generative Adversarial Network (GAN) methods. We use coevolutionary algorithms with Semi-Supervised GANs (SSL-GANs) that work with a few labeled and some more unlabeled images to train both a good classifier and a high-quality image generator. A spatial coevolutionary algorithm introduces diversity into the GAN training. We use a two-dimensional grid of GANs to gain discriminator loss diversity with a distributed cell-level coevolutionary algorithm. The GAN components are exchanged between neighboring cells based on performance and population-based hyperparameter tuning. The approach is demonstrated on two separate benchmark datasets, and with only a few labels, we simultaneously achieve good classification accuracy and high generated image quality score.
In addition, the generated image quality and classification accuracy are competitive with state-of-the-art methods.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(tran:2023:GECCO, author = {Hai-Long Tran and Long Doan and Ngoc Hoang Luong and Huynh Thi Thanh Binh}, title = {A {Two-Stage} {Multi-Objective} Evolutionary Reinforcement Learning Framework for Continuous Robot Control}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {577--585}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {multi-objective optimization, reinforcement learning, genetic algorithms}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590441}, size = {9 pages}, abstract = {Real-world continuous control problems often require optimizing for multiple conflicting objectives. Various works in multi-objective reinforcement learning have been conducted to tackle such issues and obtained impressive performance. At the same time, evolutionary algorithms (EAs), which are extensively used in multi-objective optimization, have recently demonstrated their competitiveness with RL algorithms, including multi-objective control for environments with discrete action spaces. However, using EAs for multi-objective continuous robot control is still an under-explored topic. For the single-objective setting, the Proximal Distilled Evolutionary Reinforcement Learning (PDERL) framework succeeds in combining the robustness of EAs and the efficiency of RL methods. In this work, we bring the strengths of PDERL to the multiobjective realm to create the novel multi-objective PDERL framework called MOPDERL that consists of a warm-up stage and an evolution stage. In particular, MOPDERL collaboratively optimizes policies for each separate objective in the warm-up stage, and then exchanges that knowledge for further policy improvement during the multi-objective evolution stage. We benchmark MOPDERL on six MuJoCo robot locomotion environments, which have been modified for the multi-objective context.
The results show that MOPDERL produces better-quality Pareto fronts and higher metric scores than the state-of-the-art PGMORL algorithm across five out of six environments.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(videau:2023:GECCO, author = {Mathurin Videau and Nickolai Knizev and Alessandro Leite and Marc Schoenauer and Olivier Teytaud}, title = {Interactive Latent Diffusion Model}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {586--596}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590471}, size = {11 pages}, abstract = {This paper introduces Interactive Latent Diffusion Model (IELDM), an encapsulation of a popular text-to-image diffusion model into an Evolutionary framework, allowing the users to steer the design of images toward their goals, alleviating the tedious trial-and-error process that such tools frequently require. The users can not only designate their favourite images, allowing the system to build a surrogate model based on their goals and move in the same directions, but also click on some specific parts of the images to either locally refine the image through dedicated mutation, or recombine images by choosing on each one some regions they like. Experiments validate the benefits of IELDM, especially in a situation where Latent Diffusion Model is challenged by complex input prompts.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(wever:2023:GECCO, author = {Marcel Wever and Miran \"{O}zdogan and Eyke H\"{u}llermeier}, title = {Cooperative {Co-Evolution} for Ensembles of Nested Dichotomies for {Multi-Class} Classification}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {597--605}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {supervised learning, coevolution, nested dichotomy}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590457}, size = {9 pages}, abstract = {In multi-class classification, it can be beneficial to decompose a learning problem into several simpler problems. 
One such reduction technique is the use of so-called nested dichotomies, which recursively bisect the set of possible classes such that the resulting subsets can be arranged in the form of a binary tree, where each split defines a binary classification problem. Recently, a genetic algorithm for optimizing the structure of such nested dichotomies has achieved state-of-the-art results. Motivated by its success, we propose to extend this approach using a co-evolutionary scheme to optimize both the structure of nested dichotomies and their composition into ensembles through which they are evaluated. Furthermore, we present an experimental study showing this approach to yield ensembles of nested dichotomies at substantially lower cost and, in some cases, even with an improved generalization performance.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(yatsu:2023:GECCO, author = {Naoya Yatsu and Hiroki Shiraishi and Hiroyuki Sato and Keiki Takadama}, title = {Exploring {High-Dimensional} Rules Indirectly via Latent Space Through a Dimensionality Reduction for {XCS}}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {606--614}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {XCS classifier system, data mining, learning classifier systems, autoencoder}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590439}, size = {9 pages}, abstract = {To mine high-dimensional rules in Learning Classifier Systems (LCSs) through a reduction of the dimensionality of input data, this paper proposes a novel approach that indirectly learns the rules in the "latent space" based on the rewards of the reconstructed rules in the "observation space". We call this approach Learning Strategy by exploring rules in Observation space via Latent space (LS-OvL), which is based on two rule representations in the observation and latent space. Concretely, LS-OvL explores the rules by searching the latent space as the reduced dimensional input space by an autoencoder and evaluates them in the observation space by reconstructing them from the latent space. Such a design is significant because it prevents the generation of inaccurate rules during the reconstitution process from the latent space to the observation space. 
Through a comparison of LS-OvL with the conventional learning strategy, which explores and evaluates the rules only in the latent space and reconstructs them in the observation space, the experimental results show that (1) LS-OvL outperforms the conventional learning strategy in terms of the acquired reward and the population size, and (2) LS-OvL can generate explainable and classifiable high-dimensional rules.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(yuan:2023:GECCO, author = {Gonglin Yuan and Bing Xue and Mengjie Zhang}, title = {An Effective {One-Shot} Neural Architecture Search Method with Supernet {Fine-Tuning} for Image Classification}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {615--623}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {neural architecture search, image classification, evolutionary computation}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590438}, size = {9 pages}, abstract = {Neural architecture search (NAS) is becoming increasingly popular for its ability to automatically search for an appropriate network architecture, avoiding laborious manual designing processes, and potentially introducing novel structures. However, many NAS methods suffer from heavy computational consumption. One-shot NAS alleviates this issue by training a big supernet and allowing all the candidates to inherit weights from the supernet, avoiding training from scratch. However, the performance evaluations in one-shot methods might not always be reliable due to the weight co-adaptation issue inside the supernet. This paper proposes a supernet fine-tuning strategy to allow the supernet's weights to adapt to the new focused search region along with the evolutionary process. Furthermore, a new genetic algorithm-based search method is designed to offer an effective path-sampling strategy in the search region and provide a new population generation method to preclude unfair fitness comparisons between different populations. The experimental results demonstrate that the proposed method achieves promising results compared with 32 peer competitors in terms of the algorithm's computational cost and the searched architecture's performance.
Specifically, the proposed method achieves classification error rates of 2.50\% and 17.07\% within only 0.50 and 0.92 GPU-days on CIFAR10 and CIFAR100, respectively.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(zheng:2023:GECCO, author = {Bowen Zheng and Ran Cheng}, title = {Rethinking {Population-Assisted} {Off-Policy} Reinforcement Learning}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {624--632}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary reinforcement learning, neuroevolution, off-policy learning}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590512}, size = {9 pages}, abstract = {While off-policy reinforcement learning (RL) algorithms are sample efficient due to gradient-based updates and data reuse in the replay buffer, they struggle with convergence to local optima due to limited exploration. On the other hand, population-based algorithms offer a natural exploration strategy, but their heuristic black-box operators are inefficient. Recent algorithms have integrated these two methods, connecting them through a shared replay buffer. However, the effect of using diverse data from population optimization iterations on off-policy RL algorithms has not been thoroughly investigated. In this paper, we first analyze the use of off-policy RL algorithms in combination with population-based algorithms, showing that the use of population data could introduce an overlooked error and harm performance. To test this, we propose a uniform and scalable training design and conduct experiments on our tailored framework in robot locomotion tasks from the OpenAI gym. Our results substantiate that using population data in off-policy RL can cause instability during training and even degrade performance. To remedy this issue, we further propose a double replay buffer design that provides more on-policy data and show its effectiveness through experiments. 
Our results offer practical insights for training these hybrid methods.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(an:2023:GECCO, author = {Guangyan An and Ziyu Wu and Zhilong Shen and Ke Shang and Hisao Ishibuchi}, title = {Evolutionary {Multi-Objective} Deep Reinforcement Learning for Autonomous {UAV} Navigation in {Large-Scale} Complex Environments}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {633--641}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590446}, size = {9 pages}, abstract = {Autonomous navigation of Unmanned Aerial Vehicles (UAVs) in large-scale complex environments presents a significant challenge in modern aerospace engineering, as it requires effective decision-making in an environment with limited sensing capacity, dynamic changes, and dense obstacles. Reinforcement Learning (RL) has been applied in sequential control problems, but the manual setting of hyperparameters, including reward functions, often results in suboptimal solutions and inadequate training. To address these limitations, we propose a framework that combines Multi-Objective Evolutionary Algorithms (MOEAs) with RL algorithms. The proposed framework generates a set of non-dominating parameters for the reward function using MOEAs, leading to diverse decision-making preferences, efficient convergence, and improved performance. The framework was tested on the autonomous navigation of UAVs and demonstrated significant improvement compared to traditional RL methods. 
This work offers a novel perspective on the problem of autonomous UAV navigation in large-scale complex environments and highlights the potential for further improvement through the integration of RL and MOEAs.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(dang:2023:GECCO, author = {Duc-Cuong Dang and Andre Opris and Bahare Salehi and Dirk Sudholt}, title = {Analysing the Robustness of {NSGA-II} under Noise}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {642--651}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {runtime analysis, evolutionary multiobjective optimisation, noisy optimisation}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590421}, size = {10 pages}, abstract = {Runtime analysis has produced many results on the efficiency of simple evolutionary algorithms like the (1+1) EA, and its analogue called GSEMO in evolutionary multiobjective optimisation (EMO). Recently, the first runtime analyses of the famous and highly cited EMO algorithm NSGA-II have emerged, demonstrating that practical algorithms with thousands of applications can be rigorously analysed. However, these results only show that NSGA-II has the same performance guarantees as GSEMO and it is unclear how and when NSGA-II can outperform GSEMO. We study this question in noisy optimisation and consider a noise model that adds large amounts of posterior noise to all objectives with some constant probability p per evaluation. We show that GSEMO fails badly on every noisy fitness function as it tends to remove large parts of the population indiscriminately. In contrast, NSGA-II is able to handle the noise efficiently on LeadingOnes-TrailingZeroes when p < 1/2, as the algorithm is able to preserve useful search points. We identify a phase transition at p = 1/2 where the expected time to cover the Pareto front changes from polynomial to exponential.
This is the first proof that NSGA-II can outperform GSEMO and the first runtime analysis of NSGA-II in noisy optimisation.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(gharafi:2023:GECCO, author = {Mohamed Gharafi and Nikolaus Hansen and Dimo Brockhoff and Rodolphe {Le Riche}}, title = {Multiobjective Optimization with a Quadratic {Surrogate-Assisted} {CMA-ES}}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {652--660}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {multiobjective optimization, quadratic metamodel, evolution strategies, CMA-ES, surrogate-assisted optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590492}, size = {9 pages}, abstract = {We present a surrogate-assisted multiobjective optimization algorithm. The aggregation of the objectives relies on the Uncrowded Hypervolume Improvement (UHVI) which is partly replaced by a linear-quadratic surrogate that is integrated into the CMA-ES algorithm. Surrogating the UHVI poses two challenges. First, the UHVI is a dynamic function, changing with the empirical Pareto set. Second, it is a composite function, defined differently for dominated and nondominated points. The presented algorithm is thought to be used with expensive functions of moderate dimension (up to about 50) with a quadratic surrogate which is updated based on its ranking ability. We report numerical experiments which include tests on the COCO benchmark. 
The algorithm shows in particular linear convergence on the double sphere function with a convergence rate that is 6--20 times faster than without surrogate assistance.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(gong:2023:GECCO, author = {Cheng Gong and Yang Nan and Lie Meng Pang and Qingfu Zhang and Hisao Ishibuchi}, title = {Effects of Including Optimal Solutions into Initial Population on Evolutionary Multiobjective Optimization}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {661--669}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {initialization method, evolutionary computation, multi-objective optimization, EMO algorithms}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590515}, size = {9 pages}, abstract = {A long-standing question in the evolutionary multi-objective (EMO) community is how to generate a good initial population for EMO algorithms. Intuitively, as the starting point of optimization, a good initial population can have positive effects on the performance of EMO algorithms. However, in most existing EMO algorithms, one of the commonly-used initialization methods is to randomly generate a set of solutions as an initial population. One possible approach to improve random initialization is to include one or more Pareto optimal (near Pareto optimal) solution(s) in the initial population, which are expected to provide useful information and knowledge on the optimized problem. In this paper, to investigate the effectiveness of this initialization idea, we examine and quantify the effects of including one or more Pareto optimal solution(s) in the initial population on the performance of EMO algorithms. Experimental results demonstrate that it is worthwhile to first obtain and then include some Pareto optimal solutions in the initial population. 
Through a number of experiments and algorithm behavior analysis, this study provides support and insights into EMO algorithm design and motivates further research on population initialization for EMO algorithms.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(he:2023:GECCO, author = {Linjun He and Yang Nan and Hisao Ishibuchi and Dipti Srinivasan}, title = {Effects of Objective Space Normalization in {Multi-Objective} Evolutionary Algorithms on {Real-World} Problems}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {670--678}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {objective space normalization, evolutionary multi-objective optimization (EMO), real-world multi-objective optimization problem}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590450}, size = {9 pages}, abstract = {In real-world multi-objective problems, each objective has a totally different scale. However, some frequently-used multi-objective evolutionary algorithms (MOEAs) have no objective space normalization mechanisms. The effect of objective space normalization on the performance of decomposition-based MOEAs (e.g., MOEA/D and NSGA-III) has already been examined for artificial test problems (e.g., DTLZ and WFG) in the literature. In this paper, we examine its practical usefulness for real-world multi-objective problems using various MOEAs. Our experimental results clearly show that objective space normalization is needed not only in decomposition-based MOEAs but also in hypervolume-based MOEAs.
We also explain why objective space normalization is needed in these two types of MOEAs.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(ishibuchi:2023:GECCO, author = {Hisao Ishibuchi and Lie Meng Pang and Ke Shang}, title = {Effects of Dominance Modification on {Hypervolume-Based} and {IGD-Based} Performance Evaluation Results of {NSGA-II}}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {679--687}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary many-objective optimization, evolutionary multi-objective optimization, performance indicators, cone dominance}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590437}, size = {9 pages}, abstract = {In the field of evolutionary multi-objective optimization, it is well known that dominance-based algorithms do not work well on many-objective problems. This is because almost all solutions in a population become non-dominated in early generations. Two approaches have been proposed to decrease the number of non-dominated solutions. One is to increase the dominated region by each solution: dominance modification. The other is to increase the correlation among objectives: objective modification. In this paper, first we show that these two approaches can be viewed as the same approach. We also explain that some regions of the Pareto front are dominated when the dominated region is increased. Next, we numerically examine the effects of dominance modification on the performance of NSGA-II on many-objective test problems. Through computational experiments, we demonstrate that its positive and negative effects are clearly shown by the hypervolume (HV) and inverted generational distance (IGD) indicators, respectively. Then, we discuss why these two indicators emphasize different effects of dominance modification using the optimal distribution of solutions for each indicator. 
Finally, we explain that objective space normalization is needed in dominance modification whereas it has no effect on the Pareto dominance relation.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(kang:2023:GECCO2, author = {Yuhao Kang and Jialong Shi and Jianyong Sun and Ye Fan}, title = {Improving Neighborhood Exploration Mechanism to Speed up {PLS}}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {688--694}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {speedup technique, metaheuristics, multi-objective optimization, local search, combinatorial optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590445}, size = {7 pages}, abstract = {As an extension of local search to the multiobjective case, the basic version of Pareto Local Search (PLS) suffers from a poor anytime behavior. Research has been carried out to overcome this drawback from different aspects. In this paper, we focus on the mechanism of neighborhood exploration in bi-objective Travelling Salesman Problems (bTSPs). Inspired by existing fast local search strategies for single objective TSP, we propose two speed-up strategies to help PLS quickly find promising neighboring solutions in bTSPs. In the experimental studies, we investigate the sensitivity of parameters and test the performance of several PLS variants with different combinations of the two strategies.
The experimental results verify the effectiveness of the two strategies and their combination.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(liao:2023:GECCO, author = {Hsu Chen Liao and Wen Zhong Fang and Tian Li Yu}, title = {Adaptive Donor Selection Mixing for {Multi-Objective} Optimization: An Enhanced Variant of {MO-GOMEA}}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {695--703}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {model building, scalability, evolutionary algorithms, multi-objective optimization, donor selection}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590403}, size = {9 pages}, abstract = {The multi-objective gene-pool optimal mixing evolutionary algorithm with interleaved multi-start scheme (MO-GOMEA) is a powerful, parameterless model-based genetic algorithm that excels at solving multi-objective combinatorial optimization problems. In this paper, we propose a new mixing mechanism, adaptive donor selection mixing (ADSM) and further integrate it into MO-GOMEA to form a new variant, ADSM-MO-GOMEA. The proposed ADSM mechanism adaptively switches between cluster-guided and elitist-guided mixing, with the latter having a customized donor selection for the receiver based on empirical observations and mathematical derivation. 
The empirical results on multiple benchmark problems indicate that ADSM-MO-GOMEA improves the effectiveness over the original MO-GOMEA and achieves a lower inverted generational distance and higher front occupation within the given limited number of evaluations.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(liefooghe:2023:GECCO, author = {Arnaud Liefooghe and Manuel Lopez-Ibanez}, title = {{Many-Objective} (Combinatorial) Optimization is Easy}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {704--712}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {search difficulty, local search, multi-modality, local optima, multi-objective nk-landscapes, many-objective optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590475}, size = {9 pages}, abstract = {It is a commonly held assumption that problems with many objectives are harder to optimize than problems with two or three objectives. In this paper, we challenge this assumption and provide empirical evidence that increasing the number of objectives tends to reduce the difficulty of the landscape being optimized. Of course, increasing the number of objectives brings about other challenges, such as an increase in the computational effort of many operations, or the memory requirements for storing non-dominated solutions. More precisely, we consider a broad range of multi- and many-objective combinatorial benchmark problems, and we measure how the number of objectives impacts the dominance relation among solutions, the connectedness of the Pareto set, and the landscape multimodality in terms of local optimal solutions and sets. Our analysis shows the limit behavior of various landscape features when adding more objectives to a problem. Our conclusions do not contradict previous observations about the inability of Pareto-optimality to drive search, but we explain these observations from a different perspective.
Our findings have important implications for the design and analysis of many-objective optimization algorithms.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(liefooghe:2023:GECCO2, author = {Arnaud Liefooghe and Gabriela Ochoa and S\'{e}bastien Verel and Bilel Derbel}, title = {Pareto Local Optimal Solutions Networks with Compression, Enhanced Visualization and Expressiveness}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {713--721}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590474}, size = {9 pages}, abstract = {The structure of local optima in multi-objective combinatorial optimization and their impact on algorithm performance are not yet properly understood. In this paper, we are interested in the representation of multi-objective landscapes and their multi-modality. More specifically, we revise and extend the network of Pareto local optimal solutions (PLOS-net), inspired by the well-established local optima network from single-objective optimization. We first define a compressed PLOS-net which allows us to enhance its perception while preserving the important notion of connectedness between local optima. We then study an alternative visualization of the (compressed) PLOS-net that focuses on good-quality solutions, improves the distinction between connected components in the network, and generalizes well to landscapes with more than 2 objectives. We finally define a number of network metrics that characterize the PLOS-net, some of them being strongly correlated with search performance. 
We visualize and experiment with small-size multiobjective nk-landscapes, and we disclose the effect of PLOS-net metrics against well-established multi-objective local search and evolutionary algorithms.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(lu:2023:GECCO, author = {Yongfan Lu and Bingdong Li and Hong Qian and Wenjing Hong and Peng Yang and Aimin Zhou}, title = {{RM-SAEA:} Regularity Model Based {Surrogate-Assisted} Evolutionary Algorithms for Expensive {Multi-Objective} Optimization}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {722--730}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {Pareto set learning, surrogate-assisted evolutionary algorithm, expensive multi-objective optimization, regularity model}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590435}, size = {9 pages}, abstract = {Due to computationally and/or financially costly evaluation, tackling expensive multi-objective optimization problems is quite challenging for evolutionary algorithms. One popular approach to these problems is building cheap surrogate models to replace the expensive real function evaluations. To this end, various kinds of surrogate-assisted evolutionary algorithms (SAEAs) have been proposed, building surrogate models which predict the fitness values, classifications, or relation of the candidate solutions. However, off-spring generation, despite its important role in evolutionary optimization, has not received enough attention in these SAEAs. In this paper, a regularity model based framework, namely RM-SAEA, is proposed for better offspring generation in expensive multi-objective optimization. To be specific, RM-SAEA is featured with a heterogeneous offspring generation module, which is composed of a regularity model and a general genetic operator. Moreover, in order to alleviate the data deficiency issue in the expensive optimization scenario, a data augmentation strategy is employed while training the regularity model. Finally, two representative SAEAs are embedded into RM-SAEA in order to instantiate the proposed framework. 
Experimental results on benchmark multi-objective problems with up to 10 objectives demonstrate that RM-SAEA achieves the best overall performance compared with 6 state-of-the-art algorithms.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(neumann:2023:GECCO, author = {Frank Neumann and Carsten Witt}, title = {3-Objective Pareto Optimization for Problems with Chance Constraints}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {731--739}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary multi-objective optimization, theory, chance constraints, runtime analysis}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590392}, size = {9 pages}, abstract = {Evolutionary multi-objective algorithms have successfully been used in the context of Pareto optimization where a given constraint is relaxed into an additional objective. In this paper, we explore the use of 3-objective formulations for problems with chance constraints. Our formulation trades off the expected cost and variance of the stochastic component as well as the given deterministic constraint. We point out benefits that this 3-objective formulation has compared to a bi-objective one recently investigated for chance constraints with Normally distributed stochastic components. Our analysis shows that the 3-objective formulation allows to compute all required trade-offs using 1-bit flips only, when dealing with a deterministic cardinality constraint. 
Furthermore, we carry out experimental investigations for the chance constrained dominating set problem and show the benefit for this classical NP-hard problem.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(shu:2023:GECCO, author = {Tianye Shu and Yang Nan and Ke Shang and Hisao Ishibuchi}, title = {{Two-Phase} Procedure for Efficiently Removing Dominated Solutions From Large Solution Sets}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {740--748}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {multi-objective optimization, dominated solution removal}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590404}, size = {9 pages}, abstract = {In evolutionary multi-objective optimization (EMO), one important procedure is to remove all dominated solutions from a solution set (e.g., solutions in an archive) to obtain an approximated Pareto front, which is called a static nondominance problem. Recently, an unbounded external archive (UEA) is used in EMO algorithms in many studies to store all solutions examined during the evolutionary process. In these studies, the candidate set in the static nondominance problem includes all the examined solutions. Although many methods have been proposed to solve the static nondominance problem, the dominated solution removal is still time-consuming for a large-scale candidate set. To tackle this issue, we propose a simple and general two-phase procedure to improve the efficiency of existing dominated solution removal methods. In the first phase of our procedure, a large-scale candidate set is divided into several subsets. Dominated solutions are removed from each subset independently, and remaining solutions are merged. In the second phase, dominated solutions are removed from the merged set. 
Compared with directly removing all dominated solutions from the candidate set, our experimental results show that the proposed two-phase procedure can drastically decrease the computation time when the percentage of nondominated solutions in the candidate set is small.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(tanabe:2023:GECCO, author = {Ryoji Tanabe}, title = {On the Unbounded External Archive and Population Size in {Preference-Based} Evolutionary {Multi-Objective} Optimization Using a Reference Point}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {749--758}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {benchmarking, unbounded external archive, population size, preference-based evolutionary multi-objective optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590511}, size = {10 pages}, abstract = {Although the population size is an important parameter in evolutionary multi-objective optimization (EMO), little is known about its influence on preference-based EMO (PBEMO). The effectiveness of an unbounded external archive (UA) in PBEMO is also poorly understood, where the UA maintains all non-dominated solutions found so far. In addition, existing methods for postprocessing the UA cannot handle the decision maker's preference information. In this context, first, this paper proposes a preference-based postprocessing method for selecting representative solutions from the UA. Then, we investigate the influence of the UA and population size on the performance of PBEMO algorithms. Our results show that the performance of PBEMO algorithms (e.g., R-NSGA-II) can be significantly improved by using the UA and the proposed method. We demonstrate that a smaller population size than commonly used is effective in most PBEMO algorithms for a small budget of function evaluations, even for many objectives. 
We found that the size of the region of interest is a less important factor in selecting the population size of the PBEMO algorithms on real-world problems.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(tomczyk:2023:GECCO, author = {Micha\l{} Tomczyk and Mi\l{}osz Kadzi\'{n}ski}, title = {{Co-Evolution} Improves the Efficiency of Preference Learning Methods When the Decision Maker's Aspirations Develop over Time}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {759--767}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary multiple objective optimization, interactive procedures, pairwise comparisons, co-evolution, preference learning}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590348}, size = {9 pages}, abstract = {This paper's research scope is interactive evolutionary multiple objective optimization founded on the preference learning paradigm. It concerns a scenario in which the Decision Maker's (DM's) aspirations develop over time. In this view, the interactive method may be forced to occasionally re-learn the DM's value system and, thus, re-orient the search during optimization. In preliminary studies, we observed that although a satisfactory recommendation can ultimately be discovered, it is often attainable with more significant computational power. To resolve this issue, we propose a co-evolutionary method, evolving sub-populations that approximate the Pareto front or align with the DM's preferences. This diversifies the maintained solution set, which is useful when re-understanding the DM's aspirations during interactions. Also, it helps reallocate the preference-driven sub-population quickly, avoiding an extensive computational burden. 
We demonstrate the usefulness of such hybridization in a series of extensive experiments that involve different test problems, numbers of objectives, and behavioral models of the Decision Maker.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(wu:2023:GECCO, author = {Yin Wu and Ruihao Zheng and Zhenkun Wang}, title = {{Decomposition-Based} {Multi-Objective} Evolutionary Algorithm with {Model-Based} Ideal Point Estimation}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {768--776}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {multi-objective evolutionary algorithm based on decomposition (MOEA/D), mixed bias, evolutionary multi-objective optimization, ideal point estimation}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590521}, size = {9 pages}, abstract = {The ideal point, which consists of the best value of each objective, is critical in the multi-objective optimization problem (MOP). It is widely used for normalizing the objective space and guiding the evolution of the population. Since the ideal point cannot be known a priori, the multi-objective evolutionary algorithm based on decomposition (MOEA/D) takes the best objective values of the population as the estimated ideal point. However, the population-based ideal point estimation may cause the estimated ideal point to be appropriate for (1) no objective or (2) only some objectives. Our analysis shows that this unreliable estimation deteriorates the performance of MOEA/D. These two scenarios often occur when the MOP has mixed bias (i.e., position-related bias and distance-related bias). To overcome this, we propose to incorporate model-based ideal point estimation into MOEA/D. The new algorithm (called MOEA/D-MIPE) employs the radial basis function model and a remedy scheme to estimate the ideal point. In experimental studies, we compare MOEA/D-MIPE with seven state-of-the-art algorithms on various MOPs.
The results show that MOEA/D-MIPE has excellent potential.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(wu:2023:GECCO2, author = {Ying Wu and Na Yang and Long Chen and Ye Tian and Zhenzhou Tang}, title = {Directed Quick Search Guided Evolutionary Algorithm for {Large-Scale} {Multi-Objective} Optimization Problems}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {777--785}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {sampling strategy, evolutionary algorithms, quick search, directed reproduction, large-scale multi-objective optimization, direction vector}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590480}, size = {9 pages}, abstract = {For large-scale multi-objective evolutionary algorithms (LSMOEAs), it has been a major challenge to efficiently obtain accurate evolutionary directions in the ultra-high-dimensional decision space to produce high-quality offspring. To this end, this paper proposes an algorithm, namely, a directed quick search guided large-scale multi-objective evolutionary algorithm (QSLMOA). This algorithm contains three main parts: a directional vector-based sampling strategy, a quick search guided directed reproduction strategy, and an environment selection. In each generation, the proposed sampling strategy determines a set of directional solutions to construct the direction vectors which are used to guide the search directions. The sampling strategy significantly reduces the search space and improves the sampling efficiency in the early stages. On the other hand, our proposed reproduction strategy introduces the directional information, and with their assistance, the inferior solutions in the combined population can rapidly converge to the elite ones, which can speed up the convergence rate. Finally, the elitist non-dominated sorting is adopted as the environment selection to obtain the parent population of the next generation.
Comprehensive experiments verify that QSLMOA performs the best compared to the state-of-the-art LSMOEAs for nine large-scale multi-objective benchmark problems LSMOP1-LSMOP9 with up to three objectives and 5000 decision variables.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(yadav:2023:GECCO, author = {Deepanshu Yadav and Palaniappan Ramu and Kalyanmoy Deb}, title = {{Multi-Objective} Robust Optimization and {Decision-Making} Using Evolutionary Algorithms}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {786--794}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {R-NSGA-III, robust front, multi-criteria decision-making, evolutionary algorithms, pareto front}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590420}, size = {9 pages}, abstract = {Evolutionary multi-objective optimization (EMO) algorithms are predominantly used for solving multi- and many-objective optimization problems to arrive at the respective Pareto front. From a practical point of view, it is desirable for a decision-maker (DM) to consider objective vectors that are less sensitive to small perturbations in design variables and problem parameters. Such insensitive, yet close to Pareto-optimal, solutions lie on the so-called robust front. In real-world applications, such as engineering design and process optimization problems, perturbations in variables come from manufacturing tolerances, uncertainties in material properties, variations in operating conditions, etc. The existing EMO literature on robustness studies has emphasized finding the entire robust front, but has hardly considered robustness in both optimization and decision-making tasks. In this paper, we propose and evaluate different algorithmic implementations of three aspects - multi-objective optimization, robustness consideration, and multi-criterion decision-making - together. Based on experimental results on two to eight-objective problems, we discuss the outcomes and advantages of different integration approaches of these three aspects and present the most effective combined approach.
The results are interesting and should pave the way to develop more efficient multi-objective robust optimization and decision-making (MORODM) procedures for handling practical problems with uncertainties.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(yang:2023:GECCO, author = {Na Yang and Quan Zhang and Ying Wu and Yisu Ge and Zhenzhou Tang}, title = {A Hierarchical {Clustering-Based} Cooperative {Multi-Population} {Many-Objective} Optimization Algorithm}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {795--803}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {multi-population optimization algorithms, evolutionary algorithms, clustering algorithms, many-objective optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590476}, size = {9 pages}, abstract = {The increasing number of objectives poses a great challenge upon many-objective optimization algorithms (MaOOAs) when solving many-objective optimization problems (MaOOPs), since it is rather difficult to obtain well-distributed solutions with tight convergence. To efficiently improve the ability of solving MaOOPs, this paper proposes a hierarchical clustering-based cooperative multi-population many-objective optimization algorithm (C2MP-MaOOA). Specifically, a hierarchical clustering-based population division strategy is proposed in C2MP-MaOOA, which is able to effectively optimize different regions of the Pareto front (PF) regardless of its shape, so as to maintain population diversity and accelerate convergence. Any single-objective optimizer can be applied in C2MP-MaOOA to optimize a subpopulation. To comprehensively evaluate the performance of C2MP-MaOOA, it was compared with eight state-of-the-art existing algorithms and two variants of C2MP-MaOOA on 63 MaOOPs selected from DTLZ, MaF, and WFG benchmark suites. 
The results indicate that C2MP-MaOOA has the best overall performance for each benchmark suite, which demonstrates that C2MP-MaOOA is quite competitive in solving MaOOPs.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(zhu:2023:GECCO2, author = {Han Zhu and Ke Shang and Hisao Ishibuchi}, title = {{STHV-Net:} Hypervolume Approximation Based on Set Transformer}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {804--812}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {hypervolume approximation, HV-Net, set transformer, evolutionary multi-objective optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590378}, size = {9 pages}, abstract = {In this paper, we propose STHV-Net to approximate the hyper-volume indicator based on Set Transformer. Set Transformer is an advanced model to process set-form data which concentrates on the interaction of set elements. STHV-Net receives a non-dominated positive solution set of any size and outputs an approximate hyper-volume value of this solution set. The output value is independent of the order of the elements in the input set. The performance of STHV-Net is compared with three existing approximation methods (Monte Carlo, R2 indicator, HV-Net) using two evaluation criteria: approximation errors and computing time. Our experimental results show that STHV-Net is superior to the Monte Carlo method and the R2 indicator method with respect to these two criteria. Compared with HV-Net, our method can obtain lower approximation errors at the cost of a slightly longer computing time. 
We provide six representative models with different parameter sizes for users who have different preferences about the tradeoff between approximation error and computing time.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(cenikj:2023:GECCO, author = {Gjorgjina Cenikj and Ga\v{s}per Petelin and Carola Doerr and Peter Koro\v{s}ec and Tome Eftimov}, title = {{DynamoRep:} {Trajectory-Based} Population Dynamics for Classification of {Black-Box} Optimization Problems}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {813--821}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {black-box single-objective optimization, optimization problem classification, problem representation, meta-learning}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590401}, size = {9 pages}, abstract = {The application of machine learning (ML) models to the analysis of optimization algorithms requires the representation of optimization problems using numerical features. These features can be used as input for ML models that are trained to select or to configure a suitable algorithm for the problem at hand. Since in pure black-box optimization information about the problem instance can only be obtained through function evaluation, a common approach is to dedicate some function evaluations for feature extraction, e.g., using random sampling. This approach has two key downsides: (1) It reduces the budget left for the actual optimization phase, and (2) it neglects valuable information that could be obtained from a problem-solver interaction. In this paper, we propose a feature extraction method that describes the trajectories of optimization algorithms using simple descriptive statistics. We evaluate the generated features for the task of classifying problem classes from the Black Box Optimization Benchmarking (BBOB) suite.
We demonstrate that the proposed DynamoRep features capture enough information to identify the problem class on which the optimization algorithm is running, achieving a mean classification accuracy of 95\% across all experiments.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(hong:2023:GECCO, author = {Yuan Hong and Dirk Arnold}, title = {Evolutionary {Mixed-Integer} Optimization with Explicit Constraints}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {822--830}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolution strategy, mixed-integer optimization, constrained black-box optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590467}, size = {9 pages}, abstract = {Determining the feasibility of a candidate solution to a constrained black-box optimization problem may either be similarly expensive as the process of determining its quality, or it may be much cheaper. Constraints that allow obtaining degrees of feasibility or constraint violation without incurring significant computational costs are referred to as explicit. We present an evolutionary algorithm for solving mixed-integer black-box optimization problems with explicit constraints. The algorithm wraps active-set evolution strategies, an algorithm for solving continuous black-box problems with explicit constraints, in a branching mechanism that allows enforcing integrality constraints. 
In computer experiments we demonstrate that the algorithm solves a set of mixed-integer problems with significantly fewer objective function evaluations than several algorithms that do not exploit the explicitness of the constraints.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(ikeda:2023:GECCO, author = {Koki Ikeda and Isao Ono}, title = {Natural Evolution Strategy for {Mixed-Integer} {Black-Box} Optimization}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {831--838}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {natural evolution strategy, mixed-integer black-box optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590518}, size = {8 pages}, abstract = {This paper proposes a natural evolution strategy (NES) for mixed-integer black-box optimization (MI-BBO) that appears in real-world problems such as hyperparameter optimization of machine learning and materials design. This problem is difficult to optimize because plateaus where the values do not change appear when the integer variables are relaxed to the continuous ones. CMA-ES w. Margin that addresses the plateaus reportedly showed good performance on MI-BBO benchmark problems. However, it has been observed that the search performance of CMA-ES w. Margin deteriorates when continuous variables contribute more to the objective function value than integer ones. In order to address the problem of CMA-ES w. Margin, we propose Distance-weighted eXponential Natural Evolution Strategy taking account of Implicit Constraint and Integer (DX-NES-ICI). We compare the search performance of DX-NES-ICI with that of CMA-ES w. Margin through numerical experiments. As a result, DX-NES-ICI was up to 3.7 times better than CMA-ES w. Margin in terms of a rate of finding the optimal solutions on benchmark problems where continuous variables contribute more to the objective function value than integer ones. DX-NES-ICI also outperformed CMA-ES w. Margin on problems where CMA-ES w. 
Margin originally showed good performance.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(nomura:2023:GECCO, author = {Masahiro Nomura and Youhei Akimoto and Isao Ono}, title = {{CMA-ES} with Learning Rate Adaptation: Can {CMA-ES} with Default Population Size Solve Multimodal and Noisy Problems?}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {839--847}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {covariance matrix adaptation evolution strategy, black-box optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590358}, size = {9 pages}, abstract = {The covariance matrix adaptation evolution strategy (CMA-ES) is one of the most successful methods for solving black-box continuous optimization problems. One practically useful aspect of the CMA-ES is that it can be used without hyperparameter tuning. However, the hyperparameter settings still have a considerable impact, especially for difficult tasks such as solving multimodal or noisy problems. In this study, we investigate whether the CMA-ES with default population size can solve multimodal and noisy problems. To perform this investigation, we develop a novel learning rate adaptation mechanism for the CMA-ES, such that the learning rate is adapted so as to maintain a constant signal-to-noise ratio. We investigate the behavior of the CMA-ES with the proposed learning rate adaptation mechanism through numerical experiments, and compare the results with those obtained for the CMA-ES with a fixed learning rate. 
The results demonstrate that, when the proposed learning rate adaptation is used, the CMA-ES with default population size works well on multimodal and/or noisy problems, without the need for extremely expensive learning rate tuning.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(schoenenberger:2023:GECCO, author = {Lisa Sch\"{o}nenberger and Hans-Georg Beyer}, title = {On a Population Sizing Model for Evolution Strategies Optimizing the Highly Multimodal Rastrigin Function}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {848--855}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {multi-modal objective function, evolution strategies, global convergence, population sizing, global optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590451}, size = {8 pages}, abstract = {A model is presented that allows for the calculation of the success probability by which a vanilla Evolution Strategy converges to the global optimizer of the Rastrigin test function. As a result a population size scaling formula will be derived that allows for an estimation of the population size needed to ensure a high convergence security depending on the search space dimensionality.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(thomaser:2023:GECCO, author = {Andr\'{e} Thomaser and Jacob {De Nobel} and Diederick Vermetten and Furong Ye and Thomas B\"{a}ck and Anna Kononova}, title = {When to Be Discrete: Analyzing Algorithm Performance on Discretized Continuous Problems}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {856--863}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {benchmarking, integer handling, CMA-ES, integer representation, discretization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590410}, size = {8 pages}, abstract = {The domain of an optimization problem is seen as one of its most important characteristics. In particular, the distinction between continuous and discrete optimization is rather impactful. Based on this, the optimizing algorithm, analyzing method, and more are specified. However, in practice, no problem is ever truly continuous. 
Whether this is caused by computing limits or more tangible properties of the problem, most variables have a finite resolution. In this work, we use the notion of the resolution of continuous variables to discretize problems from the continuous domain. We explore how the resolution impacts the performance of continuous optimization algorithms. Through a mapping to integer space, we are able to compare these continuous optimizers to discrete algorithms on the exact same problems. We show that the standard (μ_w, λ)-CMA-ES fails when discretization is added to the problem.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(vermetten:2023:GECCO, author = {Diederick Vermetten and Fabio Caraffini and Anna V. Kononova and Thomas B\"{a}ck}, title = {Modular Differential Evolution}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {864--872}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {differential evolution, modular algorithms, benchmarking, algorithm configuration}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590417}, size = {9 pages}, abstract = {New contributions in the field of iterative optimisation heuristics are often made in an iterative manner. Novel algorithmic ideas are not proposed in isolation, but usually as extensions of a preexisting algorithm. Although these contributions are often compared to the base algorithm, it is challenging to make fair comparisons between larger sets of algorithm variants. This happens because even small changes in the experimental setup, parameter settings, or implementation details can cause results to become incomparable. Modular algorithms offer a way to overcome these challenges. By implementing the algorithmic modifications into a common framework, many algorithm variants can be compared, while ensuring that implementation details match in all versions. In this work, we propose a version of a modular framework for the popular Differential Evolution (DE) algorithm. We show that this modular approach not only aids in comparison but also allows for a much more detailed exploration of the space of possible DE variants. This is illustrated by showing that tuning the settings of modular DE vastly outperforms a set of commonly used DE versions which have been recreated in our framework. 
We then investigate these tuned algorithms in detail, highlighting the relation between modules and performance on particular problems.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(vermetten:2023:GECCO2, author = {Diederick Vermetten and Furong Ye and Carola Doerr}, title = {Using Affine Combinations of {BBOB} Problems for Performance Assessment}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {873--881}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {black-box optimization, benchmarking, performance analysis}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590412}, size = {9 pages}, abstract = {Benchmarking plays a major role in the development and analysis of optimization algorithms. As such, the way in which the used benchmark problems are defined significantly affects the insights that can be gained from any given benchmark study. One way to easily extend the range of available benchmark functions is through affine combinations between pairs of functions. From the perspective of landscape analysis, these function combinations smoothly transition between the two base functions.In this work, we show how these affine function combinations can be used to analyze the behavior of optimization algorithms. In particular, we highlight that by varying the weighting between the combined problems, we can gain insights into the effects of added global structure on the performance of optimization algorithms. 
By analyzing performance trajectories on more function combinations, we also show that aspects such as the scaling of objective functions and placement of the optimum can greatly impact how these results are interpreted.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(watanabe:2023:GECCO, author = {Yohei Watanabe and Kento Uchida and Ryoki Hamano and Shota Saito and Masahiro Nomura and Shinichi Shirakawa}, title = {{(1+1)-CMA-ES} with Margin for Discrete and {Mixed-Integer} Problems}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {882--890}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {discrete black-box optimization, covariance matrix adaptation evolution strategy, mixed-integer black-box optimization, elitist strategy}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590516}, size = {9 pages}, abstract = {The covariance matrix adaptation evolution strategy (CMA-ES) is an efficient continuous black-box optimization method. The CMA-ES possesses many attractive features, including invariance properties and a well-tuned default hyperparameter setting. Moreover, several components to specialize the CMA-ES have been proposed, such as noise handling and constraint handling. To utilize these advantages in mixed-integer optimization problems, the CMA-ES with margin has been proposed. The CMA-ES with margin prevents the premature convergence of discrete variables by the margin correction, in which the distribution parameters are modified to leave the generation probability for changing the discrete variable. The margin correction has been applied to the (μ/μ_w, λ)-CMA-ES, while this paper introduces the margin correction into (1+1)-CMA-ES, an elitist version of CMA-ES. The (1+1)-CMA-ES is often advantageous for unimodal functions and can be computationally less expensive. To tackle the performance deterioration on mixed-integer optimization, we use the discretized elitist solution as the mean of the sampling distribution and modify the margin correction not to move the elitist solution. 
The numerical simulation using benchmark functions on mixed-integer, integer, and binary domains shows that (1+1)-CMA-ES with margin outperforms the CMA-ES with margin and is better than or comparable with several methods specialized to a particular search domain.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(bu:2023:GECCO, author = {Chenyang Bu and Zhiyong Cao and Chenlong He and Yuhong Zhang}, title = {Probabilistic Model with Evolutionary Optimization for Cognitive Diagnosis}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {891--899}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {educational data mining, cognitive diagnostic, local search, evolutionary algorithm}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590522}, size = {9 pages}, abstract = {Cognitive Diagnostic Models (CDMs) aim to analyze students' cognitive levels of each knowledge component (KC) by mining educational data. Existing CDMs can be mainly divided into two categories, i.e., traditional probability-based and neural-network-based. Most probabilistic models have the advantages of simplicity and good interpretability, but suffer from slow training time in the case of a large number of KCs. Neural-network-based methods are widely considered to be superior to probabilistic models due to their good performance. However, neural network methods are less interpretable than probabilistic models, thus limiting their usefulness in practice. Because most existing probabilistic models are optimized iteratively based on single-point-based search methods, they may be easily trapped in a local optimum due to the influence of the initial points. In contrast, evolutionary algorithms (EAs) have good global search ability. Therefore, an interesting question is whether a simple probabilistic model based on evolutionary optimization can rival neural networks in limited optimization time. Thus, a hybrid EA with a customized local search is proposed. 
Experimental results on three real-world datasets show that our method outperforms the compared 7 models (including 2 state-of-the-art neural-network-based models); and the running time of our method is significantly less than the compared probabilistic models.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(correia:2023:GECCO, author = {Jo\~{a}o Correia and V\'{\i}tor Pereira and Miguel Rocha}, title = {Combining Evolutionary Algorithms with Reaction Rules Towards Focused Molecular Design}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {900--909}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary algorithms, reaction rules, drug discovery}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590413}, size = {10 pages}, abstract = {Designing novel small molecules with desirable properties and feasible synthesis continues to pose a significant challenge in drug discovery, particularly in the realm of natural products. Reaction-based gradient-free methods are promising approaches for designing new molecules as they ensure synthetic feasibility and provide potential synthesis paths. However, it is important to note that the novelty and diversity of the generated molecules highly depend on the availability of comprehensive reaction templates. To address this challenge, we introduce ReactEA, a new open-source evolutionary framework for computer-aided drug discovery that solely utilizes biochemical reaction rules. ReactEA optimizes molecular properties using a comprehensive set of 22,949 reaction rules, ensuring chemical validity and synthetic feasibility. ReactEA is versatile, as it can virtually optimize any objective function and track potential synthetic routes during the optimization process. To demonstrate its effectiveness, we apply ReactEA to various case studies, including the design of novel drug-like molecules and the optimization of pre-existing ligands. The results show that ReactEA consistently generates novel molecules with improved properties and reasonable synthetic routes, even for complex tasks such as improving binding affinity against the PARP1 enzyme when compared to existing inhibitors.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(guijt:2023:GECCO, author = {Arthur Guijt and Dirk Thierens and Tanja Alderliesten and Peter A.N. 
Bosman}, title = {The Impact of Asynchrony on Parallel {Model-Based} {EAs}}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {910--918}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {parallel algorithms, genetic algorithms, model-based evolutionary algorithms, asynchronous algorithms, linkage learning}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590406}, size = {9 pages}, abstract = {In a parallel EA one can strictly adhere to the generational clock, and wait for all evaluations in a generation to be done. However, this idle time limits the throughput of the algorithm and wastes computational resources. Alternatively, an EA can be made asynchronous parallel. However, EAs using classic recombination and selection operators (GAs) are known to suffer from an evaluation time bias, which also influences the performance of the approach. Model-Based Evolutionary Algorithms (MBEAs) are more scalable than classic GAs by virtue of capturing the structure of a problem in a model. If this model is learned through linkage learning based on the population, the learned model may also capture biases. Thus, if an asynchronous parallel MBEA is also affected by an evaluation time bias, this could result in learned models being less suited to solving the problem, reducing performance. Therefore, in this work, we study the impact and presence of evaluation time biases on MBEAs in an asynchronous parallelization setting, and compare this to the biases in GAs. We find that a modern MBEA, GOMEA, is unaffected by evaluation time biases, while the more classical MBEA, ECGA, is affected, much like GAs are.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(ivanova:2023:GECCO, author = {Alexandra Ivanova and Denis Antipov and Benjamin Doerr}, title = {Larger Offspring Populations Help the (1 + (λ, λ)) Genetic Algorithm to Overcome the Noise}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {919--928}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary computation, population-based algorithms, noisy optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590514}, size = {10 pages}, abstract = {Evolutionary algorithms are known to be robust to noise in the evaluation of the fitness. 
In particular, larger offspring population sizes often lead to strong robustness. We analyze to what extent the (1 + (λ, λ)) genetic algorithm is robust to noise. This algorithm also works with larger offspring population sizes, but an intermediate selection step and a non-standard use of crossover as repair mechanism could render this algorithm less robust than, e.g., the simple (1 + λ) evolutionary algorithm. Our experimental analysis on several classic benchmark problems shows that this difficulty does not arise. Surprisingly, in many situations this algorithm is even more robust to noise than the (1 + λ) EA.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(lange:2023:GECCO, author = {Robert Lange and Tom Schaul and Yutian Chen and Chris Lu and Tom Zahavy and Valentin Dalibard and Sebastian Flennerhag}, title = {Discovering {Attention-Based} Genetic Algorithms via {Meta-Black-Box} Optimization}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {929--937}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithm, machine learning, meta-learning}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590496}, size = {9 pages}, abstract = {Genetic algorithms constitute a family of black-box optimization algorithms, which take inspiration from the principles of biological evolution. While they provide a general-purpose tool for optimization, their particular instantiations can be heuristic and motivated by loose biological intuition. In this work we explore a fundamentally different approach: Given a sufficiently flexible parametrization of the genetic operators, we discover entirely new genetic algorithms in a data-driven fashion. More specifically, we parametrize selection and mutation rate adaptation as cross- and self-attention modules and use Meta-Black-Box-Optimization to evolve their parameters on a set of diverse optimization tasks. The resulting Learned Genetic Algorithm outperforms state-of-the-art adaptive baseline genetic algorithms and generalizes far beyond its meta-training settings. The learned algorithm can be applied to previously unseen optimization problems, search dimensions \& evaluation budgets. 
We conduct extensive analysis of the discovered operators and provide ablation experiments, which highlight the benefits of flexible module parametrization and the ability to transfer ('plug-in') the learned operators to conventional genetic algorithms.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(nikfarjam:2023:GECCO, author = {Adel Nikfarjam and Ralf Rothenberger and Frank Neumann and Tobias Friedrich}, title = {Evolutionary Diversity Optimisation in Constructing Satisfying Assignments}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {938--945}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary diversity optimisation, SAT}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590517}, size = {8 pages}, abstract = {Computing diverse solutions for a given problem, in particular evolutionary diversity optimisation (EDO), is a hot research topic in the evolutionary computation community. This paper studies the Boolean satisfiability problem (SAT) in the context of EDO. SAT is of great importance in computer science and differs from the other problems studied in EDO literature, such as KP and TSP. SAT is heavily constrained, and the conventional evolutionary operators are inefficient in generating SAT solutions. Our approach avails of the following characteristics of SAT: 1) the possibility of adding more constraints (clauses) to the problem to forbid solutions or to fix variables, and 2) powerful solvers in the literature, such as minisat. We utilise such a solver to construct a diverse set of solutions.Moreover, maximising diversity provides us with invaluable information about the solution space of a given SAT problem, such as how large the feasible region is. In this study, we introduce evolutionary algorithms (EAs) employing a well-known SAT solver to maximise diversity among a set of SAT solutions explicitly. 
The experimental investigations indicate the introduced algorithms' capability to maximise diversity among the SAT solutions.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(przewozniczek:2023:GECCO, author = {Michal Witold Przewozniczek and Renato Tin\'{o}s and Marcin Michal Komarnicki}, title = {First Improvement Hill Climber with Linkage Learning -- on Introducing Dark {Gray-Box} Optimization into Statistical Linkage Learning Genetic Algorithms}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {946--954}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary algorithms, genetic algorithms, linkage learning, model building, empirical linkage learning}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590495}, size = {9 pages}, abstract = {Gray-box optimization requires user-supported information about inter-variable dependencies to propose more effective optimizers for hard combinatorial problems. In Black-box optimization, such information is unavailable. Therefore, the Gray-box operators are only usable in Black-box scenarios if an optimizer can discover the inter-variable dependencies independently. Empirical Linkage Learning (ELL) techniques are guaranteed to discover only the true dependencies, which led to proposing the Dark Gray-box optimizers class. Such optimizers use ELL to construct Empirical Variable Interaction Graph (eVIG), which may miss some dependencies but contains only the true ones. eVIG allows using Gray-box operators in Black-box scenarios. ELL techniques are computationally expensive. Therefore, the recently proposed Local Search with Linkage Learning (LSwLL) is promising because it makes ELL a no-cost technique. However, LSwLL has some disadvantages. First, it can decompose only the problems of additive nature. Second, LSwLL removes ELL costs, but in some optimization scenarios, it may be expensive itself. Therefore, we propose the First Improvement Hill Climber with Linkage Learning (FIHCwLL). FIHCwLL decomposes additive and non-additive problems, and its overall costs are frequently lower than LSwLL (although ELL is not no-cost anymore). We introduce FIHCwLL into two state-of-the-art model-building optimizers, creating two new Dark Gray-box optimizers of significantly improved effectiveness.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(przewozniczek:2023:GECCO2, author = {Michal Witold Przewozniczek and Marcin Michal Komarnicki}, title = {To Slide or Not to Slide? 
Moving along Fitness Levels and Preserving the Gene Subsets Diversity in Modern Evolutionary Computation}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {955--962}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {linkage learning, genetic algorithm, model building, empirical linkage learning, optimal mixing}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590491}, size = {8 pages}, abstract = {Optimal Mixing (OM) is a mating operator employed by many state-of-the-art Genetic Algorithms (GAs). This paper identifies the sliding phenomenon defined as a series of the so-called plateau moves. Sliding is a part of the original OM proposition. Although sliding seems to be a minor element of OM, we show that performing or avoiding it may significantly affect the effectiveness of OM-employing GAs. Therefore, we analyze the details of sliding pros and cons and propose the Autonomous Slide Deciding Algorithm (ASDA). ASDA analyzes the diversity of the population for a given subset of genes. Then, it decides, for a given mixing operation, whether sliding will be profitable or not. We show that using ASDA is greedily beneficial for two different state-of-the-art GAs. Additionally, we explain why ASDA deteriorates the effectiveness of the third considered OM-using GA.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(scarton:2023:GECCO, author = {Ludovico Scarton and Alexander Hagg}, title = {On the Suitability of Representations for Quality Diversity Optimization of Shapes}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {963--971}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {representation, compositional pattern producing networks, parametric, encoding, quality diversity, cellular automata}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590381}, size = {9 pages}, abstract = {The representation, or encoding, utilized in evolutionary algorithms has a substantial effect on their performance. Examination of the suitability of widely used representations for quality diversity optimization (QD) in robotic domains has yielded inconsistent results regarding the most appropriate encoding method. Given the domain-dependent nature of QD, additional evidence from other domains is necessary. 
This study compares the impact of several representations, including direct encoding, a dictionary-based representation, parametric encoding, compositional pattern producing networks, and cellular automata, on the generation of voxelized meshes in an architecture setting. The results reveal that some indirect encodings outperform direct encodings and can generate more diverse solution sets, especially when considering full phenotypic diversity. The paper introduces a multi-encoding QD approach that incorporates all evaluated representations in the same archive. Species of encodings compete on the basis of phenotypic features, leading to an approach that demonstrates similar performance to the best single-encoding QD approach. This is noteworthy, as it does not always require the contribution of the best-performing single encoding.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(shahrzad:2023:GECCO, author = {Hormoz Shahrzad and Risto P Miikkulainen}, title = {Accelerating Evolution Through Gene Masking and Distributed Search}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {972--980}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary algorithms, genetic algorithms, adaptive evolution, fitness-level method, distributed evolution, markov chains, stochastic matrices}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590508}, size = {9 pages}, abstract = {In building practical applications of evolutionary computation (EC), two optimizations are essential. First, the parameters of the search method need to be tuned to the domain in order to balance exploration and exploitation effectively. Second, the search method needs to be distributed to take advantage of parallel computing resources. This paper presents BLADE (BLAnket Distributed Evolution) as an approach to achieving both goals simultaneously. BLADE uses blankets (i.e., masks on the genetic representation) to tune the evolutionary operators during the search, and implements the search through hub-and-spoke distribution. In the paper, (1) the blanket method is formalized for the (1 + 1)EA case as a Markov chain process. Its effectiveness is then demonstrated by analyzing dominant and subdominant eigenvalues of stochastic matrices, suggesting a generalizable theory; (2) the fitness-level theory is used to analyze the distribution method; and (3) these insights are verified experimentally on three benchmark problems, showing that both blankets and distribution lead to accelerated evolution. Moreover, a surprising synergy emerges between them: When combined with distribution, the blanket approach achieves more than n-fold speedup with n clients in some cases. 
The work thus highlights the importance and potential of optimizing evolutionary computation in practical applications.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(tinos:2023:GECCO, author = {Renato Tin\'{o}s and Michal Przewozniczek and Darrell Whitley and Francisco Chicano}, title = {Genetic Algorithm with Linkage Learning}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {981--989}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {variable interaction graph, genetic algorithms, linkage learning}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590349}, size = {9 pages}, abstract = {Next-generation genetic algorithms (GAs) should explore information from the problem structure whenever possible. Variable interactions can be inferred using linkage learning. Statistical linkage learning techniques were shown to improve GAs' effectiveness significantly in many problems, but may eventually report false linkages. On the other hand, empirical linkage learning (ELL) techniques discover only true variable dependencies. However, traditional ELL techniques are computationally expensive. We introduce the genetic algorithm with linkage learning (GAwLL), which discovers an empirical weighted variable interaction graph (VIGw) as a side-effect of the optimization performed by a GA, making it a no-cost ELL technique. Vertices of the VIGw represent decision variables and weights indicate the strength of the interaction between variables. The VIGw allows us to obtain new insights about the optimization problem and can be used to design genetic operators that efficiently explore the information about variable dependencies. Experiments with NK landscapes show that GAwLL is able to efficiently build the empirical VIGw. We also present an interesting machine learning application, where the VIGw represents a feature interaction network. 
By using GAwLL, the feature interaction network is built as a side-effect of evolutionary feature selection.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(doerr:2023:GECCO, author = {Benjamin Doerr and Arthur Dremaux and Johannes Lutzeyer and Aurelien Stumpf}, title = {How the Move Acceptance {Hyper-Heuristic} Copes With Local Optima: Drastic Differences Between Jumps and Cliffs}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {990--999}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {hyper-heuristics, non-elitism, mutation, cliff benchmark}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590509}, size = {10 pages}, abstract = {In recent work, Lissovoi, Oliveto, and Warwicker (Artificial Intelligence (2023)) proved that the Move Acceptance Hyper-Heuristic (MAHH) leaves the local optimum of the multimodal cliff benchmark with remarkable efficiency. With its O(n^3) runtime, for almost all cliff widths d, the MAHH massively outperforms the Θ(n^d) runtime of simple elitist evolutionary algorithms (EAs). For the most prominent multimodal benchmark, the jump functions, the given runtime estimates of O(n^{2m} m^{-Θ(m)}) and Ω(2^{Ω(m)}), for gap size m ≥ 2, are far apart and the real performance of MAHH is still an open question. In this work, we resolve this question. We prove that for any choice of the MAHH selection parameter p, the expected runtime of the MAHH on a jump function with gap size m = o(n^{1/2}) is at least Ω(n^{2m-1}/(2m - 1)!). This renders the MAHH much slower than simple elitist evolutionary algorithms with their typical O(n^m) runtime. We also show that the MAHH with the global bit-wise mutation operator instead of the local one-bit operator optimizes jump functions in time [EQUATION], essentially the minimum of the optimization times of the (1 + 1) EA and the MAHH. 
This suggests that combining several ways to cope with local optima can be a fruitful approach.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(doerr:2023:GECCO2, author = {Benjamin Doerr and Taha {El Ghazi El Houssaini} and Amirhossein Rajabi and Carsten Witt}, title = {How Well Does the Metropolis Algorithm Cope With Local Optima?}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1000--1008}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary algorithm, theory, stochastic local search heuristic, metropolis algorithm, runtime analysis}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590390}, size = {9 pages}, abstract = {The Metropolis algorithm (MA) is a classic stochastic local search heuristic. It avoids getting stuck in local optima by occasionally accepting inferior solutions. To better and in a rigorous manner understand this ability, we conduct a mathematical runtime analysis of the MA on the CLIFF benchmark. Apart from one local optimum, cliff functions are monotonically increasing towards the global optimum. Consequently, to optimize a cliff function, the MA only once needs to accept an inferior solution. Despite seemingly being an ideal benchmark for the MA to profit from its main working principle, our mathematical runtime analysis shows that this hope does not come true. Even with the optimal temperature (the only parameter of the MA), the MA optimizes most cliff functions less efficiently than simple elitist evolutionary algorithms (EAs), which can only leave the local optimum by generating a superior solution possibly far away. This result suggests that our understanding of why the MA is often very successful in practice is not yet complete. 
Our work also suggests to equip the MA with global mutation operators, an idea supported by our preliminary experiments.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(huntsman:2023:GECCO, author = {Steve Huntsman}, title = {{Quality-Diversity} in Dissimilarity Spaces}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1009--1018}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {quality-diversity optimization, dissimilarity, magnitude}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590409}, size = {10 pages}, abstract = {The theory of magnitude provides a mathematical framework for quantifying and maximizing diversity. We apply this framework to formulate quality-diversity algorithms in generic dissimilarity spaces. In particular, we instantiate a very general version of Go-Explore with promising performance for challenging and computationally expensive objectives, such as arise in simulations.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(kent:2023:GECCO, author = {Paul Kent and Juergen Branke}, title = {Bayesian Quality Diversity Search with Interactive Illumination}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1019--1026}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {quality diversity, interactive, surrogate assisted, illumination, Bayesian optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590486}, size = {8 pages}, abstract = {This paper presents a novel way for interactively identifying a most preferable solution based on quality and behavioural characteristics. Our algorithm combines the principles of Quality-Diversity Search and Bayesian Optimization to create Gaussian Process surrogate models of the behaviour and fitness space. Unlike traditional Quality-Diversity methods which aim to find good solutions with different behavioural characteristics, we propose a three-step interactive approach that allows a decision maker to efficiently identify the most preferred solution(s). In the first stage, it uses an entropy-based acquisition function to generate an illumination model, followed by an interactive phase where the decision maker can specify regions of interest and a target behaviour. 
These preferences are then utilized by an improvement greedy acquisition function to guide the optimization process and quickly identify a solution close to the user-specified target. In a case study, with a simulated decision maker, we demonstrate that our approach can find better solutions much more quickly than by selecting the most preferred solution from an archive generated with MAP-Elites.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(lehre:2023:GECCO, author = {Per Kristian Lehre and Mario {Hevia Fajardo} and Jamal Toutouh and Erik Hemberg and Una-May O'Reilly}, title = {Analysis of a Pairwise Dominance Coevolutionary Algorithm And {DefendIt}}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1027--1035}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary algorithms, co-evolution, cyber security}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590411}, size = {9 pages}, abstract = {While competitive coevolutionary algorithms are ideally suited to model adversarial dynamics, their complexity makes it difficult to understand what is happening when they execute. To achieve better clarity, we introduce a game named DefendIt and explore a previously developed pairwise dominance coevolutionary algorithm named PDCoEA. We devise a methodology for consistent algorithm comparison, then use it to empirically study the impact of population size, the impact of relative budget limits between the defender and attacker, and the impact of mutation rates on the dynamics and payoffs. Our methodology provides reliable comparisons and records of run and multi-run dynamics. Our supplementary material also offers enticing and detailed animations of a pair of players' game moves over the course of a game of millions of moves matched to the same run's populations' payoffs.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(muniasamy:2023:GECCO, author = {Rajesh Pandian Muniasamy and Somesh Singh and Rupesh Nasre and N.S. 
Narayanaswamy}, title = {Effective Parallelization of the Vehicle Routing Problem}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1036--1044}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {operations research, travelling salesman problem, minimum spanning tree, capacitated vehicle routing problem}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590458}, size = {9 pages}, abstract = {Capacitated Vehicle Routing Problem (CVRP) is an important combinatorial optimization problem, which is also NP-hard. A wide array of heuristics have been proposed in the literature to obtain an approximate solution to CVRP. To improve the execution time, parallel methods have been developed for accelerating metaheuristics-based algorithms, genetic algorithms, and evolutionary algorithms for CVRP. Despite these advances, our experiments with the state-of-the-art parallel solutions indicate that their run times are too high to be practically useful. The combinatorial explosion is so high that the execution time is prohibitively large even on mid-sized CVRP instances having a few hundred customers. In this work, we propose a novel technique which combines local search and randomization for solving CVRP faster with reasonable accuracy, even on large problem instances. Our usage of randomization enables searching a large space of candidate solutions. We experimentally compare our proposed method with the state-of-the-art GPU implementations on diverse input instances and demonstrate the efficacy of our approach. Our sequential and shared-memory parallel implementations are on an average 36--1189\texttimes{} faster than the state-of-the-art GPU-parallel genetic algorithms while also achieving a superior solution quality. 
Furthermore, our reported solutions are close to the current best-known solutions from CVRPLIB.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(pigozzi:2023:GECCO2, author = {Federico Pigozzi and Federico Julian {Camerota Verd\`{u}} and Eric Medvet}, title = {How the Morphology Encoding Influences the Learning Ability in {Body-Brain} {Co-Optimization}}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1045--1054}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {plasticity, embodiment, evolutionary reinforcement learning, encoding}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590429}, size = {10 pages}, abstract = {Embedding the learning of controllers within the evolution of morphologies has emerged as an effective strategy for the co-optimization of agents' bodies and brains. Intuitively, that is how nature shaped animal life on Earth. Still, the design of such co-optimization is a complex endeavor; one issue is the choice of the genetic encoding for the morphology. Such choice can be crucial for the effectiveness of learning, i.e., how fast and to what degree agents adapt, through learning, during their life. Here we evolve the morphologies of voxel-based soft agents with two different encodings, direct and indirect while learning the controllers with reinforcement learning. We experiment with three tasks, ranging from cave crawling to beam toppling, and study how the encoding influences the learning outcome. Our results show that the direct encoding corresponds to increased ability to learn, mostly in terms of learning speed. The same is not always true for the indirect one. 
We link these results to different shades of the Baldwin effect, consisting of morphologies being selected for increasing an agent's ability to learn during its lifetime.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(christodoulaki:2023:GECCO, author = {Evangelia Christodoulaki and Michael Kampouridis and Maria Kyropoulou}, title = {Enhanced Strongly Typed Genetic Programming for Algorithmic Trading}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1055--1063}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, algorithmic trading, technical analysis, sentiment analysis}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590359}, size = {9 pages}, abstract = {This paper proposes a novel strongly typed Genetic Programming (STGP) algorithm that combines Technical (TA) and Sentiment analysis (SA) indicators to produce trading strategies. While TA and SA have been successful when used individually, their combination has not been considered extensively. Our proposed STGP algorithm has a novel fitness function, which rewards not only a tree's trading performance, but also the trading performance of its TA and SA subtrees. To achieve this, the fitness function is equal to the sum of three components: the fitness function for the complete tree, the fitness function of the TA subtree, and the fitness function of the SA subtree. In doing so, we ensure that the evolved trees contain profitable trading strategies that take full advantage of both technical and sentiment analysis. We run experiments on 35 international stocks and compare the STGP's performance to four other GP algorithms, as well as multilayer perceptron, support vector machines, and buy and hold. 
Results show that the proposed GP algorithm statistically and significantly outperforms all benchmarks and it improves the financial performance of the trading strategies produced by other GP algorithms by up to a factor of two for the median rate of return.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(de-franca:2023:GECCO, author = {Fabricio Olivetti {de Franca} and Gabriel Kronberger}, title = {Reducing Overparameterization of Symbolic Regression Models with Equality Saturation}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1064--1072}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, simplification, symbolic regression, equality saturation}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590346}, size = {9 pages}, abstract = {Overparameterized models in regression analysis are often harder to interpret and can be harder to fit because of ill-conditioning. Genetic programming is prone to overparameterized models as it evolves the structure of the model without taking the location of parameters into account. One way to alleviate this is rewriting the expression and merging the redundant fitting parameters. In this paper we propose the use of equality saturation to alleviate overparameterization. We first notice that all the tested GP implementations suffer from overparameterization to different extents and then show that equality saturation together with a small set of rewriting rules is capable of reducing the number of fitting parameters to a minimum with a high probability. Compared to one of the few available alternatives, Sympy, it produces much better and consistent results. 
These results lead to different possible future investigations such as the simplification of expressions during the evolutionary process, and improvement of the interpretability of symbolic models.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(ding:2023:GECCO, author = {Li Ding and Edward Pantridge and Lee Spector}, title = {Probabilistic Lexicase Selection}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1073--1081}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, symbolic regression, machine learning, evolutionary algorithms, program synthesis, parent selection}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590375}, size = {9 pages}, abstract = {Lexicase selection is a widely used parent selection algorithm in genetic programming, known for its success in various task domains such as program synthesis, symbolic regression, and machine learning. Due to its non-parametric and recursive nature, calculating the probability of each individual being selected by lexicase selection has been proven to be an NP-hard problem, which discourages deeper theoretical understanding and practical improvements to the algorithm. In this work, we introduce probabilistic lexicase selection (plexicase selection), a novel parent selection algorithm that efficiently approximates the probability distribution of lexicase selection. Our method not only demonstrates superior problem-solving capabilities as a semantic-aware selection method, but also benefits from having a probabilistic representation of the selection process for enhanced efficiency and flexibility. Experiments are conducted in two prevalent domains in genetic programming: program synthesis and symbolic regression, using standard benchmarks including PSB and SRBench. 
The empirical results show that plexicase selection achieves state-of-the-art problem-solving performance that is competitive with lexicase selection, and significantly outperforms lexicase selection in computational efficiency.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(durasevic:2023:GECCO, author = {Marko Durasevic and Francisco Javier Gil-Gala and Domagoj Jakobovi\'{c}}, title = {Divide and Conquer: Using Single Objective Dispatching Rules to Improve Convergence for {Multi-Objective} Optimisation}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1082--1090}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, dispatching rules, scheduling, unrelated machines environment}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590370}, size = {9 pages}, abstract = {Dynamic multi-objective (MO) scheduling problems are encountered in various real-world situations. Due to dynamic events that occur in such problems, one has to resort to using simple constructive heuristics, called dispatching rules (DRs), when tackling them. Since DRs are difficult to design manually, there is a lack of existing DRs suitable for solving MO problems. Due to that reason, genetic programming has successfully been applied to evolve DRs specifically for solving MO problems. The process of evolving new DRs is computationally expensive, requiring a significant amount of time to obtain DRs of good quality. For that reason it is worth investigating in which ways the convergence of algorithms could be improved. One option is to use DRs previously evolved for optimising individual criteria to initialise the starting population when optimising a MO problem. The goal of this study is to investigate how such an initialisation strategy affects the performance of NSGA-II and NSGA-III when evolving DRs for MO problems. Therefore, 8 MO unrelated machines scheduling problems, containing between 2 and 5 criteria, are considered. 
The obtained results demonstrate that using previously evolved DRs for single objective optimisation leads to faster convergence and, in many cases, significantly better results.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(fernandes:2023:GECCO, author = {Matheus Campos Fernandes and Fabr\'{\i}cio Olivetti {De Fran\c{c}a} and Em\'{\i}lio Francesquini}, title = {{HOTGP} - {Higher-Order} Typed Genetic Programming}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1091--1099}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, functional programming, inductive program synthesis}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590464}, size = {9 pages}, abstract = {Program synthesis is the process of generating a computer program following a set of specifications, which can be a high-level description of the problem and/or a set of input-output examples. The synthesis can be modeled as a search problem in which the search space is the set of all the programs valid under a grammar. As the search space is vast, brute force is usually not viable and search heuristics, such as genetic programming, also have difficulty navigating it without any guidance. In this paper we present HOTGP, a new genetic programming algorithm that synthesizes pure, typed, and functional programs. HOTGP leverages the knowledge provided by the rich data-types associated with the specification and the built-in grammar to constrain the search space and improve the performance of the synthesis. The grammar is based on Haskell's standard base library (the synthesized code can be directly compiled using any standard Haskell compiler) and includes support for higher-order functions, λ-functions, and parametric polymorphism. 
Experimental results show that, when compared to 6 state-of-the-art algorithms using a standard set of benchmarks, HOTGP is competitive and capable of synthesizing the correct programs more frequently than any other of the evaluated algorithms.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(fonseca:2023:GECCO, author = {Alcides Fonseca and Diogo Po\c{c}as}, title = {Comparing the Expressive Power of {Strongly-Typed} and {Grammar-Guided} Genetic Programming}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1100--1108}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, grammatical evolution, grammar-guided genetic programming, strongly-typed genetic programming}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590507}, size = {9 pages}, abstract = {Since Genetic Programming (GP) has been proposed, several flavors of GP have arisen, each with their own strengths and limitations. Grammar-Guided and Strongly-Typed GP (GGGP and STGP, respectively) are two popular flavors that have the advantage of allowing the practitioner to impose syntactic and semantic restrictions on the generated programs. GGGP makes use of (traditionally context-free) grammars to restrict the generation of (and the application of genetic operators on) individuals. By guiding this generation according to a grammar, i.e. a set of rules, GGGP improves performance by searching for a good-enough solution on a subset of the search space. This approach has been extended with Attribute Grammars to encode semantic restrictions, while Context-Free Grammars would only encode syntactic restrictions. STGP is also able to restrict the shape of the generated programs using a very simple grammar together with a type system. In this work, we address the question of which approach has more expressive power. 
We demonstrate that STGP has higher expressive power than Context-Free GGGP and less expressive power than Attribute Grammatical Evolution.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(geiger:2023:GECCO, author = {Alina Geiger and Dominik Sobania and Franz Rothlauf}, title = {{Down-Sampled} {Epsilon-Lexicase} Selection for {Real-World} Symbolic Regression Problems}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1109--1117}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, parent selection, down-sampled epsilon-lexicase selection, symbolic regression}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590400}, size = {9 pages}, abstract = {Epsilon-lexicase selection is a parent selection method in genetic programming that has been successfully applied to symbolic regression problems. Recently, the combination of random subsampling with lexicase selection significantly improved performance in other genetic programming domains such as program synthesis. However, the influence of subsampling on the solution quality of real-world symbolic regression problems has not yet been studied. In this paper, we propose down-sampled epsilon-lexicase selection which combines epsilon-lexicase selection with random subsampling to improve the performance in the domain of symbolic regression. Therefore, we compare down-sampled epsilon-lexicase with traditional selection methods on common real-world symbolic regression problems and analyze its influence on the properties of the population over a genetic programming run. We find that the diversity is reduced by using down-sampled epsilon-lexicase selection compared to standard epsilon-lexicase selection. This comes along with high hyperselection rates we observe for down-sampled epsilon-lexicase selection. Further, we find that down-sampled epsilon-lexicase selection outperforms the traditional selection methods on all studied problems. 
Overall, with down-sampled epsilon-lexicase selection we observe an improvement of the solution quality of up to 85\% in comparison to standard epsilon-lexicase selection.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(gil-gala:2023:GECCO, author = {Francisco Javier Gil-Gala and Sezin Afsar and Marko Durasevic and Juan Jos\'{e} Palacios and Murat Afsar}, title = {Genetic Programming for the Vehicle Routing Problem with {Zone-Based} Pricing}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1118--1126}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, vehicle routing problem, zone-based pricing, routing policies, hyper-heuristics}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590366}, size = {9 pages}, abstract = {The vehicle routing problem (VRP) is one of the most interesting NP-Hard problems due to the multitude of applications in the real world. This work tackles a VRP with zone-based prices in which each customer belongs to a particular zone, and the goal is to maximize the profit. The particularity of this VRP variant is that the provider needs to determine the prices for each zone and routes for all vehicles. However, depending on the selected zone prices, only a subset of customers will have to be visited. In this work, we propose a novel route generation scheme (RGS) that considers both decisions simultaneously. The RGS is guided by a priority function (PF), which determines the next customer to visit. Since designing efficient PFs manually is a difficult and time-consuming task, hyper-heuristic methods, specifically genetic programming (GP), have been used in this study to generate them automatically. Furthermore, to test the performance of the generated PFs, a genetic algorithm is also used to exploit the RGS to construct the solution. The experimental analysis shows that the evolved heuristics provide reasonable quality solutions quickly, in contrast with the current state-of-the-art. 
Furthermore, GP produces better results than GA for some problem instances.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(harrison:2023:GECCO, author = {Joe Harrison and Marco Virgolin and Tanja Alderliesten and Peter Bosman}, title = {{Mini-Batching,} {Gradient-Clipping,} First- versus {Second-Order:} What Works in {Gradient-Based} Coefficient Optimisation for Symbolic Regression?}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1127--1136}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, symbolic regression, gradient descent, explainable AI, coefficient optimisation}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590368}, size = {10 pages}, abstract = {The aim of Symbolic Regression (SR) is to discover interpretable expressions that accurately describe data. The accuracy of an expression depends on both its structure and coefficients. To keep the structure simple enough to be interpretable, effective coefficient optimisation becomes key. Gradient-based optimisation is clearly effective at training neural networks in Deep Learning (DL), which can essentially be viewed as large, over-parameterised expressions: in this paper, we study how gradient-based optimisation techniques as often used in DL transfer to SR. In particular, we first assess what techniques work well across random SR expressions, independent of any specific SR algorithm. We find that mini-batching and gradient-clipping can be helpful (similar to DL), while second-order optimisers outperform first-order ones (different from DL). Next, we consider whether including gradient-based optimisation in Genetic Programming (GP), a classic SR algorithm, is beneficial. On five real-world datasets, in a generation-based comparison, we find that second-order optimisation outperforms coefficient mutation (or no optimisation). However, in time-based comparisons, performance gaps shrink substantially because the computational expensiveness of second-order optimisation causes GP to perform fewer generations. 
The interplay of computational costs between the optimisation of structure and coefficients is thus a critical aspect to consider.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(huang:2023:GECCO, author = {Zhixing Huang and Yi Mei and Fangfang Zhang and Mengjie Zhang}, title = {{Grammar-Guided} Linear Genetic Programming for Dynamic Job Shop Scheduling}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1137--1145}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, grammar, linear genetic programming, dynamic job shop scheduling}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590394}, size = {9 pages}, abstract = {Dispatching rules are commonly used to make instant decisions in dynamic scheduling problems. Linear genetic programming (LGP) is one of the effective methods to design dispatching rules automatically. However, the effectiveness and efficiency of LGP methods are limited due to the large search space. Exploring the entire search space of programs is inefficient for LGP since a large number of programs might contain redundant blocks and might be inconsistent with domain knowledge, which would further limit the effectiveness of the produced LGP models. To improve the performance of LGP in dynamic job shop scheduling problems, this paper proposes a grammar-guided LGP to make LGP focus more on promising programs. Our dynamic job shop scheduling simulation results show that the proposed grammar-guided LGP has better training efficiency than basic LGP, and can produce solutions with good explanations. 
Further analyses show that grammar-guided LGP significantly improves the overall test effectiveness when the number of LGP registers increases.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(liventsev:2023:GECCO, author = {Vadim Liventsev and Anastasiia Grishina and Aki H\"{a}rm\"{a} and Leon Moonen}, title = {Fully Autonomous Programming with Large Language Models}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1146--1155}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, automatic programming, program repair, large language models}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590481}, size = {10 pages}, abstract = {Current approaches to program synthesis with Large Language Models (LLMs) exhibit a "near miss syndrome": they tend to generate programs that semantically resemble the correct answer (as measured by text similarity metrics or human evaluation), but achieve a low or even zero accuracy as measured by unit tests due to small imperfections, such as the wrong input or output format. This calls for an approach known as Synthesize, Execute, Debug (SED), whereby a draft of the solution is generated first, followed by a program repair phase addressing the failed tests. To effectively apply this approach to instruction-driven LLMs, one needs to determine which prompts perform best as instructions for LLMs, as well as strike a balance between repairing unsuccessful programs and replacing them with newly generated ones. We explore these trade-offs empirically, comparing replace-focused, repair-focused, and hybrid debug strategies, as well as different template-based and model-based prompt-generation techniques. We use OpenAI Codex as the LLM and Program Synthesis Benchmark 2 as a database of problem descriptions and tests for evaluation. 
The resulting framework outperforms both conventional usage of Codex without the repair phase and traditional genetic programming approaches.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(medvet:2023:GECCO, author = {Eric Medvet and Simone Pozzi and Luca Manzoni}, title = {A General Purpose Representation and Adaptive {EA} for Evolving Graphs}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1156--1164}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {prolog, adaptive evolutionary algorithm, graph evolutionary algorithm, declarative programming}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590431}, size = {9 pages}, abstract = {Graphs are a way to describe complex entities and their relations that apply to many practically relevant domains. However, domains often differ not only in the properties of nodes and edges, but also in the constraints imposed on the overall structure. This makes it hard to define a general representation and genetic operators for graphs that permit the evolutionary optimization over many domains. In this paper, we tackle this challenge. We first propose a representation template that can be customized by users for specific domains: the constraints and the genetic operators are given in Prolog, a declarative programming language for operating with logic. Then, we define an adaptive evolutionary algorithm that can work with a large number of genetic operators by modifying their usage probability during the evolution: in this way, we relieve the user from the burden of selecting in advance only operators that are "good enough". We experimentally evaluate our proposal on two radically different domains to demonstrate its applicability and effectiveness: symbolic regression with trees and text extraction with finite-state automata. 
The results are promising: our approach does not trade effectiveness for versatility and is not worse than other domain-tailored approaches.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(nadizar:2023:GECCO2, author = {Giorgia Nadizar and Fraser Garrow and Berfin Sakallioglu and Lorenzo Canonne and Sara Silva and Leonardo Vanneschi}, title = {An Investigation of Geometric Semantic {GP} with Linear Scaling}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1165--1174}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, symbolic regression, linear scaling, geometric semantic genetic programming}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590418}, size = {10 pages}, abstract = {Geometric semantic genetic programming (GSGP) and linear scaling (LS) have both, independently, shown the ability to outperform standard genetic programming (GP) for symbolic regression. GSGP uses geometric semantic genetic operators, different from the standard ones, without altering the fitness, while LS modifies the fitness without altering the genetic operators. So far, these two methods have already been joined together in only one practical application. However, to the best of our knowledge, a methodological study on the pros and cons of integrating these two methods has never been performed. In this paper, we present a study of GSGP-LS, a system that integrates GSGP and LS. The results, obtained on five hand-tailored benchmarks and six real-life problems, indicate that GSGP-LS outperforms GSGP in the majority of the cases, confirming the expected benefit of this integration. However, for some particularly hard datasets, GSGP-LS overfits training data, being outperformed by GSGP on unseen data. Additional experiments using standard GP, with and without LS, confirm this trend also when standard crossover and mutation are employed. 
This contradicts the idea that LS is always beneficial for GP, warning the practitioners about its risk of overfitting in some specific cases.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(pantridge:2023:GECCO, author = {Edward Pantridge and Thomas Helmuth}, title = {Solving Novel Program Synthesis Problems with Genetic Programming Using Parametric Polymorphism}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1175--1183}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, inductive program synthesis, automatic programming, polymorphism}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590502}, size = {9 pages}, abstract = {Contemporary genetic programming (GP) systems for general program synthesis have been primarily concerned with evolving programs that can manipulate values from a standard set of primitive data types and simple indexed data structures. In contrast, human programmers do not limit themselves to a small finite set of data types and use polymorphism to express an unbounded number of types including nested data structures, product types, and generic functions. Code-building Genetic Programming (CBGP) is a recently introduced method that compiles type-safe programs from linear genomes using stack-based compilation and a formal type system. Although prior work with CBGP has shown initial demonstrations of polymorphism inside evolved programs, we have provided a deeper exploration of these capabilities through the evolution of programs which make use of generic data types such as key-value maps, tuples, and sets, as well as higher order functions and functions with polymorphic type signatures. In our experiments, CBGP is able to solve problems with all of these properties, where every other GP system that we know of has restrictions that make it unable to even consider problems with these properties. 
This demonstration provides a significant step towards fully aligning the expressiveness of GP to real world programming.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(raymond:2023:GECCO, author = {Christian Raymond and Qi Chen and Bing Xue and Mengjie Zhang}, title = {Fast and Efficient {Local-Search} for Genetic Programming Based Loss Function Learning}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1184--1193}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, loss function learning, meta-learning}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590361}, size = {10 pages}, abstract = {In this paper, we develop upon the topic of loss function learning, an emergent meta-learning paradigm that aims to learn loss functions that significantly improve the performance of the models trained under them. Specifically, we propose a new meta-learning framework for task and model-agnostic loss function learning via a hybrid search approach. The framework first uses genetic programming to find a set of symbolic loss functions. Second, the set of learned loss functions is subsequently parameterized and optimized via unrolled differentiation. The versatility and performance of the proposed framework are empirically validated on a diverse set of supervised learning tasks. 
Results show that the learned loss functions bring improved convergence, sample efficiency, and inference performance on tabulated, computer vision, and natural language processing problems, using a variety of task-specific neural network architectures.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(zhang:2023:GECCO2, author = {Hengzhe Zhang and Qi Chen and Bing Xue and Wolfgang Banzhaf and Mengjie Zhang}, title = {A Double Lexicase Selection Operator for Bloat Control in Evolutionary Feature Construction for Regression}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1194--1202}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, evolutionary feature construction, bloat control}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590365}, size = {9 pages}, abstract = {Evolutionary feature construction is an important technique in the machine learning domain for enhancing learning performance. However, traditional genetic programming-based feature construction methods often suffer from bloat, which means the sizes of constructed features increase excessively without improved performance. To address this issue, this paper proposes a double-stage lexicase selection operator to control bloat while not damaging search effectiveness. This new operator contains a two-stage selection process, where the first stage selects individuals based on fitness values and the second stage selects individuals based on tree sizes. Therefore, the proposed operator can control bloat while leveraging the advantage of the lexicase selection operator. Experimental results on 98 regression datasets show that compared to the traditional bloat control method of having a depth limit, the proposed selection operator not only significantly reduces the sizes of constructed features on all datasets but also keeps a similar level of predictive performance. 
A comparative experiment with seven bloat control methods shows that the double lexicase selection operator achieves the best trade-off between the model performance and the model size.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(gasperov:2023:GECCO, author = {Bruno Ga\v{s}perov and Marko {\DJ}urasevi\'{c}}, title = {On Evolvability and Behavior Landscapes in Neuroevolutionary Divergent Search}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1203--1211}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {neuroevolution, genetic algorithms, evolvability, divergent search, novelty search}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590427}, size = {9 pages}, abstract = {Evolvability refers to the ability of an individual genotype (solution) to produce offspring with mutually diverse phenotypes. Recent research has demonstrated that divergent search methods, particularly novelty search, promote evolvability by implicitly creating selective pressure for it. The main objective of this paper is to provide a novel perspective on the relationship between neuroevolutionary divergent search and evolvability. In order to achieve this, several types of walks from the literature on fitness landscape analysis are first adapted to this context. Subsequently, the interplay between neuroevolutionary divergent search and evolvability under varying amounts of evolutionary pressure and under different diversity metrics is investigated. To this end, experiments are performed on Fetch Pick and Place, a robotic arm task. Moreover, the performed study in particular sheds light on the structure of the genotype-phenotype mapping (the behavior landscape). 
Finally, a novel definition of evolvability that takes into account the evolvability of offspring and is appropriate for use with discretized behavior spaces is proposed, together with a Markov-chain-based estimation method for it.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(lim:2023:GECCO, author = {Bryan Lim and Manon Flageat and Antoine Cully}, title = {Understanding the Synergies between {Quality-Diversity} and Deep Reinforcement Learning}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1212--1220}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {deep reinforcement learning, neuroevolution, quality-diversity}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590388}, size = {9 pages}, abstract = {The synergies between Quality-Diversity (QD) and Deep Reinforcement Learning (RL) have led to powerful hybrid QD-RL algorithms that have shown tremendous potential, and bring the best of both fields. However, only a single deep RL algorithm (TD3) has been used in prior hybrid methods despite notable progress made by other RL algorithms. Additionally, there are fundamental differences in the optimization procedures between QD and RL which would benefit from a more principled approach. We propose Generalized Actor-Critic QD-RL, a unified modular framework for actor-critic deep RL methods in the QD-RL setting. This framework provides a path to study insights from Deep RL in the QD-RL setting, which is an important and efficient way to make progress in QD-RL. We introduce two new algorithms, PGA-ME (SAC) and PGA-ME (DroQ) which apply recent advancements in Deep RL to the QD-RL setting, and solve the humanoid environment which was not possible using existing QD-RL algorithms. However, we also find that not all insights from Deep RL can be effectively translated to QD-RL. 
Critically, this work also demonstrates that the actor-critic models in QD-RL are generally insufficiently trained and that performance gains can be achieved without any additional environment evaluations.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(mace:2023:GECCO, author = {Valentin Mac\'{e} and Rapha\"{e}l Boige and Felix Chalumeau and Thomas Pierrot and Guillaume Richard and Nicolas Perrin-Gilbert}, title = {The {Quality-Diversity} Transformer: Generating {Behavior-Conditioned} Trajectories with Decision Transformers}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1221--1229}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {neuroevolution, quality-diversity, decision transformer}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590433}, size = {9 pages}, abstract = {In the context of neuroevolution, Quality-Diversity algorithms have proven effective in generating repertoires of diverse and efficient policies by relying on the definition of a behavior space. A natural goal induced by the creation of such a repertoire is trying to achieve behaviors on demand, which can be done by running the corresponding policy from the repertoire. However, in uncertain environments, two problems arise. First, policies can lack robustness and repeatability, meaning that multiple episodes under slightly different conditions often result in very different behaviors. Second, due to the discrete nature of the repertoire, solutions vary discontinuously. Here we present a new approach to achieve behavior-conditioned trajectory generation based on two mechanisms: First, MAP-Elites Low-Spread (ME-LS), which constrains the selection of solutions to those that are the most consistent in the behavior space. Second, the Quality-Diversity Transformer (QDT), a Transformer-based model conditioned on continuous behavior descriptors, which trains on a dataset generated by policies from a ME-LS repertoire and learns to autoregressively generate sequences of actions that achieve target behaviors. Results show that ME-LS produces consistent and robust policies, and that its combination with the QDT yields a single policy capable of achieving diverse behaviors on demand with high accuracy.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(naya-varela:2023:GECCO, author = {Mart\'{\i}n Naya-Varela and Andr\'{e}s Fa\'{\i}\~{n}a and Richard J. 
Duro}, title = {Guiding the Exploration of the Solution Space in Walking Robots Through {Growth-Based} Morphological Development}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1230--1238}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {noise, search trajectory network, legged robots, developmental robotics}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590489}, size = {9 pages}, abstract = {In human beings, the joint development of the body and cognitive system has been shown to facilitate the acquisition of new skills and abilities. In the literature, these natural principles have been applied to robotics with mixed results and different authors have suggested several hypotheses to explain them. One of the most popular hypotheses states that morphological development improves learning by increasing exploration of the solution space, avoiding stagnation in local optima. In this article, we are going to study the influence of growth-based morphological development and its nuances as a tool to improve the exploration of the solution space. We will perform a series of experiments over two different robot morphologies which learn to walk. Furthermore, we will compare these results to another optimization strategy that has been shown to be useful to favor exploration in learning algorithms: the application of noise during learning. Finally, to check if the increased exploration hypothesis holds, we visualize the genotypic space during learning considering the different optimization strategies by using the Search Trajectory Network representation. 
The results indicate that noise and growth increase exploration, but only growth guides the search towards good solutions.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(nguyen:2023:GECCO, author = {Thai Huy Nguyen and Ngoc Hoang Luong}, title = {Stable and {Sample-Efficient} Policy Search for Continuous Control via Hybridizing Phenotypic Evolutionary Algorithm with the Double Actors Regularized Critics}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1239--1247}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {variation operators, continuous control, evolutionary reinforcement learning, policy search}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590455}, size = {9 pages}, abstract = {Evolutionary Reinforcement Learning arises from hybridizing the sample efficiency of policy gradient with the stability of evolutionary computation. Proximal Distilled Evolutionary Reinforcement Learning (PDERL) implements the hybridization by having information transferred between an RL agent operating alongside a population of candidate policies. PDERL employs two phenotype-based variation operators, behavior distillation crossover and proximal mutation, which exhibit better effectiveness compared to traditional genotype-based operators. We demonstrate that the proximal mutation is sensitive to its mutation magnitude hyperparameter, which yields damaging effects if its value is improperly set. Inspired by Differential Evolution, we propose a novel mutation procedure that operates on action vectors generated by candidate policies. The phenotypic differential mutation (PhDM) shows its stability in diversity maintenance with little disruption. A recently-introduced actor-critic policy gradient algorithm, Double Actors Regularized Critics (DARC), exhibits superior sample efficiency. DARC alleviates both overestimation and underestimation bias via the usage of two actors for better exploration and a dedicated critic regularization technique. In this paper, we restructure PDERL to incorporate PhDM and the policy gradient mechanism of DARC. Experimental results show that our Phenotypic Evolutionary DARC (PhEDARC) outperforms both PDERL and DARC in four control tasks from OpenAI Gym. 
Ablation studies support our design choices.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(pedersen:2023:GECCO, author = {Joachim Pedersen and Sebastian Risi}, title = {Learning to Act through Evolution of Neural Diversity in Random Neural Networks}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1248--1256}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590460}, size = {9 pages}, abstract = {Biological nervous systems consist of networks of diverse, sophisticated information processors in the form of neurons of different classes. In most artificial neural networks (ANNs), neural computation is abstracted to an activation function that is usually shared between all neurons within a layer or even the whole network; training of ANNs focuses on synaptic optimization. In this paper, we propose the optimization of neuro-centric parameters to attain a set of diverse neurons that can perform complex computations. Demonstrating the promise of the approach, we show that evolving neural parameters alone allows agents to solve various reinforcement learning tasks without optimizing any synaptic weights. While not aiming to be an accurate biological model, parameterizing neurons to a larger degree than the current common practice allows us to ask questions about the computational abilities afforded by neural diversity in random neural networks. 
The presented results open up interesting future research directions, such as combining evolved neural diversity with activity-dependent plasticity.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(peng:2023:GECCO, author = {Yameng Peng and Andy Song and Vic Ciesielski and Haytham Fayek and Xiaojun Chang}, title = {Fast Evolutionary Neural Architecture Search by Contrastive Predictor with Linear Regions}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1257--1266}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {performance predictor, neural architecture search, training-free metric, evolutionary algorithm}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590452}, size = {10 pages}, abstract = {Evolutionary neural architecture search (ENAS) has emerged as a promising approach to finding high-performance neural architectures. However, widespread application has been limited by the expensive computational costs due to the nature of evolutionary algorithms. In this study, we aim to significantly reduce the computational costs of ENAS by involving a training-free performance metric. Specifically, the network performance can be estimated by the training-free metric with only a single forward pass. However, training-free metrics have their own challenges, in particular, an insufficient correlation with ground-truth performance. We adopt a Graph Convolutional Network (GCN) based contrastive predictor which can leverage the low cost of the training-free performance metric yet improve the correlation between the estimated performance and the true performance of the candidate architectures. Combining a training-free metric - the number of linear regions - with the GCN-based contrastive predictor and an active learning scheme, we propose Fast-ENAS which can achieve superior search efficiency and performance on the benchmark NAS-Bench-201 and DARTS search spaces. Furthermore, with a single GPU searching on the DARTS space, Fast-ENAS requires only 0.02 (29 minutes) and 0.026 (37 minutes) GPU days to achieve test error rates of 2.50\% and 24.30\% on CIFAR-10 and ImageNet respectively.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(thomson:2023:GECCO, author = {Sarah L. 
Thomson and Gabriela Ochoa and Nadarajen Veerapen and Krzysztof Michalak}, title = {Channel Configuration for Neural Architecture: Insights from the Search Space}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1267--1275}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {neural architecture search, fitness landscapes, local optima networks (LONs)}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590386}, size = {9 pages}, abstract = {We consider search spaces associated with neural network channel configuration. Architectures and their accuracy are visualised using low-dimensional Euclidean embedding (LDEE). Optimisation dynamics are captured using local optima networks (LONs). LONs are a compression of a fitness landscape: the nodes are local optima and the edges are search transitions between them. Several neural architecture search algorithms are tested on the search space and we discover that iterated local search (ILS) is a competitive algorithm for neural channel configuration. We additionally implement a landscape-aware ILS which performs well. Observations from the search and landscape space analyses bring visual clarity and insight to the science of neural network channel design: the results indicate that a high number of channels, kept constant throughout the network, is beneficial.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(xu:2023:GECCO, author = {Jinglue Xu and Suryanarayanan NAV and Hitoshi Iba}, title = {{MPENAS:} {Multi-Fidelity} {Predictor-Guided} Evolutionary Neural Architecture Search with {Zero-Cost} Proxies}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1276--1285}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {multi-fidelity optimization, zero-cost proxy, surrogate-assisted evolutionary computation, neural architecture search}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590513}, size = {10 pages}, abstract = {Neural architecture search (NAS) aims to automatically design suitable architectures of artificial neural networks (ANNs) under various situations. Recently, NAS based on zero-cost proxies can predict the performance of ANNs with the cost of a single forward/backward propagation pass at most. 
While zero-cost proxies can speed up NAS by orders of magnitude, the gap between the predicted and actual performance of ANNs prevents zero-cost proxies from identifying ANNs with top performance. One solution is to regard zero-cost proxies as a low-fidelity evaluation method and switch from zero-cost proxies to high-fidelity evaluation methods when the zero-cost proxies struggle at selecting architectures. Based on this idea, we propose Multi-fidelity Predictor-guided Evolutionary Neural Architecture Search (MPENAS). MPENAS is based on a novel surrogate-assisted evolutionary computation framework. With a predictor, MPENAS combines architecture encodings, zero-cost proxies, learning curve extrapolations, and fully trained ANNs' performance into one consistent fitness across different fidelities. To our knowledge, MPENAS is the first work that integrates zero-cost proxies into a multi-fidelity optimization framework. MPENAS outperforms ten other methods for the NAS-Bench-201 search space in all cases. In addition, we demonstrate the generalizability of MPENAS for the TransNAS-Bench-101 search space.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(al-rifaie:2023:GECCO, author = {Mohammad Majid al-Rifaie and Tim Blackwell}, title = {Tomographic Reconstruction with Search Space Expansion}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1286--1293}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {swarm optimisation, tomographic reconstruction, high dimensional optimisation, search space resizing}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590372}, size = {8 pages}, abstract = {A search space expansion process is proposed in the context of tomographic reconstruction (TR). The idea is to widen the effective search space in a series of increasing sizes with clamping on the search space boundary. The technique was tested on four simple phantoms and on the clinically important Shepp-Logan phantom. Dispersive flies optimisation (DFO), a lightweight particle swarm optimisation (PSO) variant, is shown to produce lower reproduction errors compared to standard TR toolbox algorithms. The expansion technique demonstrably decreases salt-and-pepper noise. DFO with 50 subspace searches was found to be superior to differential evolution, PSO and, more importantly, a number of conventional reconstruction techniques. To the best of our knowledge, this is the first work where search space expansion, in its literal form, is introduced, discussed and applied to this problem.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(andreadis:2023:GECCO, author = {Georgios Andreadis and Peter A.N. 
Bosman and Tanja Alderliesten}, title = {{MOREA:} A {GPU-Accelerated} Evolutionary Algorithm for {Multi-Objective} Deformable Registration of 3D Medical Images}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1294--1302}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {deformable image registration, multi-objective optimization, repair method, GOMEA, smart mesh initialization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590414}, size = {9 pages}, abstract = {Finding a realistic deformation that transforms one image into another, in case large deformations are required, is considered a key challenge in medical image analysis. Having a proper image registration approach to achieve this could unleash a number of applications requiring information to be transferred between images. Clinical adoption is currently hampered by many existing methods requiring extensive configuration effort before each use, or not being able to (realistically) capture large deformations. A recent multi-objective approach that uses the Multi-Objective Real-Valued Gene-pool Optimal Mixing Evolutionary Algorithm (MO-RV-GOMEA) and a dual-dynamic mesh transformation model has shown promise, exposing the trade-offs inherent to image registration problems and modeling large deformations in 2D. This work builds on this promise and introduces MOREA: the first evolutionary algorithm-based multi-objective approach to deformable registration of 3D images capable of tackling large deformations. MOREA includes a 3D biomechanical mesh model for physical plausibility and is fully GPU-accelerated. We compare MOREA to two state-of-the-art approaches on abdominal CT scans of 4 cervical cancer patients, with the latter two approaches configured for the best results per patient. 
Without requiring per-patient configuration, MOREA significantly outperforms these approaches on 3 of the 4 patients that represent the most difficult cases.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(bentley:2023:GECCO, author = {Peter Bentley and Soo Ling Lim and Paolo Arcaini and Fuyuki Ishikawa}, title = {Using a Variational Autoencoder to Learn Valid Search Spaces of Safely Monitored Autonomous Robots for {Last-Mile} Delivery}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1303--1311}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithm, variational autoencoder, learning latent representations, autonomous robots, scheduling}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590459}, size = {9 pages}, abstract = {The use of autonomous robots for delivery of goods to customers is an exciting new way to provide a reliable and sustainable service. However, in the real world, autonomous robots still require human supervision for safety reasons. We tackle the real-world problem of optimizing autonomous robot timings to maximize deliveries, while ensuring that there are never too many robots running simultaneously so that they can be monitored safely. We assess the use of a recent hybrid machine-learning-optimization approach COIL (constrained optimization in learned latent space) and compare it with a baseline genetic algorithm for the purposes of exploring variations of this problem. We also investigate new methods for improving the speed and efficiency of COIL. We show that only COIL can find valid solutions where appropriate numbers of robots run simultaneously for all problem variations tested. 
We also show that when COIL has learned its latent representation, it can optimize 10\% faster than the GA, making it a good choice for daily re-optimization of robots where delivery requests for each day are allocated to robots while maintaining safe numbers of robots running at once.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(bostelmann-arp:2023:GECCO, author = {Lukas Bostelmann-Arp and Christoph Steup and Sanaz Mostaghim}, title = {{Multi-Objective} Seed Curve Optimization for Coverage Path Planning in Precision Farming}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1312--1320}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {coverage path planning, evolutionary multi-objective optimization, agriculture}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590490}, size = {9 pages}, abstract = {Coverage path planning is one of the main challenges in precision farming. Here the goal is to compute a continuous optimal working path by combining individual tracks such that the entire field can be cultivated in the most efficient way. Various 2D and 3D approaches have already been presented in the literature. However, these are lacking in two major aspects: In each case, a seed curve is determined and offsetted to generate the individual tracks, but the seed curve is usually selected from a predefined set of alternatives. Additionally, the utilized objectives do not consider the efficiency of the tools used to work on the field, which typically depends on the path angle and the field's inclination. In this paper, we propose a scheme to cope with both aspects through an evolutionary multi-objective optimization approach. 
Three objectives are used, namely coverage, energy based on anisotropic friction and gravity, and precession, which describes the angle between the path direction and the gradient of the terrain.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(carazo-barbero:2023:GECCO, author = {Gonzalo Carazo-Barbero and Eva Besada-Portas and Jos\'{e} Luis Risco-Mart\'{\i}n and Jos\'{e} Antonio L\'{o}pez-Orozco}, title = {{EA-Based} {ASV} Trajectory Planner for Detecting Cyanobacterial Blooms in Freshwater}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1321--1329}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, decision making, robotics, multi-objective optimization, earth sciences and the environment}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590484}, size = {9 pages}, abstract = {Cyanobacterial Blooms (CBs) constitute a relevant ecological and public health problem since they often produce toxic metabolites that endanger the lives of many species, and they prevent human water consumption and recreational use. To determine the locations of CBs in lentic water bodies, we present a new planner based on Evolutionary Algorithms (EAs) that optimizes the trajectory of an Autonomous Surface Vehicle (ASV) equipped with a probe capable of detecting CBs. The planner 1) exploits the information provided by a particle transport simulator that determines the CB distribution from the water currents and the inherent CB behavior (in particular, its biological growth and vertical displacements) and 2) is supported by an EA that optimizes the mission duration, the ASV trajectory length, and the contributions of each simulated particle to the predicted cyanobacterial concentration along the ASV trajectory. The planner also ensures the trajectory feasibility from the ASV, probe, and water body perspective; and refines the trajectory shape by increasing the number of the decision variables during the iteration of an EA supported by usual NSGA-II operations. The results over different scenarios show that the planner determines overall good solutions that adapt the ASV trajectory to the evolution of CB distribution.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(clement:2023:GECCO, author = {Fran\c{c}ois Cl\'{e}ment and Diederick Vermetten and Jacob {De Nobel} and Alexandre D. 
Jesus and Lu\'{\i}s Paquete and Carola Doerr}, title = {Computing Star Discrepancies with Numerical {Black-Box} Optimization Algorithms}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1330--1338}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary computation, black-box optimization, star discrepancy, uniform distributions, parallel computing}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590456}, size = {9 pages}, abstract = {The L∞ star discrepancy is a measure for the regularity of a finite set of points taken from [0, 1)^d. Low discrepancy point sets are highly relevant for Quasi-Monte Carlo methods in numerical integration and several other applications. Unfortunately, computing the L∞ star discrepancy of a given point set is known to be a hard problem, with the best exact algorithms falling short for even moderate dimensions around 8. However, despite the difficulty of finding the global maximum that defines the L∞ star discrepancy of the set, local evaluations at selected points are inexpensive. This makes the problem tractable by black-box optimization approaches. In this work we compare 8 popular numerical black-box optimization algorithms on the L∞ star discrepancy computation problem, using a wide set of instances in dimensions 2 to 15. We show that all used optimizers perform very badly on a large majority of the instances and that in many cases random search outperforms even the more sophisticated solvers.
We suspect that state-of-the-art numerical black-box optimization techniques fail to capture the global structure of the problem, an important shortcoming that may guide their future development. We also provide a parallel implementation of the best-known algorithm to compute the discrepancy.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(edo:2023:GECCO, author = {Hinata Edo and Yoshiki Miyauchi and Atsuo Maki and Youhei Akimoto}, title = {Trade-off Between Robustness and {Worst-Case} Performance in {Min-Max} Optimization}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1339--1347}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {min-max optimization, robustness, bi-objective optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590362}, size = {9 pages}, abstract = {Min-max optimization is an approach to avoid the risk of obtaining a solution whose performance is satisfactory in a simulation scenario but significantly degraded in real-world scenarios, thereby obtaining a robust solution under the worst case in the considered scenarios. However, owing to the trade-off between robustness and worst-case performance, it is often difficult to design a set of scenarios in simulation-based optimization, known as the uncertainty set, which can be a practical barrier to applying min-max optimization. In this study, we propose a novel approach to obtaining the Pareto front of the bi-objective optimization for the size of the uncertainty set and worst-case performance under the uncertainty set. The proposed approach aims to obtain a set of solutions that cover the worst-case performance effectively but are better than a given performance threshold. The effectiveness of each algorithmic component was evaluated through numerical experiments using a synthetic problem.
Applications to automatic ship berthing tasks demonstrate the usefulness of the proposed approach and its limitations.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(goel:2023:GECCO, author = {Diksha Goel and Aneta Neumann and Frank Neumann and Hung Nguyen and Mingyu Guo}, title = {Evolving Reinforcement Learning Environment to Minimize Learner's Achievable Reward: An Application on Hardening Active Directory Systems}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1348--1356}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {reinforcement learning, evolutionary diversity optimization, attack graph, active directory}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590436}, size = {9 pages}, abstract = {We study a Stackelberg game between one attacker and one defender in a configurable environment. The defender picks a specific environment configuration. The attacker observes the configuration and attacks via Reinforcement Learning (RL trained against the observed environment). The defender's goal is to find the environment with minimum achievable reward for the attacker. We apply Evolutionary Diversity Optimization (EDO) to generate a diverse population of environments for training. Environments with clearly high rewards are killed off and replaced by new offspring to avoid wasting training time. Diversity not only improves training quality but also fits well with our RL scenario: RL agents tend to improve gradually, so a slightly worse environment earlier on may become better later. We demonstrate the effectiveness of our approach by focusing on a specific application, Active Directory (AD). AD is the default security management system for Windows domain networks. AD environment describes an attack graph, where nodes represent computers/accounts/etc., and edges represent accesses. The attacker aims to find the best attack path to reach the highest-privilege node. The defender can change the graph by removing a limited number of edges (revoke accesses).
Our approach generates better defensive plans than the existing approach and scales better.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(hayslep:2023:GECCO, author = {Matthew Hayslep and Edward Keedwell and Raziyeh Farmani}, title = {{Multi-Objective} {Multi-Gene} Genetic Programming for the Prediction of Leakage in Water Distribution Networks}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1357--1364}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, feature construction, leakage, minimum night flow, linear regression}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590499}, size = {8 pages}, abstract = {Understanding leakage is an important challenge within the water sector to minimise waste, energy use and carbon emissions. Every Water Distribution Network (WDN) has leakage, usually approximated as Minimum Night Flow (MNF) for each District Metered Area (DMA). However, not all DMAs have instruments to monitor leakage directly, or the main dynamic factors that contribute to it. Therefore, this article will estimate the leakage of a DMA by using the recorded features of its pipes, making use of readily available asset data collected routinely by water companies. This article interprets this problem as a feature construction task and uses a multi-objective multi-gene strongly typed genetic programming approach to create a set of features. These features are used by a linear regression model to estimate the average long-term leakage in DMAs and Shapley values are used to understand the impact and importance of each tree. The methodology is applied to a dataset for a real-world WDN with over 700 DMAs and the results are compared to a previous work which used human-constructed features. The results show comparable performance with significantly fewer, and less complex features. 
In addition, novel features are found that were not part of the human-constructed features.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(kiribuchi:2023:GECCO, author = {Daiki Kiribuchi and Ryoko Hatakeyama and Tomoshi Otsuki and Tatsuya Yoshioka and Kana Konno and Takumi Matsuda}, title = {Combined Layout Optimization of Wind Farm and Cable Connection on Complex Terrain Using a Genetic Algorithm}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1365--1373}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {cable connection layout optimization, capacitated minimum spanning tree, multi-objective optimization, integer linear programming, wind farm layout optimization, NSGA-II}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590377}, size = {9 pages}, abstract = {To improve the economic efficiency of wind farms, this paper proposes a method for simultaneously optimizing wind farm layout and cabling on complex terrain such as mountainous areas, which most previous studies have not considered. Multiple wind turbines should be placed to maximize energy production while minimizing the cable length (between wind turbines and between the substation and wind turbines). To optimize both, especially on complex terrain where wind speeds at a site are not constant, the proposed method combines a genetic algorithm (NSGA-II) and a capacitated minimum spanning tree approximation algorithm (Esau-Williams algorithm). For five sites with complex terrain, the proposed method is compared with the exact optimal solution obtained by the weighted sum method using the integer linear programming formulation. For a small number of candidate locations, the proposed method obtains a hyper-volume equivalent to the exact solution. In comparison, the proposed method can obtain a larger hyper-volume even in the case of many candidate locations where the weighted sum method is computationally infeasible in terms of practical resources and time. 
These results indicate that the proposed method effectively contributes to the wind farm design on complex terrain.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(kovacs:2023:GECCO, author = {Benjamin Kov\'{a}cs and Pierre Tassel and Martin Gebser}, title = {Optimizing Dispatching Strategies for Semiconductor Manufacturing Facilities with Genetic Programming}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1374--1382}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, semiconductor fab, dispatching heuristics}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590402}, size = {9 pages}, abstract = {Optimizing operations in semiconductor manufacturing facilities is challenging. The production line can be modeled as an NP-hard constrained flexible job-shop scheduling problem, intractable with mathematical optimization due to its scale. Therefore, decision-making in factories is dominated by handcrafted heuristics. Though machine learning-based approaches proved efficient in solving such problems, their applications are limited due to the lack of trust in the underlying black-box models, and issues with scalability for larger instances. This work presents a genetic programming-based method to generate explainable, improved dispatching heuristics. Our method outputs a set of human-readable dispatching strategies, verifiable by scheduling experts before deployment. In case of minor changes in the environment or the optimization objectives, the continued evolution of the candidate solutions is possible without starting the training process from scratch. The introduced method is evaluated on a simulator executing real-world scale instances. The resulting heuristics improve the key performance indicators of the generated schedules. Furthermore, the generated dispatchers are easy to integrate into existing industrial systems. 
These favorable properties make the method applicable to various large-scale, dynamic, practical scheduling scenarios, where adaptions to different environments go along with modest human effort limited to the design of a fitness function.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(lanzi:2023:GECCO, author = {Pier Luca Lanzi and Daniele Loiacono}, title = {{ChatGPT} and Other Large Language Models as Evolutionary Engines for Online Interactive Collaborative Game Design}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1383--1390}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {collaborative design, interactive evolution, large language models}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590351}, size = {8 pages}, abstract = {Large language models (LLMs) have taken the scientific world by storm, changing the landscape of natural language processing and human-computer interaction. These powerful tools can answer complex questions and, surprisingly, perform challenging creative tasks (e.g., generate code and applications to solve problems, write stories, pieces of music, etc.). In this paper, we present a collaborative game design framework that combines interactive evolution and large language models to simulate the typical human design process. We use the former to exploit users' feedback for selecting the most promising ideas and large language models for a very complex creative task---the recombination and variation of ideas. In our framework, the process starts with a brief and a set of candidate designs, either generated using a language model or proposed by the users. Next, users collaborate on the design process by providing feedback to an interactive genetic algorithm that selects, recombines, and mutates the most promising designs. 
We evaluated our framework on three game design tasks with human designers who collaborated remotely.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(lette:2023:GECCO, author = {Matthew Lette and Kamrul Hasan Rahi and Hemant Kumar Singh and Tapabrata Ray}, title = {{Vertical-Axis} Wind Turbine Design Using {Surrogate-Assisted} Optimization with Physical Experiments {In-Loop}}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1391--1399}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {multi-objective optimization, experiment in-loop, vertical-axis wind turbine, expensive optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590462}, size = {9 pages}, abstract = {Most of the existing wind power comes from the use of traditional Horizontal-Axis Wind Turbines (HAWT), which typically require installation over vast countryside areas (in wind farms) due to considerations such as wake effects and noise. Recently, there has been an increasing interest to explore an alternative solution - Vertical-Axis Wind Turbines (VAWT) - that may be more suitable for compact urban areas. In this paper, we conduct design optimization of twin-blade VAWTs by evaluating the candidate designs through direct small-scale prototyping and physical experiments in-loop. The problem is a practical example of expensive optimization where the number of evaluations affordable are severely limited. In addition to the conventional single-objective form (maximize rotational speed), we also solve a multi-objective version of the problem (maximize rotational speed and minimize mass). For conducting optimization, we leverage surrogate-assisted approaches that make use of both predicted mean and uncertainties in modeling for an efficient exploration of the design space. The experiments demonstrate that the approach is able to generate non-intuitive designs that are competitive or better than the baseline (classic twin-blade Savonius design) within a small evaluation budget. 
The study also strengthens the case for applying this approach for design optimization in general.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(lipinski:2023:GECCO, author = {Piotr Lipinski}, title = {Evolutionary Approach to Recommender Systems Improvement by Directory of Products Optimization}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1400--1408}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {recommender systems, evolutionary algorithms}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590519}, size = {9 pages}, abstract = {This paper concerns improving recommender systems using evolutionary algorithms that optimize the composition of the directory of products in the recommender system. It focuses on merging some infrequent products into certain groups of similar products and replacing the regular products by some aggregates of products, such as a category of products or a cluster of products in the product embedding space, and preparing recommendations with such aggregates. Evolutionary algorithms endeavor to decide which products should be processed as single products and which ones should be merged with others into smaller or larger groups of products (merging products into groups is done by a given auxiliary merging strategy that cluster similar products into groups) with the aim to maximize the accuracy of the recommender system. Computational experiments on benchmark datasets confirmed the efficiency of the approach.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(maclachlan:2023:GECCO, author = {Jordan MacLachlan and Yi Mei and Fangfang Zhang and Mengjie Zhang and Jessica Signal}, title = {Learning Emergency Medical Dispatch Policies Via Genetic Programming}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1409--1417}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, dynamic optimisation, hyper-heuristic, emergency medical dispatch, simulation}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590434}, size = {9 pages}, abstract = {Of great value to modern municipalities is the task of emergency medical response in the community. 
Resource allocation is vital to ensure minimal response times, which we may perform via human experts or automate by maximising ambulance coverage. To combat black-box modelling, we propose a modularised Genetic Programming Hyper Heuristic framework to learn the five key decisions of Emergency Medical Dispatch (EMD) within a reactive decision-making process. We minimise the representational distance between our work and reality by working with our local ambulance service to design a set of heuristics approximating their current decision-making processes and a set of synthetic datasets influenced by existing patterns in practice. Through our modularised framework, we learn each decision independently to identify those most valuable to EMD and learn all five decisions simultaneously, improving performance by 69\% on the largest novel dataset. We analyse the decision-making logic behind several learned rules to further improve our understanding of EMD. For example, we find that emergency urgency is not necessarily considered when dispatching idle ambulances in favour of maximising fleet availability.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(medina:2023:GECCO, author = {Alejandro Medina and Melanie Richey and Mark Mueller and Jacob Schrum}, title = {Evolving Flying Machines in Minecraft Using Quality Diversity}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1418--1426}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {quality diversity, minecraft, MAP-elites}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590352}, size = {9 pages}, abstract = {Minecraft is a great testbed for human creativity that has inspired the design of various structures and even functioning machines, including flying machines. EvoCraft is an API for programmatically generating structures in Minecraft, but the initial work in this domain was not capable of evolving flying machines. This paper applies fitness-based evolution and quality diversity search in order to evolve flying machines. 
Although fitness alone can occasionally produce flying machines, thanks in part to a more sophisticated fitness function than was used previously, the quality diversity algorithm MAP-Elites is capable of discovering flying machines much more reliably, at least when an appropriate behavior characterization is used to guide the search for diverse solutions.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(mrazek:2023:GECCO, author = {Vojtech Mrazek and Soyiba Jawed and Muhammad Arif and Aamir Saeed Malik}, title = {Effective {EEG} Feature Selection for Interpretable {MDD} (Major Depressive Disorder) Classification}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1427--1435}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {major depressive disorder, feature extraction, electroencephalogram (EEG)}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590398}, size = {9 pages}, abstract = {In this paper, we propose an interpretable electroencephalogram (EEG)-based solution for the diagnostics of major depressive disorder (MDD). The acquisition of EEG experimental data involved 32 MDD patients and 29 healthy controls. A feature matrix is constructed involving frequency decomposition of EEG data based on power spectrum density (PSD) using the Welch method. Those PSD features were selected, which were statistically significant. To improve interpretability, the best features are first selected from feature space via the non-dominated sorting genetic (NSGA-II) evolutionary algorithm. The best features are utilized for support vector machine (SVM), and k-nearest neighbors (k-NN) classifiers, and the results are then correlated with features to improve the interpretability. The results show that the features (gamma bands) extracted from the left temporal brain regions can distinguish MDD patients from control significantly. The proposed best solution by NSGA-II gives an average sensitivity of 93.3\%, specificity of 93.4\% and accuracy of 93.5\%. 
The complete framework is published as open-source at https://github.com/ehw-fit/eeg-mdd.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(neumann:2023:GECCO2, author = {Aneta Neumann and Sharlotte Gounder and Xiankun Yan and Gregory Sherman and Benjamin Campbell and Mingyu Guo and Frank Neumann}, title = {Diversity Optimization for the Detection and Concealment of Spatially Defined Communication Networks}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1436--1444}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {minimum area spanning tree, low probability detection, quality diversity, evolutionary diversity optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590405}, size = {9 pages}, abstract = {In recent years, computing diverse sets of high quality solutions for an optimization problem has become an important topic. The goal of computing diverse sets of high quality solutions is to provide a variety of options to decision makers, allowing them to choose the best solution for their particular problem. We consider the problem of constructing a wireless communication network for a given set of entities. Our goal is to minimize the area covered by the senders' transmissions while also avoiding adversaries that may observe the communication. We provide evolutionary diversity optimization (EDO) algorithms for this problem. We provide a formulation based on minimum spanning forests that are used as a representation and show how this formulation can be turned into a wireless communication network that avoids a given set of adversaries. We evaluate our EDO approach based on a number of benchmark instances and compare the diversity of the obtained populations in respect to the quality criterion of the given solutions as well as the chosen algorithm parameters. 
Our results demonstrate the effectiveness of our EDO approaches for the detection and concealment of communication networks both in terms of the quality and the diversity of the obtained solutions.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(panichella:2023:GECCO, author = {Annibale Panichella and Giuseppe {Di Domenico}}, title = {A Fast {Multi-Objective} Evolutionary Approach for Designing {Large-Scale} Optical Mode Sorter}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1445--1453}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {mode sorter, linkage learning, many-objective optimization}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590479}, size = {9 pages}, abstract = {Spatial mode division de-multiplexing of optical signals has many real-world applications, such as quantum computing and both classical and quantum optical communication. In this context, it is crucial to develop devices able to efficiently sort optical signals according to the optical mode they belong to and route them on different paths. Depending on the mode selected, this problem can be very hard to tackle. Recently, researchers have proposed using multi-objective evolutionary algorithms (MOEAs) ---and NSGA-II in particular--- combined with Linkage Learning (LL) to automate the process of design mode sorter. However, given the very large-search scale of the problem, the existing evolutionary-based solutions have a very slow convergence rate. In this paper, we proposed a novel approach for mode sorter design that combines (1) stochastic linkage learning, (2) the adaptive geometry estimation-based MOEA (AGE-MOEA-II), and (3) an adaptive mutation operator. Our experiments with two- and three-objectives (beams) show that our approach is faster (better convergence rate) and produces better mode sorters (closer to the ideal solutions) than the state-of-the-art approach. 
A direct comparison with the vanilla NSGA-II and AGE-MOEA-II also further confirms the importance of adopting LL in this domain.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(schj\o{}lberg:2023:GECCO, author = {Magnus Eide Schj\o{}lberg and Nicklas Paus Bekkevold and Xavier S\'{a}nchez-D\'{\i}az and Ole Jakob Mengshoel}, title = {Comparing Metaheuristic Optimization Algorithms for Ambulance Allocation: An Experimental Simulation Study}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1454--1463}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {stochastic local search, genetic algorithms, ambulance allocation, memetic algorithms, vehicle fleet management, response time, emergency medical service, optimization, simulation}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590345}, size = {10 pages}, abstract = {The optimization of Emergency Medical Services is a central issue in modern healthcare systems. With this in focus, we study a data set containing medical emergencies for the years 2015--2019 from Oslo and Akershus, Norway. By developing a discrete trace-based simulation model based on the data set, we compute average response times that are used to optimize ambulance allocations to stations in the region. We study several metaheuristics, specifically genetic, stochastic local search, and memetic algorithms. These metaheuristics are tested using the simulation to optimize ambulance allocations, considering response times. The algorithms are compared against each other and a set of baseline allocation models over different time periods. The main results of our experimental simulation study are that: (i) the metaheuristics generally outperform the simpler baselines, (ii) the best-performing metaheuristic is the genetic algorithm, and (iii) the performance difference between the metaheuristics and the simpler baselines increases in situations with high demand on ambulances. 
Finally, we present suggestions for future work that may help to further improve upon the current state-of-the-art.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(tonda:2023:GECCO, author = {Alberto Tonda and Isabelle Alvarez and Sophie Martin and Giovanni Squillero and Evelyne Lutton}, title = {Towards Evolutionary Control Laws for Viability Problems}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1464--1472}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {genetic algorithms, genetic programming, viable feedback, machine learning, viability theory}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590415}, size = {9 pages}, abstract = {The mathematical theory of viability, developed to formalize problems related to natural and social phenomena, investigates the evolution of dynamical systems under constraints. A main objective of this theory is to design control laws to keep systems inside viable domains. Control laws are traditionally defined as rules, based on the current position in the state space with respect to the boundaries of the viability kernel. However, finding these boundaries is a computationally expensive procedure, feasible only for trivial systems. We propose an approach based on Genetic Programming (GP) to discover control laws for viability problems in analytic form. Such laws could keep a system viable without the need of computing its viability kernel, facilitate communication with stakeholders, and improve explainability. A candidate set of control rules is encoded as GP trees describing equations. Evaluation is noisy, due to stochastic sampling: initial conditions are randomly drawn from the state space of the problem, and for each, a system of differential equations describing the system is solved, creating a trajectory. Candidate control laws are rewarded for keeping viable as many trajectories as possible, for as long as possible. 
The proposed approach is evaluated on established benchmarks for viability and delivers promising results.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(whitley:2023:GECCO, author = {Darrell Whitley and Ozeas {Quevedo De Carvalho} and Mark Roberts and Vivint Shetty and Piyabutra Jampathom}, title = {Scheduling {Multi-Resource} Satellites Using Genetic Algorithms and Permutation Based Representations}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1473--1481}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {order crossover, scheduling, steady state genetic algorithm}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590387}, size = {9 pages}, abstract = {The U.S. Navy currently deploys Genetic Algorithms to schedule multi-resource satellites. We document this real-world application and also introduce a new synthetic test problem generator. A permutation is used as the representation. A greedy scheduler then converts the permutation into a schedule which can be displayed as a Gantt chart. Surprisingly, there have been few careful comparisons of standard generational Genetic Algorithms and Steady State Genetic Algorithms for these types of problems. In addition, this paper compares different crossover operators for the multi-resource satellite scheduling problem. Finally, we look at two ways of mapping the permutation to a schedule in the form of a Gantt chart. One method gives priority to reducing conflicts, while the other gives priority to reducing overlaps of conflicting tasks. 
This can produce very different results, even when the evaluation function stays exactly the same.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(zambrano-gutierrez:2023:GECCO, author = {Daniel Zambrano-Gutierrez and Jorge Cruz-Duarte and Herman Casta\~{n}eda}, title = {Automatic {Hyper-Heuristic} to Generate {Heuristic-Based} Adaptive Sliding Mode Controller Tuners for {Buck-Boost} Converters}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1482--1489}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {metaheuristics, control engineering, electronic and electrical engineering, parameter tuning and algorithm configuration, complex systems}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590510}, size = {8 pages}, abstract = {Metaheuristics are commonly used to solve complex and challenging problems, particularly in electrical system applications. Nevertheless, there is a colorful palette of metaheuristics to select from, which can be overwhelming for a practitioner with solid experience in controlling electrical systems. Still, it is well-known that empirical or analytical tuning of controllers is no trivial labor. This work implements a methodology to address the Metaheuristic Composition Optimization Problem to coin a heuristic-based technique that guides an initially random population of individuals to find the optimal set of parameters of an Adaptive Sliding Mode Controller in the search space. As a case study, we implement this methodology for controlling the dynamic response of a DC-DC Buck-Boost converter under different conditions, including nonlinear perturbations. The objective is clear: find the optimal heuristic-based solver to determine the control parameters satisfying a predefined performance. Our experimental results reveal the reliability and potential of the proposed methodology when finding suitable solutions for control system design applications. 
We also support our findings with statistical analyses performed on the results obtained by the tailored metaheuristic against other classical heuristic-based methods.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(applis:2023:GECCO, author = {Leonhard Applis and Annibale Panichella and Ruben Marang}, title = {Searching for Quality: Genetic Algorithms and Metamorphic Testing for Software Engineering {ML}}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1490--1498}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590379}, size = {9 pages}, abstract = {More machine learning (ML) models are being introduced to the field of Software Engineering (SE) and have reached a stage of maturity to be considered for real-world use. But the real world is complex, and testing these models often lacks explainability, feasibility and computational capacity. Existing research introduced metamorphic testing to gain additional insights and certainty about the model, by applying semantic-preserving changes to input-data while observing model-output. As this is currently done at random places, it can lead to potentially unrealistic datapoints and high computational costs. With this work, we introduce genetic search as an aid for metamorphic testing in SE ML. Exploiting the delta in output as a fitness function, the evolutionary intelligence optimizes the transformations to produce higher deltas with fewer changes. We perform a case study minimizing F1 and MRR for Code2Vec on a representative sample from java-small with both genetic and random search.
Our results show that within the same amount of time, genetic search was able to achieve a decrease of 10\% in F1 while random search produced 3\% drop.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(brizzio:2023:GECCO, author = {Mat\'{\i}as Brizzio and Maxime Cordy and Mike Papadakis and C\'{e}sar S\'{a}nchez and Nazareno Aguirre and Renzo Degiovanni}, title = {Automated Repair of Unrealisable {LTL} Specifications Guided by Model Counting}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1499--1507}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {search-based software engineering, model counting, LTL-synthesis}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590454}, size = {9 pages}, abstract = {The reactive synthesis problem consists of automatically producing correct-by-construction operational models of systems from high-level formal specifications of their behaviours. However, specifications are often unrealisable, meaning that no system can be synthesised from the specification. To deal with this problem, we present AuRUS, a search-based approach to repair unrealisable Linear-Time Temporal Logic (LTL) specifications. AuRUS aims at generating solutions that are similar to the original specifications by using the notions of syntactic and semantic similarities. Intuitively, the syntactic similarity measures the text similarity between the specifications, while the semantic similarity measures the number of behaviours preserved/removed by the candidate repair. We propose a new heuristic based on model counting to approximate semantic similarity. We empirically assess AuRUS on many unrealisable specifications taken from different benchmarks and show that it can successfully repair all of them. 
Also, compared to related techniques, AuRUS can produce many unique solutions while showing more scalability.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(feldmeier:2023:GECCO, author = {Patric Feldmeier and Gordon Fraser}, title = {Learning by Viewing: Generating Test Inputs for Games by Integrating Human Gameplay Traces in Neuroevolution}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1508--1517}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {game testing, supervised learning, neuroevolution}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590448}, size = {10 pages}, abstract = {Although automated test generation is common in many programming domains, games still challenge test generators due to their heavy randomisation and hard-to-reach program states. Neuroevolution combined with search-based software testing principles has been shown to be a promising approach for testing games, but the co-evolutionary search for optimal network topologies and weights involves unreasonably long search durations. In this paper, we aim to improve the evolutionary search for game input generators by integrating knowledge about human gameplay behaviour. To this end, we propose a novel way of systematically recording human gameplay traces, and integrating these traces into the evolutionary search for networks using traditional gradient descent as a mutation operator. 
Experiments conducted on eight diverse Scratch games demonstrate that the proposed approach reduces the required search time from five hours down to only 52 minutes.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(gereziher:2023:GECCO, author = {Teklit Gereziher and Selam Gebrekrstos and Gregory Gay}, title = {{Search-Based} Test Generation Targeting {Non-Functional} Quality Attributes of Android Apps}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1518--1526}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {automated test generation, quality attributes, software quality, search-based software engineering, search-based test generation}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590449}, size = {9 pages}, abstract = {Mobile apps form a major proportion of the software marketplace and it is crucial to ensure that they meet both functional and nonfunctional quality thresholds. Automated test input generation can reduce the cost of the testing process. However, existing Android test generation approaches are focused on code coverage and cannot be customized to a tester's diverse goals---in particular, quality attributes such as resource use.We propose a flexible multi-objective search-based test generation framework for interface testing of Android apps---STGFA-SMOG. This framework allows testers to target a variety of fitness functions, corresponding to different software quality attributes, code coverage, and other test case properties. We find that STGFA-SMOG outperforms random test generation in exposing potential quality issues and triggering crashes. 
Our study also offers insights on how different combinations of fitness functions can affect test generation for Android apps.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(li-calsi:2023:GECCO, author = {Davide {Li Calsi} and Matias Duran and Thomas Laurent and Xiao-Yi Zhang and Paolo Arcaini and Fuyuki Ishikawa}, title = {Adaptive {Search-Based} Repair of Deep Neural Networks}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1527--1536}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {fault localisation, DNN repair, search-based software engineering, deep neural networks}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590477}, size = {10 pages}, abstract = {Deep Neural Networks (DNNs) are finding a place at the heart of more and more critical systems, and it is necessary to ensure they perform in as correct a way as possible. Search-based repair methods, that search for new values for target neuron weights in the network to better process fault-inducing inputs, have shown promising results. These methods rely on fault localisation to determine what weights the search should target. However, as the search progresses and the network evolves, the weights responsible for the faults in the system will change, and the search will lose in effectiveness. In this work, we propose an adaptive search method for DNN repair that adaptively updates the target weights during the search by performing fault localisation on the current state of the model. We propose and implement two methods to decide when to update the target weights, based on the progress of the search's fitness value or on the evolution of fault localisation results. 
We apply our technique to two image classification DNN architectures against a dataset of autonomous driving images, and compare it with a state-of-the-art search-based DNN repair approach.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(baguley:2023:GECCO, author = {Samuel Baguley and Tobias Friedrich and Aneta Neumann and Frank Neumann and Marcus Pappik and Ziena Zeif}, title = {Fixed Parameter {Multi-Objective} Evolutionary Algorithms for the {W-Separator} Problem}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1537--1545}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {runtime analysis, evolutionary algorithms, parameterized complexity}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590501}, size = {9 pages}, abstract = {Parameterized analysis provides powerful mechanisms for obtaining fine-grained insights into different types of algorithms. In this work, we combine this field with evolutionary algorithms and provide parameterized complexity analysis of evolutionary multiobjective algorithms for the W-separator problem, which is a natural generalization of the vertex cover problem. The goal is to remove the minimum number of vertices such that each connected component in the resulting graph has at most W vertices. We provide different multi-objective formulations involving two or three objectives that provably lead to fixed-parameter evolutionary algorithms with respect to the value of an optimal solution OPT and W. Of particular interest are kernelizations and the reducible structures used for them. We show that in expectation the algorithms make incremental progress in finding such structures and beyond. The current best known kernelization of the W-separator uses linear programming methods and requires a non-trivial post-process to extract the reducible structures. We provide additional structural features to show that evolutionary algorithms with appropriate objectives are also capable of extracting them. 
Our results show that evolutionary algorithms with different objectives guide the search and admit fixed parameterized runtimes to solve or approximate (even arbitrarily close) the W-separator problem.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(bossek:2023:GECCO2, author = {Jakob Bossek and Dirk Sudholt}, title = {Runtime Analysis of Quality Diversity Algorithms}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1546--1554}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {quality diversity, runtime analysis}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590383}, size = {9 pages}, abstract = {Quality diversity (QD) is a branch of evolutionary computation that gained increasing interest in recent years. The Map-Elites QD approach defines a feature space, i.e., a partition of the search space, and stores the best solution for each cell of this space. We study a simple QD algorithm in the context of pseudo-Boolean optimisation on the "number of ones" feature space, where the ith cell stores the best solution amongst those with a number of ones in [(i - 1)k, ik - 1]. Here k is a granularity parameter 1 ≤ k ≤ n+1. We give a tight bound on the expected time until all cells are covered for arbitrary fitness functions and for all k and analyse the expected optimisation time of QD on OneMax and other problems whose structure aligns favourably with the feature space. On combinatorial problems we show that QD finds a (1 - 1/e)-approximation when maximising any monotone sub-modular function with a single uniform cardinality constraint efficiently. 
Defining the feature space as the number of connected components of a connected graph, we show that QD finds a minimum spanning tree in expected polynomial time.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(doerr:2023:GECCO3, author = {Benjamin Doerr and Andrew James Kelley}, title = {Fourier Analysis Meets Runtime Analysis: Precise Runtimes on Plateaus}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1555--1564}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {runtime analysis, discrete fourier analysis, plateaus, mutation}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590393}, size = {10 pages}, abstract = {We propose a new method based on discrete Fourier analysis to analyze the time evolutionary algorithms spend on plateaus. This immediately gives a concise proof of the classic estimate of the expected runtime of the (1 + 1) evolutionary algorithm on the Needle problem due to Garnier, Kallel, and Schoenauer (1999). We also use this method to analyze the runtime of the (1 + 1) evolutionary algorithm on a benchmark consisting of n/ℓ plateaus of effective size 2^ℓ - 1, which have to be optimized sequentially in a LeadingOnes fashion. Using our new method, we determine the precise expected runtime both for static and fitness-dependent mutation rates. We also determine the asymptotically optimal static and fitness-dependent mutation rates. For ℓ = o(n), the optimal static mutation rate is approximately 1.59/n. The optimal fitness-dependent mutation rate, when the first k fitness-relevant bits have been found, is asymptotically 1/(k + 1). These results, so far only proven for the single-instance problem LeadingOnes, thus hold for a much broader class of problems. We expect similar extensions to be true for other important results on LeadingOnes. 
We are also optimistic that our Fourier analysis approach can be applied to other plateau problems as well.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(doerr:2023:GECCO4, author = {Carola Doerr and Duri Andrea Janett and Johannes Lengler}, title = {Tight Runtime Bounds for Static Unary Unbiased Evolutionary Algorithms on Linear Functions}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1565--1574}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {mutation operators, theory of evolutionary computation, runtime analysis}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590482}, size = {10 pages}, abstract = {In a seminal paper in 2013, Witt showed that the (1+1) Evolutionary Algorithm with standard bit mutation needs time (1 + o(1)) n ln n/p_1 to find the optimum of any linear function, as long as the probability p_1 to flip exactly one bit is Θ(1). In this paper we investigate how this result generalizes if standard bit mutation is replaced by an arbitrary unbiased mutation operator. This situation is notably different, since the stochastic domination argument used for the lower bound by Witt no longer holds. In particular, starting closer to the optimum is not necessarily an advantage, and OneMax is no longer the easiest function for arbitrary starting positions. Nevertheless, we show that Witt's result carries over if p_1 is not too small, with different constraints for upper and lower bounds, and if the number of flipped bits has bounded expectation χ. Notably, this includes some of the heavy-tail mutation operators used in fast genetic algorithms, but not all of them. 
We also give examples showing that algorithms with unbounded χ have qualitatively different trajectories close to the optimum.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(dolson:2023:GECCO, author = {Emily Dolson}, title = {Calculating Lexicase Selection Probabilities is {NP-Hard}}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1575--1583}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {lexicase selection, computational complexity theory}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590356}, size = {9 pages}, abstract = {Lexicase selection is a state-of-the-art parent selection algorithm in evolutionary computation. Recently, there have been efforts to develop stronger theoretical analyses of lexicase selection. Many of these analyses hinge on calculating the probability of an individual solution being selected under lexicase selection. Discovering a fast way to perform this calculation would be very helpful to the development of theory regarding lexicase selection, and would also have implications for efforts to develop practical improvements to lexicase selection. Here, I prove that this problem of calculating selection probabilities under lexicase selection, which I name lex-prob, is NP-Hard. I achieve this proof by reducing SAT, a well-known NP-Hard problem, to lex-prob in polynomial time. This reduction involves an intermediate step in which a popular variant of lexicase selection, ∊-lexicase selection, is reduced to standard lexicase selection. This result also has deeper theoretical implications about the relationship between ∊-lexicase selection and lexicase selection and the relationship between lex-prob and other NP-Hard problems. Finally, I present a highly-optimized brute-force algorithm (and open-source implementation) for performing these calculations. 
While the worst-case time complexity of this solution is, of course, exponential, it is capable of solving realistically-sized instances of the problem in approximately 15 seconds.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(friedrich:2023:GECCO, author = {Tobias Friedrich and Timo K\"{o}tzing and Aneta Neumann and Frank Neumann and Aishwarya Radhakrishnan}, title = {Analysis of (1+1) {EA} on {LeadingOnes} with Constraints}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1584--1592}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {Evolutionary algorithms, run time analysis, chance constraint optimization, theory}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590453}, size = {9 pages}, abstract = {Understanding how evolutionary algorithms perform on constrained problems has gained increasing attention in recent years. In this paper, we study how evolutionary algorithms optimize constrained versions of the classical LeadingOnes problem. We first provide a run time analysis for the classical (1+1) EA on the LeadingOnes problem with a deterministic cardinality constraint, giving Θ(n(n - B) log(B) + n^2) as the tight bound. Our results show that the behaviour of the algorithm is highly dependent on the constraint bound of the uniform constraint. Afterwards, we consider the problem in the context of stochastic constraints and provide insights using experimental studies on how the (μ+1) EA is able to deal with these constraints in a sampling-based setting.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(hevia-fajardo:2023:GECCO, author = {Mario Alejandro {Hevia Fajardo} and Per Kristian Lehre}, title = {How Fitness Aggregation Methods Affect the Performance of Competitive {CoEAs} on Bilinear Problems}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1593--1601}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {runtime analysis, competitive coevolution, maximin optimisation}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590506}, size = {9 pages}, abstract = {Competitive co-evolutionary algorithms (CoEAs) do not rely solely on an external function to assign fitness values to sampled solutions. 
Instead, they use the aggregation of outcomes from interactions between competing solutions, which allows them to rank solutions and make selection decisions. This makes CoEAs a useful tool for optimisation problems that have intrinsically interactive domains. Over the past decades, many ways to aggregate the outcomes of interactions have been considered. At the moment, it is unclear which of these is the best choice. Previous research is fragmented and most of the fitness aggregation methods (fitness measures) proposed have only been studied empirically. We argue that a proper understanding of the dynamics of CoEAs and their fitness measures can only be achieved through rigorous analysis of their behaviour. In this work we make a step towards this goal by using runtime analysis to study two commonly used fitness measures. We show a dichotomy in the behaviour of a (1, Λ) CoEA when optimising a Bilinear problem. The algorithm finds a Nash equilibrium efficiently if the worst interaction is used as a fitness measure but it takes exponential time w.o.p. if the average of all interactions is used instead.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(jorritsma:2023:GECCO, author = {Joost Jorritsma and Johannes Lengler and Dirk Sudholt}, title = {Comma Selection Outperforms Plus Selection on {OneMax} with Randomly Planted Optima}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1602--1610}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590488}, size = {9 pages}, abstract = {It is an ongoing debate whether and how comma selection in evolutionary algorithms helps to escape local optima. We propose a new benchmark function to investigate the benefits of comma selection: OneMax with randomly planted local optima, generated by frozen noise. We show that comma selection (the (1, Λ) EA) is faster than plus selection (the (1 + Λ) EA) on this benchmark, in a fixed-target scenario, and for offspring population sizes Λ for which both algorithms behave differently. For certain parameters, the (1, Λ) EA finds the target in Θ(n ln n) evaluations, with high probability (w.h.p.), while the (1 + Λ) EA w.h.p. requires almost Θ((n ln n)^2) evaluations. We further show that the advantage of comma selection is not arbitrarily large: w.h.p. comma selection outperforms plus selection at most by a factor of O(n ln n) for most reasonable parameter choices. We develop novel methods for analysing frozen noise and give powerful and general fixed-target results with tail bounds that are of independent interest.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(lehre:2023:GECCO2, author = {Per Kristian Lehre and Andrew M. 
Sutton}, title = {Runtime Analysis with Variable Cost}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1611--1618}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {variable cost model, runtime analysis, adaptive strategies}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590432}, size = {8 pages}, abstract = {The usual approach in runtime analysis is to derive estimates on the number of fitness function evaluations required by a method until a suitable element of the search space is found. One justification for this is that in real applications, fitness evaluation often contributes the most computational effort. A tacit assumption in this approach is that this effort is uniform and static across the search space. However, this assumption often does not hold in practice: some candidates may be far more expensive to evaluate than others. This might occur, for example, when fitness evaluation requires running a simulation or training a machine learning model.Despite the availability of a wide range of benchmark functions coupled with various runtime performance guarantees, the runtime analysis community currently lacks a solid perspective of handling variable fitness cost. Our goal with this paper is to argue for incorporating this perspective into our theoretical toolbox. We introduce two models of handling variable cost: a simple non-adaptive model together with a more general adaptive model. We prove cost bounds in these scenarios and discuss the implications for taking into account costly regions in the search space.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(lehre:2023:GECCO3, author = {Per Kristian Lehre and Xiaoyu Qin}, title = {{Self-Adaptation} Can Help Evolutionary Algorithms Track Dynamic Optima}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1619--1627}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {evolutionary algorithms, dynamic optimisation, self-adaptation}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590494}, size = {9 pages}, abstract = {Real-world optimisation problems often involve dynamics, where objective functions may change over time. Previous studies have shown that evolutionary algorithms (EAs) can solve dynamic optimisation problems. 
Additionally, the use of diversity mechanisms, populations, and parallelisation can enhance the performance of EAs in dynamic environments if appropriate parameter settings are utilised. Self-adaptation, which encodes parameters in genotypes of individuals and allows them to evolve together with solutions, can help configure parameters of EAs. This parameter control mechanism has been proved to effectively handle a static problem with unknown structure. However, the benefit of self-adaptation on dynamic optimisation problems remains unknown. We consider a tracking dynamic optima problem, the so-called Dynamic Substring Matching (DSM) problem, which requires algorithms to successively track a sequence of structure-changing optima. Our analyses show that mutation-based EAs with a fixed mutation rate have a negligible chance of tracking these dynamic optima, while the self-adaptive EA tracks them with an overwhelmingly high probability. Furthermore, we provide a level-based theorem with tail bounds, which bounds the chance of the algorithm finding the current optima within a given evaluation budget. Overall, self-adaptation is promising for tracking dynamic optima.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, ) @inproceedings(lengler:2023:GECCO, author = {Johannes Lengler and Andre Opris and Dirk Sudholt}, title = {Analysing Equilibrium States for Population Diversity}, booktitle = {Proceedings of the 2023 Genetic and Evolutionary Computation Conference}, year = {2023}, editor = {Sara Silva and Luis Paquete and Leonardo Vanneschi and Nuno Lourenco and Ales Zamuda and Ahmed Kheiri and Arnaud Liefooghe and Bing Xue and Ying Bi and Nelishia Pillay and Irene Moser and Arthur Guijt and Jessica Catarino and Pablo Garcia-Sanchez and Leonardo Trujillo and Carla Silva and Nadarajen Veerapen}, pages = {1628--1636}, address = {Lisbon, Portugal}, series = {GECCO '23}, month = {15-19 July}, organisation = {SIGEVO}, publisher = {Association for Computing Machinery}, publisher_address = {New York, NY, USA}, note = {}, keywords = {diversity, runtime analysis, population dynamics}, ISBN13 = {9798400701191}, url = {}, doi = {doi:10.1145/3583131.3590465}, size = {9 pages}, abstract = {Population diversity is crucial in evolutionary algorithms as it helps with global exploration and facilitates the use of crossover. Despite many runtime analyses showing advantages of population diversity, we have no clear picture of how diversity evolves over time.We study how population diversity of (μ + 1) algorithms, measured by the sum of pairwise Hamming distances, evolves in a fitness-neutral environment. We give an exact formula for the drift of population diversity and show that it is driven towards an equilibrium state. Moreover, we bound the expected time for getting close to the equilibrium state. We find that these dynamics, including the location of the equilibrium, are unaffected by surprisingly many algorithmic choices. All unbiased mutation operators with the same expected number of bit flips have the same effect on the expected diversity. Many crossover operators have no effect at all, including all binary unbiased, respectful operators. 
We review crossover operators from the literature and identify crossovers that are neutral towards the evolution of diversity and crossovers that are not.}, notes = {GECCO-2023 A Recombination of the 32nd International Conference on Genetic Algorithms (ICGA) and the 28th Annual Genetic Programming Conference (GP)}, )