@article{830b141725ca4350bb38e7f13014d1f1,
title = "First Steps Towards a Runtime Analysis When Starting With a Good Solution",
abstract = "The mathematical runtime analysis of evolutionary algorithms traditionally regards the time an algorithm needs to find a solution of a certain quality when initialized with a random population. In practical applications, it may be possible to guess solutions that are better than random ones. We start a mathematical runtime analysis for such situations. We observe that different algorithms profit to very different degrees from a better initialization. We also show that the optimal parameterization of an algorithm can depend strongly on the quality of the initial solutions. To overcome this difficulty, self-adjusting and randomized heavy-tailed parameter choices can be profitable. Finally, we observe a larger gap between the performance of the best evolutionary algorithm we found and the corresponding black-box complexity. This could suggest that evolutionary algorithms better exploiting good initial solutions are still to be found. These first findings stem from analyzing the performance of the $(1+1)$ evolutionary algorithm and the static, self-adjusting, and heavy-tailed $(1+(\lambda,\lambda))$ genetic algorithms on the OneMax benchmark. We are optimistic that the question of how to profit from good initial solutions is interesting beyond these first examples.",
author = "Denis Antipov and Maxim Buzdalov and Benjamin Doerr",
year = "2024",
month = jul,
day = "1",
doi = "10.1145/3675783",
language = "English",
journal = "ACM Transactions on Evolutionary Learning and Optimization",
issn = "2688-3007",
publisher = "Association for Computing Machinery",
}