chapter written, new 1d plots, new imgs

parent cc50202cce
commit 294cb4c501

arbeit/bibma.bib (279 lines changed)
@@ -1,128 +1,175 @@
% This file was created with JabRef 2.10.
% Encoding: UTF-8

@Article{back1993overview,
  Title     = {An overview of evolutionary algorithms for parameter optimization},
  Author    = {B{\"a}ck, Thomas and Schwefel, Hans-Paul},
  Journal   = {Evolutionary computation},
  Year      = {1993},
  Number    = {1},
  Pages     = {1--23},
  Volume    = {1},
  Publisher = {MIT Press},
  Url       = {https://www.researchgate.net/profile/Hans-Paul_Schwefel/publication/220375001_An_Overview_of_Evolutionary_Algorithms_for_Parameter_Optimization/links/543663d00cf2dc341db30452.pdf}
}

@Book{golub2012matrix,
  Title     = {Matrix computations},
  Author    = {Golub, Gene H and Van Loan, Charles F},
  Publisher = {JHU Press},
  Year      = {2012},
  Volume    = {3}
}

@Article{hsu1991dmffd,
  Title   = {A direct manipulation interface to free-form deformations},
  Author  = {Hsu, William M},
  Journal = {Master's thesis, Brown University},
  Year    = {1991},
  Url     = {https://cs.brown.edu/research/pubs/theses/masters/1991/hsu.pdf}
}

@Article{hsu1992direct,
  Title   = {Direct Manipulation of Free-Form Deformations},
  Author  = {Hsu, William M and Hughes, John F and Kaufman, Henry},
  Journal = {Computer Graphics},
  Year    = {1992},
  Pages   = {2},
  Volume  = {26},
  Url     = {http://graphics.cs.brown.edu/~jfh/papers/Hsu-DMO-1992/paper.pdf}
}

@Article{gaussNewton,
  Title   = {An Algorithm for Least-Squares Estimation of Nonlinear Parameters},
  Author  = {Donald W. Marquardt},
  Journal = {Journal of the Society for Industrial and Applied Mathematics},
  Year    = {1963},
  Number  = {2},
  Pages   = {431--441},
  Volume  = {11},
  Doi     = {10.1137/0111030},
  Eprint  = {https://doi.org/10.1137/0111030},
  Url     = {https://doi.org/10.1137/0111030}
}

@InProceedings{Menzel2006,
  Title     = {Direct Manipulation of Free Form Deformation in Evolutionary Design Optimisation},
  Author    = {Menzel, Stefan and Olhofer, Markus and Sendhoff, Bernhard},
  Booktitle = {Proceedings of the 9th International Conference on Parallel Problem Solving from Nature},
  Year      = {2006},
  Address   = {Berlin, Heidelberg},
  Pages     = {352--361},
  Publisher = {Springer-Verlag},
  Series    = {PPSN'06},
  Acmid     = {2079770},
  Doi       = {10.1007/11844297_36},
  ISBN      = {3-540-38990-3, 978-3-540-38990-3},
  Location  = {Reykjavik, Iceland},
  Numpages  = {10},
  Url       = {http://dx.doi.org/10.1007/11844297_36}
}

@Article{minai2006complex,
  Title     = {Complex engineered systems: A new paradigm},
  Author    = {Minai, Ali A and Braha, Dan and Bar-Yam, Yaneer},
  Journal   = {Complex engineered systems: Science meets technology},
  Year      = {2006},
  Pages     = {1--21},
  Publisher = {Springer},
  Url       = {https://www.researchgate.net/profile/Yaneer_Bar-Yam/publication/225104044_Complex_Engineered_Systems_A_New_Paradigm/links/59107f20a6fdccbfd57eb84d/Complex-Engineered-Systems-A-New-Paradigm.pdf}
}

@Article{anrichterEvol,
  Title     = {Evolvability as a Quality Criterion for Linear Deformation Representations in Evolutionary Optimization},
  Author    = {Richter, Andreas and Achenbach, Jascha and Menzel, Stefan and Botsch, Mario},
  Booktitle = {IEEE Congress on Evolutionary Computation},
  Year      = {2016},
  Location  = {Vancouver, Canada},
  Publisher = {IEEE},
  Note      = {\url{http://graphics.uni-bielefeld.de/publications/cec16.pdf}, \url{https://pub.uni-bielefeld.de/publication/2902698}}
}

@InProceedings{richter2015evolvability,
  Title        = {Evolvability of representations in complex system engineering: a survey},
  Author       = {Richter, Andreas and Botsch, Mario and Menzel, Stefan},
  Booktitle    = {Evolutionary Computation (CEC), 2015 IEEE Congress on},
  Year         = {2015},
  Organization = {IEEE},
  Pages        = {1327--1335},
  Url          = {http://www.graphics.uni-bielefeld.de/publications/cec15.pdf}
}

@InBook{Rothlauf2006,
  Title     = {Representations for Genetic and Evolutionary Algorithms},
  Author    = {Rothlauf, Franz},
  Booktitle = {Representations for Genetic and Evolutionary Algorithms},
  Chapter   = {2},
  Pages     = {9--32},
  Publisher = {Springer Berlin Heidelberg},
  Address   = {Berlin, Heidelberg},
  Year      = {2006},
  Doi       = {10.1007/3-540-32444-5_2},
  ISBN      = {978-3-540-32444-7},
  Url       = {https://doi.org/10.1007/3-540-32444-5_2},
  Abstract  = {In this second chapter, we present an introduction into the field of representations for genetic and evolutionary algorithms. The chapter provides the basis and definitions which are essential for understanding the content of this work.}
}

@Article{spitzmuller1996bezier,
  Title     = {Partial derivatives of Bèzier surfaces},
  Author    = {Spitzmüller, Klaus},
  Journal   = {Computer-Aided Design},
  Year      = {1996},
  Number    = {1},
  Pages     = {67--72},
  Volume    = {28},
  Publisher = {Elsevier},
  Url       = {https://doi.org/10.1016/0010-4485(95)00044-5}
}

@InProceedings{thorhauer2014locality,
  Title        = {On the locality of standard search operators in grammatical evolution},
  Author       = {Thorhauer, Ann and Rothlauf, Franz},
  Booktitle    = {International Conference on Parallel Problem Solving from Nature},
  Year         = {2014},
  Organization = {Springer},
  Pages        = {465--475},
  Url          = {https://www.lri.fr/~hansen/proceedings/2014/PPSN/papers/8672/86720465.pdf}
}

@Article{wagner1996complex,
  Title   = {Complex adaptations and the evolution of evolvability},
  Author  = {Wagner, Gunter P and Altenberg, Lee},
  Journal = {Evolution},
  Year    = {1996},
  Number  = {3},
  Pages   = {967--976},
  Volume  = {50},
  Url     = {http://arep.med.harvard.edu/pdf/Wagner96.pdf}
}

@Article{weise2012evolutionary,
  Title   = {Evolutionary Optimization: Pitfalls and Booby Traps},
  Author  = {Weise, Thomas and Chiong, Raymond and Tang, Ke},
  Journal = {J. Comput. Sci. \& Technol},
  Year    = {2012},
  Number  = {5},
  Volume  = {27},
  Url     = {http://jcst.ict.ac.cn:8080/jcst/EN/article/downloadArticleFile.do?attachType=PDF\&id=9543}
}
BIN arbeit/img/weise_fig3.png (new file, 183 KiB)
Binary file not shown.
arbeit/ma.md (68 lines changed)
@@ -44,7 +44,9 @@ etc.), the translation of the problem--domain into a simple parametric
representation can be challenging.

The quality of such a representation in biological evolution is called
*evolvability*\cite{wagner1996complex} and is at the core of this thesis, as the
parametrization of the problem has serious implications on the convergence speed
and the quality of the solution\cite{Rothlauf2006}.
However, there is no consensus on how *evolvability* is defined and the meaning
varies from context to context\cite{richter2015evolvability}.

@@ -196,59 +198,72 @@ however, is very generic and we introduce it here in a broader sense.
\end{algorithm}

The general shape of an evolutional algorithm (adapted from
\cite{back1993overview}) is outlined in Algorithm \ref{alg:evo}. Here, $P(t)$
denotes the population of parameters in step $t$ of the algorithm. The
population contains $\mu$ individuals $a_i$ that fit the shape of the parameters
we are looking for. Typically these are initialized by a random guess or just
zero. Further on we need a so--called *fitness--function* $\Phi : I \mapsto M$ that
maps each parameter to a measurable space, along with a convergence--function
$c : I \mapsto \mathbb{B}$ that terminates the optimization.

The main algorithm just repeats the following steps:

- **Recombine** with a recombination--function $r : I^{\mu} \mapsto I^{\lambda}$ to
  generate new individuals based on the parents' characteristics.
  This makes sure that the next guess is close to the old guess.
- **Mutate** with a mutation--function $m : I^{\lambda} \mapsto I^{\lambda}$ to
  introduce new effects that cannot be produced by mere recombination of the
  parents.
  Typically this just adds minor defects to individual members of the population,
  like adding random gaussian noise or amplifying/dampening random parts.
- **Selection** takes a selection--function $s : (I^\lambda \cup I^{\mu + \lambda},\Phi) \mapsto I^\mu$ that
  selects from the previously generated $I^\lambda$ children and optionally also
  the parents (denoted by the set $Q$ in the algorithm) using the
  fitness--function $\Phi$. The result of this operation is the next population
  of $\mu$ individuals.

All these functions can (and mostly do) have a lot of hidden parameters that
can be changed over time. One can, for example, start off with a high
mutation--rate that cools off over time (i.e. by lowering the variance of a
gaussian noise).
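
To make the loop above concrete, the following is a minimal sketch of such a
$(\mu + \lambda)$ strategy. It is illustrative Python, not the implementation
used in this thesis; the function name `evolve`, the default sizes and the
cooling factor are assumptions.

```python
# A minimal (mu + lambda) evolution strategy following the outline above.
# Illustrative only -- not the implementation used in this thesis.
import random

def evolve(fitness, dim, mu=10, lam=40, sigma=1.0, cooling=0.99,
           steps=200, init=(-1.0, 1.0)):
    # P(0): mu individuals, initialized by a random guess inside `init`
    population = [[random.uniform(*init) for _ in range(dim)] for _ in range(mu)]
    for _ in range(steps):  # the convergence-function c is a plain step limit here
        # r : I^mu -> I^lambda -- intermediate recombination of two random parents
        children = [[(x + y) / 2.0 for x, y in zip(*random.sample(population, 2))]
                    for _ in range(lam)]
        # m : I^lambda -> I^lambda -- additive gaussian noise on every component
        children = [[x + random.gauss(0.0, sigma) for x in child] for child in children]
        # s : (I^(mu + lambda), Phi) -> I^mu -- keep the mu fittest of parents and
        # children; the fitness is an error here, so smaller values are better
        population = sorted(population + children, key=fitness)[:mu]
        sigma *= cooling  # hidden parameter: the mutation-rate cools off over time
    return population[0]

# usage: approximate the minimum of a simple quadratic error-function
best = evolve(lambda xs: sum(x * x for x in xs), dim=5)
```

The hidden parameters mentioned above ($\mu$, $\lambda$, the noise variance
`sigma` and its `cooling` factor) simply show up as ordinary arguments here.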

## Advantages of evolutional algorithms
\label{sec:back:evogood}

The main advantage of evolutional algorithms is the ability to find optima of
general functions just with the help of a given fitness--function. With this,
most problems of simple gradient--based procedures can be avoided: these
procedures often target the same error--function (which measures the fitness)
as an evolutional algorithm, but can easily get stuck in local optima.

Components and techniques for evolutional algorithms are specifically known to
help with different problems arising in the domain of
optimization\cite{weise2012evolutionary}. An overview of the typical problems
is shown in figure \ref{fig:probhard}.

\begin{figure}[!ht]
\includegraphics[width=\textwidth]{img/weise_fig3.png}
\caption{Fig.~3 taken from \cite{weise2012evolutionary}}
\label{fig:probhard}
\end{figure}

Most of the advantages stem from the fact that a gradient--based procedure has
only one point of observation from where it evaluates the next steps, whereas an
evolutional strategy starts with a population of guessed solutions. Because an
evolutional strategy modifies the solution randomly, keeps the best solutions
and purges the worst, it can also target multiple different hypotheses at the
same time, where the local optima die out in the face of other, better
candidates.
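
The benefit of having a whole population of observation points can be
illustrated on a small multimodal error--function. This is a toy example that
reuses the hypothetical `evolve` sketch from above and is not an experiment
from this thesis; the error--function and all constants are made up.

```python
# Toy comparison on a multimodal error function (global minimum at x = 0).
# Reuses evolve() from the sketch above; purely illustrative.
import math

def error(x):     # many local minima, one per integer, global minimum at x = 0
    return x * x + 10.0 * (1.0 - math.cos(2.0 * math.pi * x))

def gradient(x):  # analytic derivative of the error function above
    return 2.0 * x + 20.0 * math.pi * math.sin(2.0 * math.pi * x)

x = 3.0           # a single point of observation, started inside a local basin
for _ in range(1000):
    x -= 0.001 * gradient(x)      # small gradient steps cannot leave the basin
print("gradient descent:", round(x, 2))              # stays near x = 3

# the population starts in the same basin, but mutation lets it escape
best = evolve(lambda xs: error(xs[0]), dim=1, init=(2.5, 3.5))
print("evolutional strategy:", round(best[0], 2))    # typically close to 0
```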

If an analytic best solution exists and is easily computable (i.e. because the
error--function is convex), an evolutional algorithm is not the right choice.
Although both converge to the same solution, the analytic one is usually faster.

But in reality many problems have no analytic solution, because the problem is
either not convex or has so many parameters that an analytic solution (mostly
meaning the equivalence to an exhaustive search) is computationally not
feasible. Here evolutional optimization has one more advantage, as you can at
least get suboptimal solutions fast, which then refine over time.

## Criteria for the evolvability of linear deformations
\label{sec:intro:rvi}
@@ -427,8 +442,13 @@ linear equations.
## Is the parametrization sensible?

- drawbacks of a parametrization
- as seen in chapter \ref{sec:back:evo}, the parametrization is
  important\cite{Rothlauf2006}.
- the parametrization is local, but not 1:1
- the deformation around a control point can be controlled much more directly.
- => DM--FFD can help, further study.
- a bad parametrization may lead to CP not being used for the
  parametrization at all.


# Scenarios for testing evolvability criteria using \acf{FFD}
@@ -480,7 +500,7 @@ linear equations.
## Results of 1D Function Approximation

\begin{figure}[!ht]
\includegraphics[width=\textwidth]{img/evolution1d/20171005-all_appended.png}
\caption{Results 1D}
\end{figure}
BIN arbeit/ma.pdf
Binary file not shown.
@@ -189,8 +189,10 @@ into a simple parametric representation can be challenging.

The quality of such a representation in biological evolution is called
\emph{evolvability}\cite{wagner1996complex} and is at the core of this
thesis, as the parametrization of the problem has serious implications
on the convergence speed and the quality of the
solution\cite{Rothlauf2006}. However, there is no consensus on how
\emph{evolvability} is defined and the meaning varies from context to
context\cite{richter2015evolvability}.

As we transfer the results of Richter et al.\cite{anrichterEvol} from
@@ -331,10 +333,10 @@ optimization?}\label{what-is-evolutional-optimization}

\label{sec:back:evo}

In this thesis we are using an evolutional optimization strategy to
solve the problem of finding the best parameters for our deformation.
This approach, however, is very generic and we introduce it here in a
broader sense.

\begin{algorithm}
\caption{An outline of evolutional algorithms}
@@ -354,13 +356,13 @@ generic and we introduce it here in a broader sense.
\end{algorithm}

The general shape of an evolutional algorithm (adapted from
\cite{back1993overview}) is outlined in Algorithm \ref{alg:evo}. Here,
\(P(t)\) denotes the population of parameters in step \(t\) of the
algorithm. The population contains \(\mu\) individuals \(a_i\) that fit
the shape of the parameters we are looking for. Typically these are
initialized by a random guess or just zero. Further on we need a
so--called \emph{fitness--function} \(\Phi : I \mapsto M\) that maps
each parameter to a measurable space, along with a convergence--function
\(c : I \mapsto \mathbb{B}\) that terminates the optimization.

The main algorithm just repeats the following steps:
@@ -368,58 +370,73 @@ The main algorithm just repeats the following steps:

\begin{itemize}
\tightlist
\item
  \textbf{Recombine} with a recombination--function
  \(r : I^{\mu} \mapsto I^{\lambda}\) to generate new individuals based
  on the parents' characteristics.\\
  This makes sure that the next guess is close to the old guess.
\item
  \textbf{Mutate} with a mutation--function
  \(m : I^{\lambda} \mapsto I^{\lambda}\) to introduce new effects that
  cannot be produced by mere recombination of the parents.\\
  Typically this just adds minor defects to individual members of the
  population, like adding random gaussian noise or amplifying/dampening
  random parts.
\item
  \textbf{Selection} takes a selection--function
  \(s : (I^\lambda \cup I^{\mu + \lambda},\Phi) \mapsto I^\mu\) that
  selects from the previously generated \(I^\lambda\) children and
  optionally also the parents (denoted by the set \(Q\) in the
  algorithm) using the fitness--function \(\Phi\). The result of this
  operation is the next population of \(\mu\) individuals.
\end{itemize}

All these functions can (and mostly do) have a lot of hidden parameters
that can be changed over time. One can, for example, start off with a high
mutation--rate that cools off over time (i.e.~by lowering the variance
of a gaussian noise).

\section{Advantages of evolutional
algorithms}\label{advantages-of-evolutional-algorithms}

\label{sec:back:evogood}

The main advantage of evolutional algorithms is the ability to find
optima of general functions just with the help of a given
fitness--function. With this, most problems of simple gradient--based
procedures can be avoided: these procedures often target the same
error--function (which measures the fitness) as an evolutional
algorithm, but can easily get stuck in local optima.

Components and techniques for evolutional algorithms are specifically
known to help with different problems arising in the domain of
optimization\cite{weise2012evolutionary}. An overview of the typical
problems is shown in figure \ref{fig:probhard}.

\begin{figure}[!ht]
\includegraphics[width=\textwidth]{img/weise_fig3.png}
\caption{Fig.~3 taken from \cite{weise2012evolutionary}}
\label{fig:probhard}
\end{figure}

Most of the advantages stem from the fact that a gradient--based
procedure has only one point of observation from where it evaluates the
next steps, whereas an evolutional strategy starts with a population of
guessed solutions. Because an evolutional strategy modifies the solution
randomly, keeps the best solutions and purges the worst, it can also
target multiple different hypotheses at the same time, where the local
optima die out in the face of other, better candidates.

If an analytic best solution exists and is easily computable
(i.e.~because the error--function is convex), an evolutional algorithm is
not the right choice. Although both converge to the same solution, the
analytic one is usually faster.

But in reality many problems have no analytic solution, because the
problem is either not convex or has so many parameters that an
analytic solution (mostly meaning the equivalence to an exhaustive
search) is computationally not feasible. Here evolutional optimization
has one more advantage, as you can at least get suboptimal solutions
fast, which then refine over time.

\section{Criteria for the evolvability of linear
deformations}\label{criteria-for-the-evolvability-of-linear-deformations}
@@ -606,10 +623,18 @@ system of linear equations.
\tightlist
\item
  drawbacks of a parametrization
\item
  as seen in chapter \ref{sec:back:evo}, the parametrization is
  important\cite{Rothlauf2006}.
\item
  the parametrization is local, but not 1:1
\item
  the deformation around a control point can be controlled much more directly.
\item
  =\textgreater{} DM--FFD can help, further study.
\item
  a bad parametrization may lead to CP not being used for the
  parametrization at all.
\end{itemize}

\chapter{\texorpdfstring{Scenarios for testing evolvability criteria
@@ -710,7 +735,7 @@ Optimierung}\label{besonderheiten-der-optimierung}
Approximation}\label{results-of-1d-function-approximation}

\begin{figure}[!ht]
\includegraphics[width=\textwidth]{img/evolution1d/20171005-all_appended.png}
\caption{Results 1D}
\end{figure}
BIN dokumentation/evolution1d/20171005-all_appended.png (new file, 29 KiB)
Binary file not shown.