chapter written, new 1D plots, new images

Nicole Dresselhaus 2017-10-10 13:53:17 +02:00
parent cc50202cce
commit 294cb4c501
Signed by: Drezil
GPG Key ID: 057D94F356F41E25
6 changed files with 268 additions and 176 deletions


@ -1,128 +1,175 @@
% This file was created with JabRef 2.10.
% Encoding: UTF-8
@Article{back1993overview,
Title = {An overview of evolutionary algorithms for parameter optimization},
Author = {B{\"a}ck, Thomas and Schwefel, Hans-Paul},
Journal = {Evolutionary computation},
Year = {1993},
Number = {1},
Pages = {1--23},
Volume = {1},
Publisher = {MIT Press},
Url = {https://www.researchgate.net/profile/Hans-Paul_Schwefel/publication/220375001_An_Overview_of_Evolutionary_Algorithms_for_Parameter_Optimization/links/543663d00cf2dc341db30452.pdf}
}
@Book{golub2012matrix,
Title = {Matrix computations},
Author = {Golub, Gene H and Van Loan, Charles F},
Publisher = {JHU Press},
Year = {2012},
Volume = {3}
}
@Article{hsu1991dmffd,
Title = {A direct manipulation interface to free-form deformations},
Author = {Hsu, William M},
Journal = {Master's thesis, Brown University},
Year = {1991},
Url = {https://cs.brown.edu/research/pubs/theses/masters/1991/hsu.pdf}
}
@Article{hsu1992direct,
Title = {Direct Manipulation of Free-Form Deformations},
Author = {Hsu, William M and Hughes, John F and Kaufman, Henry},
Journal = {Computer Graphics},
Year = {1992},
Pages = {2},
Volume = {26},
Url = {http://graphics.cs.brown.edu/~jfh/papers/Hsu-DMO-1992/paper.pdf}
}
@Article{gaussNewton,
Title = {An Algorithm for Least-Squares Estimation of Nonlinear Parameters},
Author = {Donald W. Marquardt},
Journal = {Journal of the Society for Industrial and Applied Mathematics},
Year = {1963},
Number = {2},
Pages = {431-441},
Volume = {11},
Doi = {10.1137/0111030},
Eprint = {https://doi.org/10.1137/0111030},
Url = {https://doi.org/10.1137/0111030}
}
@InProceedings{Menzel2006,
Title = {Direct Manipulation of Free Form Deformation in Evolutionary Design Optimisation},
Author = {Menzel, Stefan and Olhofer, Markus and Sendhoff, Bernhard},
Booktitle = {Proceedings of the 9th International Conference on Parallel Problem Solving from Nature},
Year = {2006},
Address = {Berlin, Heidelberg},
Pages = {352--361},
Publisher = {Springer-Verlag},
Series = {PPSN'06},
Acmid = {2079770},
Doi = {10.1007/11844297_36},
ISBN = {3-540-38990-3, 978-3-540-38990-3},
Location = {Reykjavik, Iceland},
Numpages = {10},
Url = {http://dx.doi.org/10.1007/11844297_36}
}
@Article{minai2006complex,
Title = {Complex engineered systems: A new paradigm},
Author = {Minai, Ali A and Braha, Dan and Bar-Yam, Yaneer},
Journal = {Complex engineered systems: Science meets technology},
Year = {2006},
Pages = {1--21},
Publisher = {Springer},
Url = {https://www.researchgate.net/profile/Yaneer_Bar-Yam/publication/225104044_Complex_Engineered_Systems_A_New_Paradigm/links/59107f20a6fdccbfd57eb84d/Complex-Engineered-Systems-A-New-Paradigm.pdf}
}
@Article{anrichterEvol,
Title = {Evolvability as a Quality Criterion for Linear Deformation Representations in Evolutionary Optimization},
Author = {Richter, Andreas and Achenbach, Jascha and Menzel, Stefan and Botsch, Mario},
Year = {2016},
Note = {\url{http://graphics.uni-bielefeld.de/publications/cec16.pdf}, \url{https://pub.uni-bielefeld.de/publication/2902698}},
Booktitle = {IEEE Congress on Evolutionary Computation},
Location = {Vancouver, Canada},
Publisher = {IEEE}
}
@InProceedings{richter2015evolvability,
Title = {Evolvability of representations in complex system engineering: a survey},
Author = {Richter, Andreas and Botsch, Mario and Menzel, Stefan},
Booktitle = {Evolutionary Computation (CEC), 2015 IEEE Congress on},
Year = {2015},
Organization = {IEEE},
Pages = {1327--1335},
Url = {http://www.graphics.uni-bielefeld.de/publications/cec15.pdf}
}
@InBook{Rothlauf2006,
Title = {Representations for Genetic and Evolutionary Algorithms},
Author = {Rothlauf, Franz},
Chapter = {2},
Pages = {9--32},
Publisher = {Springer Berlin Heidelberg},
Year = {2006},
Address = {Berlin, Heidelberg},
Abstract = {In this second chapter, we present an introduction into the field of representations for genetic and evolutionary algorithms. The chapter provides the basis and definitions which are essential for understanding the content of this work.},
Booktitle = {Representations for Genetic and Evolutionary Algorithms},
Doi = {10.1007/3-540-32444-5_2},
ISBN = {978-3-540-32444-7},
Url = {https://doi.org/10.1007/3-540-32444-5_2}
}
@Article{spitzmuller1996bezier,
Title = {Partial derivatives of Bézier surfaces},
Author = {Spitzmüller, Klaus},
Journal = {Computer-Aided Design},
Year = {1996},
Number = {1},
Pages = {67--72},
Volume = {28},
Publisher = {Elsevier},
Url = {https://doi.org/10.1016/0010-4485(95)00044-5}
}
@InProceedings{thorhauer2014locality,
Title = {On the locality of standard search operators in grammatical evolution},
Author = {Thorhauer, Ann and Rothlauf, Franz},
Booktitle = {International Conference on Parallel Problem Solving from Nature},
Year = {2014},
Organization = {Springer},
Pages = {465--475},
Url = {https://www.lri.fr/~hansen/proceedings/2014/PPSN/papers/8672/86720465.pdf}
}
@Article{wagner1996complex,
Title = {Complex adaptations and the evolution of evolvability},
Author = {Wagner, Gunter P and Altenberg, Lee},
Journal = {Evolution},
Year = {1996},
Number = {3},
Pages = {967--976},
Volume = {50},
Url = {http://arep.med.harvard.edu/pdf/Wagner96.pdf}
}
@Article{weise2012evolutionary,
Title = {Evolutionary Optimization: Pitfalls and Booby Traps},
Author = {Weise, Thomas and Chiong, Raymond and Tang, Ke},
Journal = {J. Comput. Sci. \& Technol},
Year = {2012},
Number = {5},
Volume = {27},
Url = {http://jcst.ict.ac.cn:8080/jcst/EN/article/downloadArticleFile.do?attachType=PDF\&id=9543}
}

BIN arbeit/img/weise_fig3.png (new file, 183 KiB)


@ -44,7 +44,9 @@ etc.), the translation of the problem--domain into a simple parametric
representation can be challenging.
The quality of such a representation in biological evolution is called
*evolvability*\cite{wagner1996complex} and is at the core of this thesis, as the
parametrization of the problem has serious implications for the convergence speed
and the quality of the solution\cite{Rothlauf2006}.
However, there is no consensus on how *evolvability* is defined and the meaning
varies from context to context\cite{richter2015evolvability}.
@ -196,59 +198,72 @@ however, is very generic and we introduce it here in a broader sense.
\end{algorithm}
The general shape of an evolutional algorithm (adapted from
\cite{back1993overview}) is outlined in Algorithm \ref{alg:evo}. Here, $P(t)$
denotes the population of parameters in step $t$ of the algorithm. The
population contains $\mu$ individuals $a_i$ that fit the shape of the parameters
we are looking for. Typically these are initialized by a random guess or just
zero. Furthermore, we need a so--called *fitness--function* $\Phi : I \mapsto M$
that maps each individual into a measurable space, along with a
convergence--function $c : I \mapsto \mathbb{B}$ that terminates the
optimization.
The main algorithm just repeats the following steps:
- **Recombine** with a recombination--function $r : I^{\mu} \mapsto I^{\lambda}$ to
generate new individuals based on the parents' characteristics.
This makes sure that the next guess is close to the old guess.
- **Mutate** with a mutation--function $m : I^{\lambda} \mapsto I^{\lambda}$ to
introduce new effects that cannot be produced by mere recombination of the
parents.
Typically this just adds minor defects to individual members of the population,
such as adding random Gaussian noise or amplifying/dampening random parts.
- **Selection** takes a selection--function $s : (I^\lambda \cup I^{\mu + \lambda},\Phi) \mapsto I^\mu$ that
selects from the previously generated $I^\lambda$ children and optionally also
the parents (denoted by the set $Q$ in the algorithm) using the
fitness--function $\Phi$. The result of this operation is the next population
of $\mu$ individuals.
All these functions can (and mostly do) have a lot of hidden parameters that
can be changed over time. One can for example start off with a high
mutation--rate that cools off over time (i.e. by lowering the variance of the
Gaussian noise).
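To make these operators concrete, the following Python sketch instantiates the
loop for real--valued individuals with uniform crossover, additive Gaussian
mutation and plus--selection. It is purely illustrative (all function names and
constants are assumptions made up for this example; this is not the
implementation used in this thesis):

```python
import random

MU, LAMBDA, DIM = 10, 40, 5  # mu parents, lambda children, problem dimension

def fitness(individual):
    # Hypothetical fitness-function Phi : I -> M; here simply the squared
    # distance to the origin (smaller is better).
    return sum(x * x for x in individual)

def recombine(parents):
    # r : I^mu -> I^lambda; uniform crossover of two random parents, so
    # every child stays close to its parents.
    return [[random.choice(pair) for pair in zip(*random.sample(parents, 2))]
            for _ in range(LAMBDA)]

def mutate(children, sigma):
    # m : I^lambda -> I^lambda; additive Gaussian noise whose variance is
    # one of the "hidden parameters" and cools off over time.
    return [[x + random.gauss(0.0, sigma) for x in child] for child in children]

def select(pool):
    # s : (I^(mu+lambda), Phi) -> I^mu; plus-selection keeping the mu best
    # of parents and children (the set Q contains the parents).
    return sorted(pool, key=fitness)[:MU]

population = [[random.uniform(-5, 5) for _ in range(DIM)] for _ in range(MU)]
sigma, t = 1.0, 0
# c : I -> B; a simple convergence-function: stop on a good-enough fitness
# or after a fixed budget of generations.
while min(map(fitness, population)) > 1e-6 and t < 1000:
    children = mutate(recombine(population), sigma)
    population = select(population + children)
    sigma *= 0.99  # cool off the mutation rate
    t += 1
print(t, min(map(fitness, population)))
```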
## Advantages of evolutional algorithms
\label{sec:back:evogood}
The main advantage of evolutional algorithms is the ability to find optima of
general functions solely with the help of a given fitness--function. This avoids
most pitfalls of simple gradient--based procedures, which often target the same
error--function that measures the fitness, but can easily get stuck in local
optima.
Components and techniques for evolutional algorithms are specifically known to
help with different problems arising in the domain of
optimization\cite{weise2012evolutionary}. An overview of the typical problems
is shown in figure \ref{fig:probhard}.
\begin{figure}[!ht]
\includegraphics[width=\textwidth]{img/weise_fig3.png}
\caption{Overview of typical problems in optimization, taken from \cite{weise2012evolutionary} (Fig.~3 therein)}
\label{fig:probhard}
\end{figure}
Most of the advantages stem from the fact that a gradient--based procedure has
only one point of observation from where it evaluates the next steps, whereas an
evolutional strategy starts with a population of guessed solutions. Because an
evolutional strategy modifies the solution randomly, keeps the best solutions
and purges the worst, it can also target multiple different hypotheses at the
same time, where local optima die out in the face of other, better
candidates.
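As a small, self--contained illustration of this advantage (hypothetical, with
all constants chosen purely for this example), consider the multimodal function
$f(x) = x^4 - 3x^2 + x$: a gradient descent started at $x = 2$ converges to the
inferior local minimum near $x \approx 1.13$, while even a tiny plus--selection
strategy typically finds the global minimum near $x \approx -1.30$:

```python
import random

def f(x):
    # Multimodal example: local minimum near x = 1.13 (f = -1.07),
    # global minimum near x = -1.30 (f = -3.51).
    return x ** 4 - 3 * x ** 2 + x

def df(x):
    return 4 * x ** 3 - 6 * x + 1

# Gradient descent: one point of observation follows the local slope
# and gets stuck in the worse minimum.
x = 2.0
for _ in range(2000):
    x -= 0.01 * df(x)
print("gradient descent:", round(x, 2), round(f(x), 2))

# Tiny evolutionary strategy: a population of guesses with Gaussian
# mutation and plus-selection; local optima typically die out against
# better candidates elsewhere in the population.
pop = [random.uniform(-3.0, 3.0) for _ in range(10)]
for _ in range(200):
    children = [p + random.gauss(0.0, 0.3) for p in pop for _ in range(4)]
    pop = sorted(pop + children, key=f)[:10]
print("evolutionary:", round(pop[0], 2), round(f(pop[0]), 2))
```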
If an analytic best solution exists and is easily computable (i.e. because the
error--function is convex) an evolutional algorithm is not the right choice.
Although both converge to the same solution, the analytic one is usually faster.
But in reality many problems have no analytic solution, because the problem is
either not convex or there are so many parameters that an analytic solution
(mostly meaning the equivalence to an exhaustive search) is computationally not
feasible. Here evolutional optimization has one more advantage, as one can at
least obtain suboptimal solutions fast, which then refine over time.
## Criteria for the evolvability of linear deformations
\label{sec:intro:rvi}
@ -427,8 +442,13 @@ linear equations.
## Is the parametrization sensible?
- disadvantages of parametrization
- as seen in chapter \ref{sec:back:evo}, the parametrization is
important\cite{Rothlauf2006}.
- the parametrization is local, but not 1:1
- the deformation around a control point is much more direct to control.
- => DM--FFD can help; needs further study.
- a bad parametrization can cause control points not to be used for the
parametrization at all.
# Scenarios for testing evolvability criteria using \acf{FFD}
@ -480,7 +500,7 @@ linear equations.
## Results of 1D Function Approximation
\begin{figure}[!ht]
\includegraphics[width=\textwidth]{img/evolution1d/20171005-all_appended.png}
\caption{Results of the 1D function approximation}
\end{figure}

Binary file not shown.


@ -189,8 +189,10 @@ into a simple parametric representation can be challenging.
The quality of such a representation in biological evolution is called
\emph{evolvability}\cite{wagner1996complex} and is at the core of this
thesis, as the parametrization of the problem has serious implications
for the convergence speed and the quality of the
solution\cite{Rothlauf2006}. However, there is no consensus on how
\emph{evolvability} is defined and the meaning varies from context to
context\cite{richter2015evolvability}.
As we transfer the results of Richter et al.\cite{anrichterEvol} from
@ -331,10 +333,10 @@ optimization?}\label{what-is-evolutional-optimization}
\label{sec:back:evo}
In this thesis we are using an evolutional optimization strategy to
solve the problem of finding the best parameters for our deformation.
This approach, however, is very generic and we introduce it here in a
broader sense.
\begin{algorithm}
\caption{An outline of evolutional algorithms}
@ -354,13 +356,13 @@ generic and we introduce it here in a broader sense.
\end{algorithm}
The general shape of an evolutional algorithm (adapted from
\cite{back1993overview}) is outlined in Algorithm \ref{alg:evo}. Here,
\(P(t)\) denotes the population of parameters in step \(t\) of the
algorithm. The population contains \(\mu\) individuals \(a_i\) that fit
the shape of the parameters we are looking for. Typically these are
initialized by a random guess or just zero. Furthermore, we need a
so--called \emph{fitness--function} \(\Phi : I \mapsto M\) that maps
each individual into a measurable space, along with a
convergence--function \(c : I \mapsto \mathbb{B}\) that terminates the
optimization.
The main algorithm just repeats the following steps:
@ -368,58 +370,73 @@ The main algorithm just repeats the following steps:
\begin{itemize}
\tightlist
\item
\textbf{Recombine} with a recombination--function
\(r : I^{\mu} \mapsto I^{\lambda}\) to generate new individuals based
on the parents' characteristics.\\
This makes sure that the next guess is close to the old guess.
\item
\textbf{Mutate} with a mutation--function
\(m : I^{\lambda} \mapsto I^{\lambda}\) to introduce new effects that
cannot be produced by mere recombination of the parents.\\
Typically this just adds minor defects to individual members of the
population, such as adding random Gaussian noise or
amplifying/dampening random parts.
\item
\textbf{Selection} takes a selection--function
\(s : (I^\lambda \cup I^{\mu + \lambda},\Phi) \mapsto I^\mu\) that
selects from the previously generated \(I^\lambda\) children and
optionally also the parents (denoted by the set \(Q\) in the
algorithm) using the fitness--function \(\Phi\). The result of this
operation is the next population of \(\mu\) individuals.
\end{itemize}
All these functions can (and mostly do) have a lot of hidden parameters
that can be changed over time. One can for example start off with a high
mutation--rate that cools off over time (i.e.~by lowering the variance
of the Gaussian noise).
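To make these operators concrete, the following Python sketch
instantiates the loop for real--valued individuals with uniform
crossover, additive Gaussian mutation and plus--selection. It is purely
illustrative (all function names and constants are assumptions made up
for this example; this is not the implementation used in this thesis):

\begin{verbatim}
import random

MU, LAMBDA, DIM = 10, 40, 5  # mu parents, lambda children, dimension

def fitness(individual):
    # Hypothetical fitness-function Phi : I -> M; here simply the
    # squared distance to the origin (smaller is better).
    return sum(x * x for x in individual)

def recombine(parents):
    # r : I^mu -> I^lambda; uniform crossover of two random parents,
    # so every child stays close to its parents.
    return [[random.choice(pair)
             for pair in zip(*random.sample(parents, 2))]
            for _ in range(LAMBDA)]

def mutate(children, sigma):
    # m : I^lambda -> I^lambda; additive Gaussian noise whose variance
    # is one of the "hidden parameters" and cools off over time.
    return [[x + random.gauss(0.0, sigma) for x in child]
            for child in children]

def select(pool):
    # s : (I^(mu+lambda), Phi) -> I^mu; plus-selection keeping the mu
    # best of parents and children (the set Q contains the parents).
    return sorted(pool, key=fitness)[:MU]

population = [[random.uniform(-5, 5) for _ in range(DIM)]
              for _ in range(MU)]
sigma, t = 1.0, 0
# c : I -> B; a simple convergence-function: stop on a good-enough
# fitness or after a fixed budget of generations.
while min(map(fitness, population)) > 1e-6 and t < 1000:
    children = mutate(recombine(population), sigma)
    population = select(population + children)
    sigma *= 0.99  # cool off the mutation rate
    t += 1
print(t, min(map(fitness, population)))
\end{verbatim}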
\section{Advantages of evolutional
algorithms}\label{advantages-of-evolutional-algorithms}
\label{sec:back:evogood}
The main advantage of evolutional algorithms is the ability to find
optima of general functions solely with the help of a given
fitness--function. This avoids most pitfalls of simple gradient--based
procedures, which often target the same error--function that measures
the fitness, but can easily get stuck in local optima.
Components and techniques for evolutional algorithms are specifically
known to help with different problems arising in the domain of
optimization\cite{weise2012evolutionary}. An overview of the typical
problems is shown in figure \ref{fig:probhard}.
\begin{figure}[!ht]
\includegraphics[width=\textwidth]{img/weise_fig3.png}
\caption{Overview of typical problems in optimization, taken from \cite{weise2012evolutionary} (Fig.~3 therein)}
\label{fig:probhard}
\end{figure}
Most of the advantages stem from the fact that a gradient--based
procedure has only one point of observation from where it evaluates the
next steps, whereas an evolutional strategy starts with a population of
guessed solutions. Because an evolutional strategy modifies the solution
randomly, keeps the best solutions and purges the worst, it can also
target multiple different hypotheses at the same time, where local
optima die out in the face of other, better candidates.
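As a small, self--contained illustration of this advantage
(hypothetical, with all constants chosen purely for this example),
consider the multimodal function \(f(x) = x^4 - 3x^2 + x\): a gradient
descent started at \(x = 2\) converges to the inferior local minimum
near \(x \approx 1.13\), while even a tiny plus--selection strategy
typically finds the global minimum near \(x \approx -1.30\):

\begin{verbatim}
import random

def f(x):
    # Multimodal example: local minimum near x = 1.13 (f = -1.07),
    # global minimum near x = -1.30 (f = -3.51).
    return x ** 4 - 3 * x ** 2 + x

def df(x):
    return 4 * x ** 3 - 6 * x + 1

# Gradient descent: one point of observation follows the local slope
# and gets stuck in the worse minimum.
x = 2.0
for _ in range(2000):
    x -= 0.01 * df(x)
print("gradient descent:", round(x, 2), round(f(x), 2))

# Tiny evolutionary strategy: a population of guesses with Gaussian
# mutation and plus-selection; local optima typically die out against
# better candidates elsewhere in the population.
pop = [random.uniform(-3.0, 3.0) for _ in range(10)]
for _ in range(200):
    children = [p + random.gauss(0.0, 0.3) for p in pop for _ in range(4)]
    pop = sorted(pop + children, key=f)[:10]
print("evolutionary:", round(pop[0], 2), round(f(pop[0]), 2))
\end{verbatim}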
If an analytic best solution exists and is easily computable
(i.e.~because the error--function is convex) an evolutional algorithm is
not the right choice. Although both converge to the same solution, the
analytic one is usually faster.
But in reality many problems have no analytic solution, because the
problem is either not convex or there are so many parameters that an
analytic solution (mostly meaning the equivalence to an exhaustive
search) is computationally not feasible. Here evolutional optimization
has one more advantage, as one can at least obtain suboptimal solutions
fast, which then refine over time.
\section{Criteria for the evolvability of linear
deformations}\label{criteria-for-the-evolvability-of-linear-deformations}
@ -606,10 +623,18 @@ system of linear equations.
\tightlist
\item
disadvantages of parametrization
\item
as seen in chapter \ref{sec:back:evo}, the parametrization is
important\cite{Rothlauf2006}.
\item
the parametrization is local, but not 1:1
\item
the deformation around a control point is much more direct to control.
\item
=\textgreater{} DM--FFD can help; needs further study.
\item
a bad parametrization can cause control points not to be used for the
parametrization at all.
\end{itemize}
\chapter{\texorpdfstring{Scenarios for testing evolvability criteria
@ -710,7 +735,7 @@ Optimierung}\label{besonderheiten-der-optimierung}
Approximation}\label{results-of-1d-function-approximation}
\begin{figure}[!ht]
\includegraphics[width=\textwidth]{img/evolution1d/20171005-all_appended.png}
\caption{Results of the 1D function approximation}
\end{figure}

Binary file not shown. (new file, 29 KiB)