diff --git a/optimization/objectives/samples_vis.h b/optimization/objectives/samples_vis.h
index 2753760a43584d7ed4f86d286d5434a46dd5cce3..426a5df97cd8bc2b5b0cca61a3aea6aabc0be6c8 100644
--- a/optimization/objectives/samples_vis.h
+++ b/optimization/objectives/samples_vis.h
@@ -1,9 +1,8 @@
 #ifndef OPTIMIZATION_OBJECTIVES_SAMPLES_VIS_H
 #define OPTIMIZATION_OBJECTIVES_SAMPLES_VIS_H
 
-#include "utils/vis_only.h"
-#ifdef VISUALIZE
 #include "samples.h"
+#include "utils/eigen_json.h"
 
 namespace optimization {
 
@@ -20,4 +19,3 @@ void to_json(nlohmann::json& j, GradientSample<Vector> const& sample) {
 }
 
 #endif
-#endif
diff --git a/optimization/optimizers/gradient_descent/gradient_descent.h b/optimization/optimizers/gradient_descent/gradient_descent.h
index 1061e181f11a3252ee7514a318b82b102409e85b..46dd99ee254c1f36287355935f228bff1262c87a 100644
--- a/optimization/optimizers/gradient_descent/gradient_descent.h
+++ b/optimization/optimizers/gradient_descent/gradient_descent.h
@@ -1,14 +1,14 @@
 #ifndef OPTIMIZATION_GRADIENT_DESCENT_H
 #define OPTIMIZATION_GRADIENT_DESCENT_H
 
-#include "gradient_descent_log.h"
+#include "logs/nothing.h"
 #include <iostream>
 
 namespace optimization {
 
 //--------------------------------------------------------------------------------------------------
-template <int32_t N>
-class GradientDescent : public GradientDescentLog<N> {
+template <int32_t N, typename Log = GradientDescentLogNothing>
+class GradientDescent : public Log {
 public:
     GradientDescent() {}
     GradientDescent(Scalar learning_rate, uint32_t me, Scalar gt)
@@ -29,8 +29,8 @@ public:
         VectorNs<N> gradient;
         gradient.resize(point.size());
         objective.eval(point, value, gradient);
-        GradientDescentLog<N>::initialize(objective);
-        GradientDescentLog<N>::push_back(point, value, gradient);
+        Log::initialize(objective);
+        Log::push_back(point, value, gradient);
 
         for (n_evaluations_ = 1; n_evaluations_ < max_evaluations_; ++n_evaluations_) {
             if (gradient.norm() <= gradient_threshold_) {
@@ -41,7 +41,7 @@ public:
 
             point -= learning_rate_ * gradient;
             objective.eval(point, value, gradient);
-            GradientDescentLog<N>::push_back(point, value, gradient);
+            Log::push_back(point, value, gradient);
         }
 
         return point;
diff --git a/optimization/optimizers/gradient_descent/gradient_descent_log.h b/optimization/optimizers/gradient_descent/logs/everything.h
similarity index 75%
rename from optimization/optimizers/gradient_descent/gradient_descent_log.h
rename to optimization/optimizers/gradient_descent/logs/everything.h
index 7011c079850deada42c5a02bd5e31d3cac7af33a..c9be4ec064dcfd3c3d33e8ac882996090ae5db07 100644
--- a/optimization/optimizers/gradient_descent/gradient_descent_log.h
+++ b/optimization/optimizers/gradient_descent/logs/everything.h
@@ -1,5 +1,5 @@
-#ifndef OPTIMIZATION_GRADIENT_DESCENT_LOG_H
-#define OPTIMIZATION_GRADIENT_DESCENT_LOG_H
+#ifndef OPTIMIZATION_GRADIENT_DESCENT_LOGS_EVERYTHING_H
+#define OPTIMIZATION_GRADIENT_DESCENT_LOGS_EVERYTHING_H
 
 #include "utils/vector.h"
 #include "utils/vis_only.h"
@@ -17,8 +17,7 @@ namespace optimization {
 // This is used as a base class rather than a member so that the empty base class optimization can
 // be applied (the member would take up space even if it is an empty class).
 template <int32_t N>
-struct GradientDescentLog {
-    void reserve(uint32_t n) VIS_ONLY_METHOD;
+struct GradientDescentLogEverything {
     template <typename Objective>
     void initialize(Objective const&) VIS_ONLY_METHOD;
     void push_back(
@@ -36,22 +35,16 @@ struct GradientDescentLog {
 
 #ifdef VISUALIZE
 
-//..................................................................................................
-template <int32_t N>
-void GradientDescentLog<N>::reserve(uint32_t n) {
-    samples.reserve(n);
-}
-
 //..................................................................................................
 template <int32_t N>
 template <typename Objective>
-void GradientDescentLog<N>::initialize(Objective const&) {
+void GradientDescentLogEverything<N>::initialize(Objective const&) {
     objective_name = Objective::name;
 }
 
 //..................................................................................................
 template <int32_t N>
-void GradientDescentLog<N>::push_back(
+void GradientDescentLogEverything<N>::push_back(
     VectorNs<N> const& point,
     Scalar value,
     VectorNs<N> const& gradient
@@ -61,7 +54,7 @@ void GradientDescentLog<N>::push_back(
 
 //--------------------------------------------------------------------------------------------------
 template <int32_t N>
-void to_json(nlohmann::json& j, GradientDescentLog<N> const& log) {
+void to_json(nlohmann::json& j, GradientDescentLogEverything<N> const& log) {
     j = nlohmann::json{
         {"algorithm", "gradient descent"},
         {"objective", log.objective_name},
diff --git a/optimization/optimizers/gradient_descent/gradient_descent_vis.h b/optimization/optimizers/gradient_descent/logs/everything_vis.h
similarity index 81%
rename from optimization/optimizers/gradient_descent/gradient_descent_vis.h
rename to optimization/optimizers/gradient_descent/logs/everything_vis.h
index 966d63a3e53076786779446042e785b80fa9a325..0bf1e466d3bf7a486c4bccc8ea1122287c5a6fc0 100644
--- a/optimization/optimizers/gradient_descent/gradient_descent_vis.h
+++ b/optimization/optimizers/gradient_descent/logs/everything_vis.h
@@ -2,14 +2,16 @@
 #define OPTIMIZATION_GRADIENT_DESCENT_VIS_H
 
 #include <iostream>
-#include "gradient_descent_log.h"
+#include "everything.h"
 
 namespace optimization {
 
 //--------------------------------------------------------------------------------------------------
+// Provides a way to serialize a subset of an everything log that is compatible with the
+// visualization script.
 template <int32_t N>
 struct GradientDescentVis {
-    GradientDescentLog<N> const& log;
+    GradientDescentLogEverything<N> const& log;
 };
 
 //..................................................................................................
diff --git a/optimization/optimizers/gradient_descent/logs/nothing.h b/optimization/optimizers/gradient_descent/logs/nothing.h
new file mode 100644
index 0000000000000000000000000000000000000000..3178cf774ed49c5f51f53f1d9542debdd8f19c50
--- /dev/null
+++ b/optimization/optimizers/gradient_descent/logs/nothing.h
@@ -0,0 +1,19 @@
+#ifndef OPTIMIZATION_GRADIENT_DESCENT_LOGS_NOTHING_H
+#define OPTIMIZATION_GRADIENT_DESCENT_LOGS_NOTHING_H
+
+#include "utils/vector.h"
+
+namespace optimization {
+
+//--------------------------------------------------------------------------------------------------
+struct GradientDescentLogNothing {
+    template <typename Objective>
+    void initialize(Objective const&) {}
+
+    template <int32_t N>
+    void push_back(VectorNs<N> const& point, Scalar value, VectorNs<N> const& gradient) {}
+};
+
+}
+
+#endif
diff --git a/optimization/optimizers/gradient_descent/main.cpp b/optimization/optimizers/gradient_descent/main.cpp
index dceb38c2e631eb0a58d684fb74d2a6b0d293a902..1032b09750bf49bb4ab23f3ca8fd739ad8818ca9 100644
--- a/optimization/optimizers/gradient_descent/main.cpp
+++ b/optimization/optimizers/gradient_descent/main.cpp
@@ -2,10 +2,11 @@
 #include "gradient_descent.h"
 #include "objectives/paraboloid.h"
 #include "objectives/rosenbrock.h"
+#include "logs/everything.h"
 #include <iostream>
 
 #ifdef VISUALIZE
-#include "gradient_descent_vis.h"
+#include "logs/everything_vis.h"
 #include "utils/eigen_json.h"
 #include <fstream>
 
@@ -54,7 +55,8 @@ int main(int const argc, char const** argv) {
     Objective objective;
     objective.dim() = dim;
 
-    GradientDescent<-1> optimizer(learning_rate, max_evaluations, gradient_threshold);
+    using Log = GradientDescentLogEverything<-1>;
+    GradientDescent<-1, Log> optimizer(learning_rate, max_evaluations, gradient_threshold);
     VectorXs minimum = optimizer.optimize(objective, initial_point);
     std::cout << "n evaluations: " << optimizer.n_evaluations() << '\n';
    std::cout << "final point: " << minimum << '\n';
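
For reference, a minimal sketch of how the two log policies are meant to be selected, based only on the interfaces visible in this diff. The learning rate, evaluation cap, gradient threshold, dimension, and start point are illustrative values; Paraboloid is assumed to expose the same dim() accessor and static name that main.cpp relies on, and VectorXs is assumed to be the usual Eigen dynamic-vector typedef:

    #include "gradient_descent.h"       // pulls in logs/nothing.h for the default policy
    #include "logs/everything.h"
    #include "objectives/paraboloid.h"

    using namespace optimization;

    int main() {
        Paraboloid objective;
        objective.dim() = 2;            // dynamic-size objective, mirroring main.cpp
        VectorXs x0(2);
        x0 << 1.0, 1.0;                 // illustrative start point

        // Default policy: GradientDescentLogNothing. Its empty initialize() and
        // push_back() inline away, and because the log is a base class rather than
        // a member, the empty base class optimization keeps the optimizer from
        // growing at all.
        GradientDescent<-1> quiet(0.01, 1000, 1e-6);
        VectorXs minimum_quiet = quiet.optimize(objective, x0);

        // Opt-in policy: record every sample, e.g. for the visualization script.
        using Log = GradientDescentLogEverything<-1>;
        GradientDescent<-1, Log> logged(0.01, 1000, 1e-6);
        VectorXs minimum_logged = logged.optimize(objective, x0);
        return 0;
    }

Swapping the Log parameter changes only which initialize/push_back calls the optimizer compiles into its loop; there is no virtual dispatch or runtime branching.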