From 0ca1aa19d42f7f775e3e1e5b8ea69ee7df745325 Mon Sep 17 00:00:00 2001
From: Erik Strand <erik.strand@cba.mit.edu>
Date: Thu, 16 Apr 2020 17:45:52 -0400
Subject: [PATCH] Template gradient descent on log class

As opposed to inheriting from a log class that's chosen by compile-time
definitions. This will make it possible to compile instrumented and
non-instrumented versions side by side.
---
 optimization/objectives/samples_vis.h         |  4 +---
 .../gradient_descent/gradient_descent.h       | 12 ++++++------
 .../everything.h}                             | 19 ++++++-------------
 .../everything_vis.h}                         |  6 ++++--
 .../gradient_descent/logs/nothing.h           | 19 +++++++++++++++++++
 .../optimizers/gradient_descent/main.cpp      |  6 ++++--
 6 files changed, 40 insertions(+), 26 deletions(-)
 rename optimization/optimizers/gradient_descent/{gradient_descent_log.h => logs/everything.h} (75%)
 rename optimization/optimizers/gradient_descent/{gradient_descent_vis.h => logs/everything_vis.h} (81%)
 create mode 100644 optimization/optimizers/gradient_descent/logs/nothing.h

diff --git a/optimization/objectives/samples_vis.h b/optimization/objectives/samples_vis.h
index 2753760..426a5df 100644
--- a/optimization/objectives/samples_vis.h
+++ b/optimization/objectives/samples_vis.h
@@ -1,9 +1,8 @@
 #ifndef OPTIMIZATION_OBJECTIVES_SAMPLES_VIS_H
 #define OPTIMIZATION_OBJECTIVES_SAMPLES_VIS_H
 
-#include "utils/vis_only.h"
-#ifdef VISUALIZE
 #include "samples.h"
+#include "utils/eigen_json.h"
 
 namespace optimization {
 
@@ -20,4 +19,3 @@ void to_json(nlohmann::json& j, GradientSample<Vector> const& sample) {
 }
 
 #endif
-#endif

diff --git a/optimization/optimizers/gradient_descent/gradient_descent.h b/optimization/optimizers/gradient_descent/gradient_descent.h
index 1061e18..46dd99e 100644
--- a/optimization/optimizers/gradient_descent/gradient_descent.h
+++ b/optimization/optimizers/gradient_descent/gradient_descent.h
@@ -1,14 +1,14 @@
 #ifndef OPTIMIZATION_GRADIENT_DESCENT_H
 #define OPTIMIZATION_GRADIENT_DESCENT_H
 
-#include "gradient_descent_log.h"
+#include "logs/nothing.h"
 #include <iostream>
 
 namespace optimization {
 
 //--------------------------------------------------------------------------------------------------
-template <int32_t N>
-class GradientDescent : public GradientDescentLog<N> {
+template <int32_t N, typename Log = GradientDescentLogNothing>
+class GradientDescent : public Log {
 public:
     GradientDescent() {}
     GradientDescent(Scalar learning_rate, uint32_t me, Scalar gt)
@@ -29,8 +29,8 @@ public:
         VectorNs<N> gradient;
         gradient.resize(point.size());
         objective.eval(point, value, gradient);
-        GradientDescentLog<N>::initialize(objective);
-        GradientDescentLog<N>::push_back(point, value, gradient);
+        Log::initialize(objective);
+        Log::push_back(point, value, gradient);
 
         for (n_evaluations_ = 1; n_evaluations_ < max_evaluations_; ++n_evaluations_) {
             if (gradient.norm() <= gradient_threshold_) {
@@ -41,7 +41,7 @@ public:
 
             point -= learning_rate_ * gradient;
             objective.eval(point, value, gradient);
-            GradientDescentLog<N>::push_back(point, value, gradient);
+            Log::push_back(point, value, gradient);
         }
 
         return point;
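The hunk above swaps the hard-coded GradientDescentLog<N> base for a Log template
parameter, keeping it as a base class. Inheriting from the log rather than storing it
as a member matters because of the empty base class optimization: an empty log type
then adds no size at all to the optimizer. A minimal standalone sketch of the effect
(simplified types, not code from this repository):

    #include <iostream>

    // Stand-in for GradientDescentLogNothing: a policy with no data members.
    struct EmptyLog {};

    // Inheriting from the policy, as GradientDescent does, lets the empty base
    // class optimization apply: EmptyLog contributes zero bytes.
    template <typename Log>
    struct InheritsLog : Log {
        double learning_rate;
    };

    // A member of empty type still needs a distinct address, so it costs at
    // least one byte plus alignment padding.
    template <typename Log>
    struct StoresLog {
        double learning_rate;
        Log log;
    };

    int main() {
        std::cout << sizeof(InheritsLog<EmptyLog>) << '\n'; // typically 8
        std::cout << sizeof(StoresLog<EmptyLog>) << '\n';   // typically 16
    }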
diff --git a/optimization/optimizers/gradient_descent/gradient_descent_log.h b/optimization/optimizers/gradient_descent/logs/everything.h
similarity index 75%
rename from optimization/optimizers/gradient_descent/gradient_descent_log.h
rename to optimization/optimizers/gradient_descent/logs/everything.h
index 7011c07..c9be4ec 100644
--- a/optimization/optimizers/gradient_descent/gradient_descent_log.h
+++ b/optimization/optimizers/gradient_descent/logs/everything.h
@@ -1,5 +1,5 @@
-#ifndef OPTIMIZATION_GRADIENT_DESCENT_LOG_H
-#define OPTIMIZATION_GRADIENT_DESCENT_LOG_H
+#ifndef OPTIMIZATION_GRADIENT_DESCENT_LOGS_EVERYTHING_H
+#define OPTIMIZATION_GRADIENT_DESCENT_LOGS_EVERYTHING_H
 
 #include "utils/vector.h"
 #include "utils/vis_only.h"
@@ -17,8 +17,7 @@ namespace optimization {
 // This is used as a base class rather than a member so that the empty base class optimization can
 // be applied (the member would take up space even if it is an empty class).
 template <int32_t N>
-struct GradientDescentLog {
-    void reserve(uint32_t n) VIS_ONLY_METHOD;
+struct GradientDescentLogEverything {
     template <typename Objective>
     void initialize(Objective const&) VIS_ONLY_METHOD;
     void push_back(
@@ -36,22 +35,16 @@ struct GradientDescentLog {
 
 #ifdef VISUALIZE
 
-//..................................................................................................
-template <int32_t N>
-void GradientDescentLog<N>::reserve(uint32_t n) {
-    samples.reserve(n);
-}
-
 //..................................................................................................
 template <int32_t N>
 template <typename Objective>
-void GradientDescentLog<N>::initialize(Objective const&) {
+void GradientDescentLogEverything<N>::initialize(Objective const&) {
     objective_name = Objective::name;
 }
 
 //..................................................................................................
 template <int32_t N>
-void GradientDescentLog<N>::push_back(
+void GradientDescentLogEverything<N>::push_back(
     VectorNs<N> const& point,
     Scalar value,
     VectorNs<N> const& gradient
@@ -61,7 +54,7 @@
 //--------------------------------------------------------------------------------------------------
 template <int32_t N>
-void to_json(nlohmann::json& j, GradientDescentLog<N> const& log) {
+void to_json(nlohmann::json& j, GradientDescentLogEverything<N> const& log) {
     j = nlohmann::json{
         {"algorithm", "gradient descent"},
         {"objective", log.objective_name},

diff --git a/optimization/optimizers/gradient_descent/gradient_descent_vis.h b/optimization/optimizers/gradient_descent/logs/everything_vis.h
similarity index 81%
rename from optimization/optimizers/gradient_descent/gradient_descent_vis.h
rename to optimization/optimizers/gradient_descent/logs/everything_vis.h
index 966d63a..0bf1e46 100644
--- a/optimization/optimizers/gradient_descent/gradient_descent_vis.h
+++ b/optimization/optimizers/gradient_descent/logs/everything_vis.h
@@ -2,14 +2,16 @@
 #define OPTIMIZATION_GRADIENT_DESCENT_VIS_H
 
 #include <iostream>
-#include "gradient_descent_log.h"
+#include "everything.h"
 
 namespace optimization {
 
 //--------------------------------------------------------------------------------------------------
+// Provides a way to serialize a subset of an everything log that is compatible with the
+// visualization script.
 template <int32_t N>
 struct GradientDescentVis {
-    GradientDescentLog<N> const& log;
+    GradientDescentLogEverything<N> const& log;
 };
 
 //..................................................................................................
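These renames also pin down the implicit interface a log must satisfy: the optimizer
only ever calls Log::initialize and Log::push_back, so any type with those two entry
points can be plugged in, as the new logs/nothing.h below does. For illustration, a
hypothetical third policy, not part of this patch, that counts samples without
storing them (assuming utils/vector.h provides VectorNs and Scalar, as the other
logs do):

    #include "utils/vector.h"

    #include <cstdint>

    namespace optimization {

    // Hypothetical log policy: tracks how many samples were pushed, nothing more.
    struct GradientDescentLogCount {
        template <typename Objective>
        void initialize(Objective const&) {}

        template <int32_t N>
        void push_back(VectorNs<N> const&, Scalar, VectorNs<N> const&) {
            ++n_samples;
        }

        uint32_t n_samples = 0;
    };

    }

Instantiating GradientDescent<-1, GradientDescentLogCount> would then record the
sample count with no per-sample storage.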
diff --git a/optimization/optimizers/gradient_descent/logs/nothing.h b/optimization/optimizers/gradient_descent/logs/nothing.h
new file mode 100644
index 0000000..3178cf7
--- /dev/null
+++ b/optimization/optimizers/gradient_descent/logs/nothing.h
@@ -0,0 +1,19 @@
+#ifndef OPTIMIZATION_GRADIENT_DESCENT_LOGS_NOTHING_H
+#define OPTIMIZATION_GRADIENT_DESCENT_LOGS_NOTHING_H
+
+#include "utils/vector.h"
+
+namespace optimization {
+
+//--------------------------------------------------------------------------------------------------
+struct GradientDescentLogNothing {
+    template <typename Objective>
+    void initialize(Objective const&) {}
+
+    template <int32_t N>
+    void push_back(VectorNs<N> const& point, Scalar value, VectorNs<N> const& gradient) {}
+};
+
+}
+
+#endif

diff --git a/optimization/optimizers/gradient_descent/main.cpp b/optimization/optimizers/gradient_descent/main.cpp
index dceb38c..1032b09 100644
--- a/optimization/optimizers/gradient_descent/main.cpp
+++ b/optimization/optimizers/gradient_descent/main.cpp
@@ -2,10 +2,11 @@
 #include "gradient_descent.h"
 #include "objectives/paraboloid.h"
 #include "objectives/rosenbrock.h"
+#include "logs/everything.h"
 #include <iostream>
 
 #ifdef VISUALIZE
-#include "gradient_descent_vis.h"
+#include "logs/everything_vis.h"
 #include "utils/eigen_json.h"
 
 #include <fstream>
@@ -54,7 +55,8 @@ int main(int const argc, char const** argv) {
     Objective objective;
     objective.dim() = dim;
 
-    GradientDescent<-1> optimizer(learning_rate, max_evaluations, gradient_threshold);
+    using Log = GradientDescentLogEverything<-1>;
+    GradientDescent<-1, Log> optimizer(learning_rate, max_evaluations, gradient_threshold);
     VectorXs minimum = optimizer.optimize(objective, initial_point);
     std::cout << "n evaluations: " << optimizer.n_evaluations() << '\n';
     std::cout << "final point: " << minimum << '\n';
-- 
GitLab
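Taken together, the patch moves the instrumentation decision from a compile-time
definition to the call site, so both variants can coexist in one binary. A sketch of
hypothetical calling code, not taken from the patch: the objective type name
Rosenbrock and the hyperparameter values are placeholders, and the instrumented
variant still needs VISUALIZE defined at this commit, since the everything log's
methods remain guarded by VIS_ONLY_METHOD:

    #include "gradient_descent.h"
    #include "logs/everything.h"
    #include "objectives/rosenbrock.h"

    using namespace optimization;

    void run_both(Rosenbrock& objective, VectorXs const& x0) {
        // Default Log = GradientDescentLogNothing: nothing is recorded, and
        // the empty base keeps the optimizer as small as before.
        GradientDescent<-1> plain(0.01, 10000, 1e-8);
        plain.optimize(objective, x0);

        // Fully instrumented variant, compiled side by side in the same
        // translation unit.
        using Log = GradientDescentLogEverything<-1>;
        GradientDescent<-1, Log> instrumented(0.01, 10000, 1e-8);
        instrumented.optimize(objective, x0);
    }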