From 3fa17637d086bf4b166066010bbb5b0e76b01c83 Mon Sep 17 00:00:00 2001
From: Erik Strand <erik.strand@cba.mit.edu>
Date: Thu, 16 Apr 2020 18:04:50 -0400
Subject: [PATCH] Use appropriate log in gradient descent app

Select the log type at runtime instead of gating all logging behind the
VISUALIZE compile flag: when no output paths are given, the app uses a
lean optimizer; otherwise it uses GradientDescentLogEverything and can
write the log and visualization JSON files. optimization_lib now links
cma-es and json unconditionally (compare_convergence no longer links
cma-es directly), and the gradient descent plot targets are always
generated.
---
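Note: the TODO in main.cpp below concerns the optimize-and-report code that
is now duplicated across the two branches. One possible shape for that
deduplication is a small function template over the optimizer type; this is
only a rough sketch, with run_and_report a hypothetical helper built from
the VectorXs alias and the optimizer interface already used in the diff:

    template <typename Optimizer, typename Objective>
    VectorXs run_and_report(Optimizer& optimizer, Objective& objective,
                            VectorXs const& initial_point) {
        // Run the optimization and print the same summary both branches print.
        VectorXs minimum = optimizer.optimize(objective, initial_point);
        std::cout << "n evaluations: " << optimizer.n_evaluations() << '\n';
        std::cout << "final point: " << minimum << '\n';
        return minimum;
    }

Both branches could then call run_and_report, leaving only the JSON output
code in the logging branch.
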
 apps/CMakeLists.txt                           |  2 +-
 optimization/CMakeLists.txt                   |  2 +-
 .../gradient_descent/CMakeLists.txt           |  6 +--
 .../gradient_descent/logs/everything.h        | 40 ++++------------
 .../gradient_descent/logs/nothing.h           |  2 +-
 .../optimizers/gradient_descent/main.cpp      | 48 ++++++++++---------
 6 files changed, 39 insertions(+), 61 deletions(-)

diff --git a/apps/CMakeLists.txt b/apps/CMakeLists.txt
index 3aa653e..3600b31 100644
--- a/apps/CMakeLists.txt
+++ b/apps/CMakeLists.txt
@@ -1,4 +1,4 @@
 add_executable(compare_convergence
     compare_convergence.cpp
 )
-target_link_libraries(compare_convergence optimization_lib cma-es)
+target_link_libraries(compare_convergence optimization_lib)
diff --git a/optimization/CMakeLists.txt b/optimization/CMakeLists.txt
index 4f3b6e2..a35d766 100644
--- a/optimization/CMakeLists.txt
+++ b/optimization/CMakeLists.txt
@@ -4,7 +4,7 @@ target_link_libraries(optimization_lib INTERFACE shared_settings Eigen3::Eigen)
 
 if (VISUALIZE)
     target_compile_definitions(optimization_lib INTERFACE VISUALIZE)
-    target_link_libraries(optimization_lib INTERFACE json)
 endif()
+target_link_libraries(optimization_lib INTERFACE cma-es json)
 
 add_subdirectory(optimizers)
diff --git a/optimization/optimizers/gradient_descent/CMakeLists.txt b/optimization/optimizers/gradient_descent/CMakeLists.txt
index bb3b52e..8684fbf 100644
--- a/optimization/optimizers/gradient_descent/CMakeLists.txt
+++ b/optimization/optimizers/gradient_descent/CMakeLists.txt
@@ -3,7 +3,5 @@ add_executable(gradient_descent
 )
 target_link_libraries(gradient_descent optimization_lib clara)
 
-if (VISUALIZE)
-    make_plot_target(gradient_descent 2d ARGS -d 2 -l 0.0015 -n 10000)
-    make_plot_target(gradient_descent 10d ARGS -d 10 -l 0.0005 -n 10000)
-endif()
+make_plot_target(gradient_descent 2d ARGS -d 2 -l 0.0015 -n 10000)
+make_plot_target(gradient_descent 10d ARGS -d 10 -l 0.0005 -n 10000)
diff --git a/optimization/optimizers/gradient_descent/logs/everything.h b/optimization/optimizers/gradient_descent/logs/everything.h
index c9be4ec..8eaf557 100644
--- a/optimization/optimizers/gradient_descent/logs/everything.h
+++ b/optimization/optimizers/gradient_descent/logs/everything.h
@@ -1,15 +1,12 @@
 #ifndef OPTIMIZATION_GRADIENT_DESCENT_LOGS_EVERYTHING_H
 #define OPTIMIZATION_GRADIENT_DESCENT_LOGS_EVERYTHING_H
 
+#include "objectives/samples.h"
+#include "objectives/samples_vis.h"
+#include "utils/eigen_json.h"
 #include "utils/vector.h"
 #include "utils/vis_only.h"
-#include "objectives/samples.h"
-
-#ifdef VISUALIZE
 #include <vector>
-#include "json.hpp"
-#include "objectives/samples_vis.h"
-#endif
 
 namespace optimization {
 
@@ -19,39 +16,20 @@ namespace optimization {
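+// Log that records the objective name and every (point, value, gradient)
+// sample the optimizer visits.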
 template <int32_t N>
 struct GradientDescentLogEverything {
     template <typename Objective>
-    void initialize(Objective const&) VIS_ONLY_METHOD;
+    void initialize(Objective const&) { objective_name = Objective::name; }
     void push_back(
         VectorNs<N> const& point,
         Scalar value,
         VectorNs<N> const& gradient
-    ) VIS_ONLY_METHOD;
-    void clear() VIS_ONLY_METHOD;
+    ) {
+        samples.emplace_back(point, value, gradient);
+    }
+    void clear() { samples.clear(); }
 
-    #ifdef VISUALIZE
     std::string objective_name;
     std::vector<GradientSample<VectorNs<N>>> samples;
-    #endif
 };
 
-#ifdef VISUALIZE
-
-//..................................................................................................
-template <int32_t N>
-template <typename Objective>
-void GradientDescentLogEverything<N>::initialize(Objective const&) {
-    objective_name = Objective::name;
-}
-
-//..................................................................................................
-template <int32_t N>
-void GradientDescentLogEverything<N>::push_back(
-    VectorNs<N> const& point,
-    Scalar value,
-    VectorNs<N> const& gradient
-) {
-    samples.emplace_back(point, value, gradient);
-}
-
 //--------------------------------------------------------------------------------------------------
 template <int32_t N>
 void to_json(nlohmann::json& j, GradientDescentLogEverything<N> const& log) {
@@ -62,8 +40,6 @@ void to_json(nlohmann::json& j, GradientDescentLogEverything<N> const& log) {
     };
 }
 
-#endif
-
 }
 
 #endif
diff --git a/optimization/optimizers/gradient_descent/logs/nothing.h b/optimization/optimizers/gradient_descent/logs/nothing.h
index 3178cf7..1260357 100644
--- a/optimization/optimizers/gradient_descent/logs/nothing.h
+++ b/optimization/optimizers/gradient_descent/logs/nothing.h
@@ -11,7 +11,7 @@ struct GradientDescentLogNothing {
     void initialize(Objective const&) {}
 
     template <int32_t N>
-    void push_back(VectorNs<N> const& point, Scalar value, VectorNs<N> const& gradient) {}
+    void push_back(VectorNs<N> const&, Scalar, VectorNs<N> const&) {}
 };
 
 }
diff --git a/optimization/optimizers/gradient_descent/main.cpp b/optimization/optimizers/gradient_descent/main.cpp
index 1032b09..b71f588 100644
--- a/optimization/optimizers/gradient_descent/main.cpp
+++ b/optimization/optimizers/gradient_descent/main.cpp
@@ -1,17 +1,14 @@
 #include "clara.hpp"
 #include "gradient_descent.h"
-#include "objectives/paraboloid.h"
-#include "objectives/rosenbrock.h"
 #include "logs/everything.h"
-#include <iostream>
-
-#ifdef VISUALIZE
 #include "logs/everything_vis.h"
+#include "objectives/paraboloid.h"
+#include "objectives/rosenbrock.h"
 #include "utils/eigen_json.h"
 #include <fstream>
+#include <iostream>
 
 using json = nlohmann::json;
-#endif
 
 using namespace optimization;
 
@@ -55,25 +52,32 @@ int main(int const argc, char const** argv) {
     Objective objective;
     objective.dim() = dim;
 
-    using Log = GradientDescentLogEverything<-1>;
-    GradientDescent<-1, Log> optimizer(learning_rate, max_evaluations, gradient_threshold);
-    VectorXs minimum = optimizer.optimize(objective, initial_point);
-    std::cout << "n evaluations: " << optimizer.n_evaluations() << '\n';
-    std::cout << "final point: " << minimum << '\n';
+    if (log_file_path.empty() && vis_file_path.empty()) {
+        // If we're not saving data, use a lean optimizer.
+        // TODO: Find a way to deduplicate code between these branches.
+        GradientDescent<-1> optimizer(learning_rate, max_evaluations, gradient_threshold);
+        VectorXs minimum = optimizer.optimize(objective, initial_point);
+        std::cout << "n evaluations: " << optimizer.n_evaluations() << '\n';
+        std::cout << "final point: " << minimum << '\n';
+    } else {
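+        // Otherwise keep a full log so the run can be written out as JSON below.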
+        using Log = GradientDescentLogEverything<-1>;
+        GradientDescent<-1, Log> optimizer(learning_rate, max_evaluations, gradient_threshold);
+        VectorXs minimum = optimizer.optimize(objective, initial_point);
+        std::cout << "n evaluations: " << optimizer.n_evaluations() << '\n';
+        std::cout << "final point: " << minimum << '\n';
 
-    #ifdef VISUALIZE
-    if (!log_file_path.empty()) {
-        json data = optimizer;
-        std::ofstream log_file(log_file_path);
-        log_file << data.dump(4) << '\n';
-    }
+        if (!log_file_path.empty()) {
+            json data = optimizer;
+            std::ofstream log_file(log_file_path);
+            log_file << data.dump(4) << '\n';
+        }
 
-    if (!vis_file_path.empty()) {
-        json data = GradientDescentVis<-1>{optimizer};
-        std::ofstream vis_file(vis_file_path);
-        vis_file << data.dump(4) << '\n';
+        if (!vis_file_path.empty()) {
+            json data = GradientDescentVis<-1>{optimizer};
+            std::ofstream vis_file(vis_file_path);
+            vis_file << data.dump(4) << '\n';
+        }
     }
-    #endif
 
     return 0;
 }
-- 
GitLab