// Logistic regression trained with batch gradient descent and L2
// regularization, implemented on top of Eigen.

#include "LogisticRegression.h"

#include <eigen3/Eigen/Dense>
#include <iostream>
#include <list>
#include <tuple>

Eigen::MatrixXd LogisticRegression::Sigmoid(Eigen::MatrixXd Z) {
    // Element-wise sigmoid: sigma(z) = 1 / (1 + e^(-z)).
    return 1 / (1 + (-Z.array()).exp());
}

std::tuple<Eigen::MatrixXd, double, double> LogisticRegression::Propagate(Eigen::MatrixXd W, double b, Eigen::MatrixXd X, Eigen::MatrixXd y, double lambda) {

    int m = y.rows();

    // Forward pass. X is m-by-n and W is n-by-1, so Z = W^T X^T + b and the
    // activations A are 1-by-m row vectors.
    Eigen::MatrixXd Z = (W.transpose() * X.transpose()).array() + b;
    Eigen::MatrixXd A = Sigmoid(Z);

    // Cross-entropy cost: J = -(1/m) * sum(y*log(a) + (1-y)*log(1-a)).
    Eigen::MatrixXd cross_entropy = -(y.transpose() * (Eigen::VectorXd)A.array().log().transpose()
        + ((Eigen::VectorXd)(1 - y.array())).transpose() * (Eigen::VectorXd)(1 - A.array()).log().transpose()) / m;

    // L2 (ridge) penalty: (lambda / 2m) * ||W||^2.
    double l2_reg_cost = W.array().pow(2).sum() * (lambda / (2 * m));

    double cost = static_cast<double>(cross_entropy(0, 0)) + l2_reg_cost;

    // Gradients: dw = (1/m)(A - y^T) X + (lambda/m) W^T (1-by-n),
    // db = (1/m) * sum(A - y^T).
    Eigen::MatrixXd dw = ((A - y.transpose()) * X) / m + (lambda / m * W).transpose();

    double db = (A - y.transpose()).array().sum() / m;

    return std::make_tuple(dw, db, cost);
}

std::tuple<Eigen::MatrixXd, double, Eigen::MatrixXd, double, std::list<double>> LogisticRegression::Optimize(Eigen::MatrixXd W, double b, Eigen::MatrixXd X, Eigen::MatrixXd y, int num_iter, double learning_rate, double lambda, bool log_cost) {

    std::list<double> costsList;

    Eigen::MatrixXd dw;
    double db, cost;

    for (int i = 0; i < num_iter; i++) {
        // Gradients and cost at the current parameters.
        std::tie(dw, db, cost) = Propagate(W, b, X, y, lambda);

        // Gradient-descent step. dw is 1-by-n, so transpose it to match W.
        W = W - (learning_rate * dw).transpose();
        b = b - (learning_rate * db);

        // Record (and optionally print) the cost every 100 iterations.
        if (i % 100 == 0) {
            costsList.push_back(cost);
            if (log_cost) {
                std::cout << "Cost after iteration " << i << ": " << cost << std::endl;
            }
        }
    }

    return std::make_tuple(W, b, dw, db, costsList);
}

Eigen::MatrixXd LogisticRegression::Predict(Eigen::MatrixXd W, double b, Eigen::MatrixXd X) {

    int m = X.rows();

    // Row vector with one prediction per example.
    Eigen::MatrixXd y_pred = Eigen::VectorXd::Zero(m).transpose();

    // Forward pass, as in Propagate().
    Eigen::MatrixXd Z = (W.transpose() * X.transpose()).array() + b;
    Eigen::MatrixXd A = Sigmoid(Z);

    // Threshold the probabilities at 0.5 to produce 0/1 class labels.
    for (int i = 0; i < A.cols(); i++) {
        y_pred(0, i) = (A(0, i) <= 0.5) ? 0 : 1;
    }

    return y_pred.transpose();
}
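
// Example usage (a minimal sketch, not part of the class itself). It assumes
// LogisticRegression.h exposes these methods as callable members, that X_train
// is an m-by-n Eigen::MatrixXd of features, and that y_train is an m-by-1
// matrix of 0/1 labels; X_train, y_train, and X_test are hypothetical names.
//
//   LogisticRegression lr;
//   Eigen::MatrixXd W = Eigen::MatrixXd::Zero(X_train.cols(), 1); // n-by-1 weights
//   double b = 0.0;
//
//   Eigen::MatrixXd dw;
//   double db;
//   std::list<double> costs;
//   std::tie(W, b, dw, db, costs) = lr.Optimize(W, b, X_train, y_train,
//       /*num_iter=*/1000, /*learning_rate=*/0.01, /*lambda=*/0.1, /*log_cost=*/true);
//
//   Eigen::MatrixXd y_pred = lr.Predict(W, b, X_test); // one 0/1 label per row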