Neural Network Regression From Scratch Using C#

It had been a long time since I looked at regression (predicting a single numeric value) using a neural network implemented from scratch using C#. So, for mental exercise, I zapped an example out.

I used one of my standard synthetic datasets where the goal is to predict a person’s income from their sex, age, State, and political leaning. The data looks like:

1, 0.24, 1, 0, 0, 0.2950, 0, 0, 1
0, 0.39, 0, 0, 1, 0.5120, 0, 1, 0
1, 0.63, 0, 1, 0, 0.7580, 1, 0, 0
0, 0.36, 1, 0, 0, 0.4450, 0, 1, 0
. . .

The fields are sex (male = 0, female = 1), age (divided by 100), State (Michigan = 100, Nebraska = 010, Oklahoma = 001), income (divided by 100,000), and political leaning (conservative = 100, moderate = 010, liberal = 001). There are 200 training items and 40 test items.

For simplicity, I used a single hidden layer with tanh() activation, and identity() activation on the single output node. For training I used the “online” approach where the network weights and biases are updated after every training item as opposed to full-batch or mini-batch training.

For me, the exercise was a lot of fun, but only because I have spent many hundreds of hours working with neural networks over many years. The back-propagation from scratch is somewhat intimidating but once you know how it works, it’s not impossible to grasp.

Note: After I wrote this post, I realized that I should have implemented a batch training method for completeness. I’ll do that when I get some free time. . .



Modern AI systems are based on software Transformer Modules, which in turn are based on neural networks. Here are two artificial images that I generated using the DALL-E tool. Left: “Farm worker harvesting typewriters”. Right: “Giant robot growing humans”.


Demo code. Replace “lt” (less than), “gt” (greater than), “lte”, “gte” with the corresponding relational operator symbols.

using System;
using System.Globalization;
using System.IO;

namespace NeuralNetworkRegression
{
  internal class NeuralRegressionProgram
  {
    // Demo entry point. Loads the synthetic People train/test data,
    // creates and trains an 8-100-1 regression network, reports
    // accuracy, and predicts income for one unseen person.
    static void Main(string[] args)
    {
      Console.WriteLine("\nNeural network regression C# ");

      Console.WriteLine("Predict income from sex," +
        " age, State, political leaning ");

      // ------------------------------------------------------

      // Predictor columns are 0,1,2,3,4,6,7,8; column 5 is the
      // target income (already divided by 100,000).
      string trainFile =
        "..\\..\\..\\Data\\people_train.txt";
      // sex, age, State, income, politics
      double[][] trainX = Utils.MatLoad(trainFile,
        new int[] { 0, 1, 2, 3, 4, 6, 7, 8 }, ',', "#");
      double[] trainY =
        Utils.MatToVec(Utils.MatLoad(trainFile,
        new int[] { 5 }, ',', "#"));

      string testFile =
        "..\\..\\..\\Data\\people_test.txt";
      double[][] testX = Utils.MatLoad(testFile,
        new int[] { 0, 1, 2, 3, 4, 6, 7, 8 }, ',', "#");
      double[] testY =
        Utils.MatToVec(Utils.MatLoad(testFile,
        new int[] { 5 }, ',', "#"));

      Console.WriteLine("\nFirst three X data: ");
      for (int i = 0; i < 3; ++i)
        Utils.VecShow(trainX[i], 4, 9, true);

      Console.WriteLine("\nFirst three target Y : ");
      for (int i = 0; i < 3; ++i)
        Console.WriteLine(trainY[i].ToString("F5"));

      // ------------------------------------------------------

      Console.WriteLine("\nCreating 8-100-1 tanh Identity " +
        "neural network ");
      NeuralNetwork nn =
        new NeuralNetwork(8, 100, 1, seed: 0);

      int maxEpochs = 400;
      double lrnRate = 0.005;
      Console.WriteLine("\nmaxEpochs = " +
        maxEpochs);
      Console.WriteLine("lrnRate = " +
        lrnRate.ToString("F3"));

      // ------------------------------------------------------

      Console.WriteLine("\nStarting training ");
      nn.Train(trainX, trainY, lrnRate, maxEpochs);
      Console.WriteLine("Done ");

      // A prediction counts as correct if within 10% of the target.
      double trainAcc = nn.Accuracy(trainX, trainY, 0.10);
      Console.WriteLine("\nAccuracy on train data = " +
        trainAcc.ToString("F4"));

      double testAcc = nn.Accuracy(testX, testY, 0.10);
      Console.WriteLine("Accuracy on test data  = " +
        testAcc.ToString("F4"));

      Console.WriteLine("\nPredicting income for male" +
        " 34 Oklahoma moderate ");
      double[] X = new double[] { 0, 0.34, 0, 0, 1, 0, 1, 0 };
      double y = nn.ComputeOutput(X);
      Console.WriteLine("Predicted income = " +
        y.ToString("F5"));

      Console.WriteLine("\nEnd demo ");
      Console.ReadLine();
    } // Main

  } // Program

  // ----------------------------------------------------------

  public class NeuralNetwork
  {
    // Fully connected feed-forward network with one hidden layer:
    // tanh() hidden activation, identity() output activation.
    // Trained with "online" stochastic gradient descent (weights
    // updated after every training item).

    private int ni; // number input nodes
    private int nh; // number hidden nodes
    private int no; // number output nodes (1 for regression)

    private double[] inputs;
    private double[][] ihWeights; // input-hidden
    private double[] hBiases;
    private double[] hOutputs;

    private double[][] hoWeights; // hidden-output
    private double[] oBiases;
    private double[] output;  // single val as array

    private Random rnd; // weight init and training-order shuffle

    // --------------------------------------------------------

    // Allocate all weights/biases and initialize them to small
    // random values. seed makes the demo reproducible.
    public NeuralNetwork(int numIn, int numHid,
      int numOut, int seed)
    {
      this.ni = numIn;
      this.nh = numHid;
      this.no = numOut;  // 1 for regression

      this.inputs = new double[numIn];

      this.ihWeights = Utils.MatCreate(numIn, numHid);
      this.hBiases = new double[numHid];
      this.hOutputs = new double[numHid];

      this.hoWeights = Utils.MatCreate(numHid, numOut);
      this.oBiases = new double[numOut];  // [1]
      this.output = new double[numOut];  // [1]

      this.rnd = new Random(seed);
      this.InitWeights(); // all weights and biases
    } // ctor

    // --------------------------------------------------------

    private void InitWeights() // helper for ctor
    {
      // weights and biases to small random values in [-0.01, +0.01)
      double lo = -0.01; double hi = +0.01;
      int numWts = (this.ni * this.nh) +
        (this.nh * this.no) + this.nh + this.no;
      double[] initialWeights = new double[numWts];
      for (int i = 0; i < initialWeights.Length; ++i)
        initialWeights[i] =
          (hi - lo) * rnd.NextDouble() + lo;
      this.SetWeights(initialWeights);
    }

    // --------------------------------------------------------

    // Copy serialized weights and biases in wts[] to ih weights,
    // h biases, ho weights, o biases -- the inverse of GetWeights().
    // Throws ArgumentException if wts has the wrong length.
    public void SetWeights(double[] wts)
    {
      int numWts = (this.ni * this.nh) +
        (this.nh * this.no) + this.nh + this.no;
      if (wts.Length != numWts)
        throw new ArgumentException("Bad array in SetWeights");

      int k = 0; // points into wts param

      for (int i = 0; i < this.ni; ++i)
        for (int j = 0; j < this.nh; ++j)
          this.ihWeights[i][j] = wts[k++];
      for (int i = 0; i < this.nh; ++i)
        this.hBiases[i] = wts[k++];
      for (int i = 0; i < this.nh; ++i)
        for (int j = 0; j < this.no; ++j)
          this.hoWeights[i][j] = wts[k++];
      for (int i = 0; i < this.no; ++i)
        this.oBiases[i] = wts[k++];
    }

    // --------------------------------------------------------

    // Serialize all weights and biases into one flat array, in the
    // same order that SetWeights() expects.
    public double[] GetWeights()
    {
      int numWts = (this.ni * this.nh) +
        (this.nh * this.no) + this.nh + this.no;
      double[] result = new double[numWts];
      int k = 0;
      for (int i = 0; i < ihWeights.Length; ++i)
        for (int j = 0; j < this.ihWeights[0].Length; ++j)
          result[k++] = this.ihWeights[i][j];
      for (int i = 0; i < this.hBiases.Length; ++i)
        result[k++] = this.hBiases[i];
      for (int i = 0; i < this.hoWeights.Length; ++i)
        for (int j = 0; j < this.hoWeights[0].Length; ++j)
          result[k++] = this.hoWeights[i][j];
      for (int i = 0; i < this.oBiases.Length; ++i)
        result[k++] = this.oBiases[i];
      return result;
    }

    // --------------------------------------------------------

    // Forward pass. Stores intermediate hidden outputs (needed by
    // Train() for back-propagation) and returns the single
    // predicted value.
    public double ComputeOutput(double[] x)
    {
      double[] hSums = new double[this.nh]; // scratch
      double[] oSums = new double[this.no]; // out sums

      for (int i = 0; i < x.Length; ++i)
        this.inputs[i] = x[i];
      // note: no need to copy x-values unless
      // you implement a ToString.
      // more efficient to simply use the x[] directly.

      // 1. compute i-h sum of weights * inputs
      for (int j = 0; j < this.nh; ++j)
        for (int i = 0; i < this.ni; ++i)
          hSums[j] += this.inputs[i] *
            this.ihWeights[i][j]; // note +=

      // 2. add biases to hidden sums
      for (int i = 0; i < this.nh; ++i)
        hSums[i] += this.hBiases[i];

      // 3. apply hidden activation
      for (int i = 0; i < this.nh; ++i)
        this.hOutputs[i] = HyperTan(hSums[i]);

      // 4. compute h-o sum of wts * hOutputs
      for (int j = 0; j < this.no; ++j)
        for (int i = 0; i < this.nh; ++i)
          oSums[j] += this.hOutputs[i] *
            this.hoWeights[i][j];  // [1]

      // 5. add biases to output sums
      for (int i = 0; i < this.no; ++i)
        oSums[i] += this.oBiases[i];

      // 6. apply output activation
      for (int i = 0; i < this.no; ++i)
        this.output[i] = Identity(oSums[i]);

      return this.output[0];  // single value
    }

    // --------------------------------------------------------

    // tanh with clamping: avoids Math.Tanh() work for extreme
    // arguments where the result is effectively +/-1.
    private static double HyperTan(double x)
    {
      if (x < -10.0) return -1.0;
      else if (x > 10.0) return 1.0;
      else return Math.Tanh(x);
    }

    // --------------------------------------------------------

    // identity activation for the regression output node
    private static double Identity(double x)
    {
      return x;
    }

    // --------------------------------------------------------

    // Online SGD training: for each item, forward pass, compute
    // gradients by back-propagation, update weights immediately.
    // Training progress (MSE and accuracy) is shown ~10 times.
    public void Train(double[][] trainX,
      double[] trainY, double lrnRate,
      int maxEpochs)
    {
      // "online" version. TODO: "batch" version.
      double[][] hoGrads =
        Utils.MatCreate(this.nh, this.no);
      double[] obGrads = new double[this.no];
      double[][] ihGrads =
        Utils.MatCreate(this.ni, this.nh);
      double[] hbGrads = new double[this.nh];

      double[] oSignals = new double[this.no];
      double[] hSignals = new double[this.nh];

      int n = trainX.Length;
      int[] indices = new int[n];
      for (int i = 0; i < n; ++i)
        indices[i] = i;
      // guard: maxEpochs < 10 would make freq 0 and
      // epoch % freq would throw DivideByZeroException
      int freq = Math.Max(1, maxEpochs / 10);

      for (int epoch = 0; epoch < maxEpochs; ++epoch)
      {
        Shuffle(indices);  // visit items in random order
        for (int ii = 0; ii < n; ++ii)
        {
          int idx = indices[ii];
          double[] x = trainX[idx];
          double y = trainY[idx];
          this.ComputeOutput(x);  // fills this.hOutputs, this.output

          // 1. compute output node signals
          for (int k = 0; k < this.no; ++k)
          {
            double derivative = 1.0;  // identity
            // E=(t-o)^2 gives o-t term
            oSignals[k] = derivative *
              (this.output[k] - y);
          }

          // --------------------------------------------------

          // 2. hidden-to-output gradients
          for (int j = 0; j < this.nh; ++j)
          {
            for (int k = 0; k < this.no; ++k)
            {
              hoGrads[j][k] = oSignals[k] *
                this.hOutputs[j];
            }
          }

          // 3. output node bias gradients
          for (int k = 0; k < this.no; ++k)
          {
            obGrads[k] = oSignals[k] * 1.0;  // 1.0 dummy
          }

          // --------------------------------------------------

          // 4. compute hidden node signals
          for (int j = 0; j < this.nh; ++j)
          {
            double sum = 0.0;
            for (int k = 0; k < this.no; ++k)
            {
              sum += oSignals[k] * this.hoWeights[j][k];
            }
            // tanh'(z) = (1 - tanh(z)) * (1 + tanh(z))
            double derivative =
              (1 - this.hOutputs[j]) *
              (1 + this.hOutputs[j]);  // tanh
            hSignals[j] = derivative * sum;
          }

          // --------------------------------------------------

          // 5. input-to-hidden gradients
          for (int i = 0; i < this.ni; ++i)
          {
            for (int j = 0; j < this.nh; ++j)
            {
              ihGrads[i][j] = hSignals[j] *
                this.inputs[i];
            }
          }

          // --------------------------------------------------

          // 6. hidden node bias gradients
          for (int j = 0; j < this.nh; ++j)
          {
            hbGrads[j] = hSignals[j] * 1.0;  // 1.0 dummy
          }

          // 7. update input-to-hidden weights
          for (int i = 0; i < this.ni; ++i)
          {
            for (int j = 0; j < this.nh; ++j)
            {
              double delta = -1.0 * lrnRate *
                ihGrads[i][j];
              this.ihWeights[i][j] += delta;
            }
          }

          // --------------------------------------------------

          // 8. update hidden node biases
          for (int j = 0; j < this.nh; ++j)
          {
            double delta = -1.0 * lrnRate * hbGrads[j];
            this.hBiases[j] += delta;
          }

          // 9. update hidden-to-output weights
          for (int j = 0; j < this.nh; ++j)
          {
            for (int k = 0; k < this.no; ++k)
            {
              double delta = -1.0 * lrnRate *
                hoGrads[j][k];
              this.hoWeights[j][k] += delta;
            }
          }

          // --------------------------------------------------

          // 10. update output node biases
          for (int k = 0; k < this.no; ++k)
          {
            double delta = -1.0 * lrnRate * obGrads[k];
            this.oBiases[k] += delta;
          }

        } // ii each train X

        if (epoch % freq == 0)  // status
        {
          double mse = this.Error(trainX, trainY);
          double acc = this.Accuracy(trainX, trainY, 0.10);

          string s1 = "epoch: " +
            epoch.ToString().PadLeft(4);
          string s2 = "  MSE = " + mse.ToString("F4");
          string s3 = "  acc = " + acc.ToString("F4");
          Console.WriteLine(s1 + s2 + s3);
        }

      } // epoch

    } // Train

    // --------------------------------------------------------

    // currently unused; retained for a future batch-training
    // version where accumulated gradients must be reset
    private static void ZeroOut(double[] vector)
    {
      for (int i = 0; i < vector.Length; ++i)
        vector[i] = 0.0;
    }

    // --------------------------------------------------------

    // matrix overload; see vector version above
    private static void ZeroOut(double[][] matrix)
    {
      for (int i = 0; i < matrix.Length; ++i)
        for (int j = 0; j < matrix[i].Length; ++j)
          matrix[i][j] = 0.0;
    }

    // --------------------------------------------------------

    // in-place Fisher-Yates shuffle using this.rnd
    private void Shuffle(int[] sequence)
    {
      for (int i = 0; i < sequence.Length; ++i)
      {
        int r = this.rnd.Next(i, sequence.Length);
        int tmp = sequence[r];
        sequence[r] = sequence[i];
        sequence[i] = tmp;
        //sequence[i] = i; // for testing
      }
    } // Shuffle

    // --------------------------------------------------------

    // mean squared error over a dataset
    public double Error(double[][] trainX, double[] trainY)
    {
      // MSE
      int n = trainX.Length;
      double sumSquaredError = 0.0;
      for (int i = 0; i < n; ++i)
      {
        double predY = this.ComputeOutput(trainX[i]);
        double actualY = trainY[i];
        sumSquaredError += (predY - actualY) *
          (predY - actualY);
      }
      return sumSquaredError / n;
    } // Error

    // --------------------------------------------------------

    // Fraction of items where the prediction is within pctClose
    // (e.g. 0.10 = 10%) of the true target value.
    public double Accuracy(double[][] dataX,
      double[] dataY, double pctClose)
    {
      // if predicted within pctClose of true target
      int n = dataX.Length;
      int nCorrect = 0;
      int nWrong = 0;
      for (int i = 0; i < n; ++i)
      {
        double predY = this.ComputeOutput(dataX[i]);
        double actualY = dataY[i];
        if (Math.Abs(predY - actualY) <
          Math.Abs(pctClose * actualY))
          ++nCorrect;
        else
          ++nWrong;
      }
      return (nCorrect * 1.0) / (nCorrect + nWrong);
    }

    // --------------------------------------------------------

  } // NeuralNetwork class

  // ----------------------------------------------------------

  public class Utils
  {
    // Static helpers for matrix creation, file loading, and
    // console display used by the neural network demo.

    // reshape a flat vector into a rows x cols matrix (row-major)
    public static double[][] VecToMat(double[] vec,
      int rows, int cols)
    {
      // vector to row vec/matrix
      double[][] result = MatCreate(rows, cols);
      int k = 0;
      for (int i = 0; i < rows; ++i)
        for (int j = 0; j < cols; ++j)
          result[i][j] = vec[k++];
      return result;
    }

    // --------------------------------------------------------

    // allocate a rows x cols jagged matrix of zeros
    public static double[][] MatCreate(int rows,
      int cols)
    {
      double[][] result = new double[rows][];
      for (int i = 0; i < rows; ++i)
        result[i] = new double[cols];
      return result;
    }

    // --------------------------------------------------------

    // count lines in file fn that do not start with comment
    static int NumNonCommentLines(string fn,
      string comment)
    {
      int ct = 0;
      string line = "";
      // using blocks guarantee the file is closed even if
      // ReadLine throws
      using (FileStream ifs = new FileStream(fn,
        FileMode.Open))
      using (StreamReader sr = new StreamReader(ifs))
      {
        while ((line = sr.ReadLine()) != null)
          if (line.StartsWith(comment) == false)
            ++ct;
      }
      return ct;
    }

    // --------------------------------------------------------

    // Load the columns listed in usecols from delimited file fn
    // into a matrix, skipping lines that start with comment.
    public static double[][] MatLoad(string fn,
      int[] usecols, char sep, string comment)
    {
      // count number of non-comment lines
      int nRows = NumNonCommentLines(fn, comment);
      int nCols = usecols.Length;
      double[][] result = MatCreate(nRows, nCols);
      string line = "";
      string[] tokens = null;

      using (FileStream ifs = new FileStream(fn, FileMode.Open))
      using (StreamReader sr = new StreamReader(ifs))
      {
        int i = 0;
        while ((line = sr.ReadLine()) != null)
        {
          if (line.StartsWith(comment) == true)
            continue;
          tokens = line.Split(sep);
          for (int j = 0; j < nCols; ++j)
          {
            int k = usecols[j];  // into tokens
            // invariant culture: data files always use '.' as
            // the decimal point regardless of machine locale
            result[i][j] = double.Parse(tokens[k],
              CultureInfo.InvariantCulture);
          }
          ++i;
        }
      }
      return result;
    }

    // --------------------------------------------------------

    // flatten a matrix into a vector (row-major)
    public static double[] MatToVec(double[][] m)
    {
      int rows = m.Length;
      int cols = m[0].Length;
      double[] result = new double[rows * cols];
      int k = 0;
      for (int i = 0; i < rows; ++i)
        for (int j = 0; j < cols; ++j)
          result[k++] = m[i][j];

      return result;
    }

    // --------------------------------------------------------

    // display a matrix with dec decimals in columns of width wid
    public static void MatShow(double[][] m,
      int dec, int wid)
    {
      for (int i = 0; i < m.Length; ++i)
      {
        for (int j = 0; j < m[0].Length; ++j)
        {
          double v = m[i][j];
          if (Math.Abs(v) < 1.0e-8) v = 0.0; // hack: avoid "-0.00"
          Console.Write(v.ToString("F" +
            dec).PadLeft(wid));
        }
        Console.WriteLine("");
      }
    }

    // --------------------------------------------------------

    // display an int vector in columns of width wid
    public static void VecShow(int[] vec, int wid)
    {
      for (int i = 0; i < vec.Length; ++i)
        Console.Write(vec[i].ToString().PadLeft(wid));
      Console.WriteLine("");
    }

    // --------------------------------------------------------

    // display a double vector with dec decimals, width wid,
    // optionally followed by a newline
    public static void VecShow(double[] vec,
      int dec, int wid, bool newLine)
    {
      for (int i = 0; i < vec.Length; ++i)
      {
        double x = vec[i];
        if (Math.Abs(x) < 1.0e-8) x = 0.0; // avoid "-0.00"
        Console.Write(x.ToString("F" +
          dec).PadLeft(wid));
      }
      if (newLine == true)
        Console.WriteLine("");
    }

  } // Utils class

} // ns

Training data:

# people_train.txt
#
# sex (0 = male, 1 = female), age / 100,
# state (michigan = 100, nebraska = 010,
# oklahoma = 001),
# income / 100_000,
# politics (conservative = 100,
# moderate = 010, liberal = 001)
#
1, 0.24, 1, 0, 0, 0.2950, 0, 0, 1
0, 0.39, 0, 0, 1, 0.5120, 0, 1, 0
1, 0.63, 0, 1, 0, 0.7580, 1, 0, 0
0, 0.36, 1, 0, 0, 0.4450, 0, 1, 0
1, 0.27, 0, 1, 0, 0.2860, 0, 0, 1
1, 0.50, 0, 1, 0, 0.5650, 0, 1, 0
1, 0.50, 0, 0, 1, 0.5500, 0, 1, 0
0, 0.19, 0, 0, 1, 0.3270, 1, 0, 0
1, 0.22, 0, 1, 0, 0.2770, 0, 1, 0
0, 0.39, 0, 0, 1, 0.4710, 0, 0, 1
1, 0.34, 1, 0, 0, 0.3940, 0, 1, 0
0, 0.22, 1, 0, 0, 0.3350, 1, 0, 0
1, 0.35, 0, 0, 1, 0.3520, 0, 0, 1
0, 0.33, 0, 1, 0, 0.4640, 0, 1, 0
1, 0.45, 0, 1, 0, 0.5410, 0, 1, 0
1, 0.42, 0, 1, 0, 0.5070, 0, 1, 0
0, 0.33, 0, 1, 0, 0.4680, 0, 1, 0
1, 0.25, 0, 0, 1, 0.3000, 0, 1, 0
0, 0.31, 0, 1, 0, 0.4640, 1, 0, 0
1, 0.27, 1, 0, 0, 0.3250, 0, 0, 1
1, 0.48, 1, 0, 0, 0.5400, 0, 1, 0
0, 0.64, 0, 1, 0, 0.7130, 0, 0, 1
1, 0.61, 0, 1, 0, 0.7240, 1, 0, 0
1, 0.54, 0, 0, 1, 0.6100, 1, 0, 0
1, 0.29, 1, 0, 0, 0.3630, 1, 0, 0
1, 0.50, 0, 0, 1, 0.5500, 0, 1, 0
1, 0.55, 0, 0, 1, 0.6250, 1, 0, 0
1, 0.40, 1, 0, 0, 0.5240, 1, 0, 0
1, 0.22, 1, 0, 0, 0.2360, 0, 0, 1
1, 0.68, 0, 1, 0, 0.7840, 1, 0, 0
0, 0.60, 1, 0, 0, 0.7170, 0, 0, 1
0, 0.34, 0, 0, 1, 0.4650, 0, 1, 0
0, 0.25, 0, 0, 1, 0.3710, 1, 0, 0
0, 0.31, 0, 1, 0, 0.4890, 0, 1, 0
1, 0.43, 0, 0, 1, 0.4800, 0, 1, 0
1, 0.58, 0, 1, 0, 0.6540, 0, 0, 1
0, 0.55, 0, 1, 0, 0.6070, 0, 0, 1
0, 0.43, 0, 1, 0, 0.5110, 0, 1, 0
0, 0.43, 0, 0, 1, 0.5320, 0, 1, 0
0, 0.21, 1, 0, 0, 0.3720, 1, 0, 0
1, 0.55, 0, 0, 1, 0.6460, 1, 0, 0
1, 0.64, 0, 1, 0, 0.7480, 1, 0, 0
0, 0.41, 1, 0, 0, 0.5880, 0, 1, 0
1, 0.64, 0, 0, 1, 0.7270, 1, 0, 0
0, 0.56, 0, 0, 1, 0.6660, 0, 0, 1
1, 0.31, 0, 0, 1, 0.3600, 0, 1, 0
0, 0.65, 0, 0, 1, 0.7010, 0, 0, 1
1, 0.55, 0, 0, 1, 0.6430, 1, 0, 0
0, 0.25, 1, 0, 0, 0.4030, 1, 0, 0
1, 0.46, 0, 0, 1, 0.5100, 0, 1, 0
0, 0.36, 1, 0, 0, 0.5350, 1, 0, 0
1, 0.52, 0, 1, 0, 0.5810, 0, 1, 0
1, 0.61, 0, 0, 1, 0.6790, 1, 0, 0
1, 0.57, 0, 0, 1, 0.6570, 1, 0, 0
0, 0.46, 0, 1, 0, 0.5260, 0, 1, 0
0, 0.62, 1, 0, 0, 0.6680, 0, 0, 1
1, 0.55, 0, 0, 1, 0.6270, 1, 0, 0
0, 0.22, 0, 0, 1, 0.2770, 0, 1, 0
0, 0.50, 1, 0, 0, 0.6290, 1, 0, 0
0, 0.32, 0, 1, 0, 0.4180, 0, 1, 0
0, 0.21, 0, 0, 1, 0.3560, 1, 0, 0
1, 0.44, 0, 1, 0, 0.5200, 0, 1, 0
1, 0.46, 0, 1, 0, 0.5170, 0, 1, 0
1, 0.62, 0, 1, 0, 0.6970, 1, 0, 0
1, 0.57, 0, 1, 0, 0.6640, 1, 0, 0
0, 0.67, 0, 0, 1, 0.7580, 0, 0, 1
1, 0.29, 1, 0, 0, 0.3430, 0, 0, 1
1, 0.53, 1, 0, 0, 0.6010, 1, 0, 0
0, 0.44, 1, 0, 0, 0.5480, 0, 1, 0
1, 0.46, 0, 1, 0, 0.5230, 0, 1, 0
0, 0.20, 0, 1, 0, 0.3010, 0, 1, 0
0, 0.38, 1, 0, 0, 0.5350, 0, 1, 0
1, 0.50, 0, 1, 0, 0.5860, 0, 1, 0
1, 0.33, 0, 1, 0, 0.4250, 0, 1, 0
0, 0.33, 0, 1, 0, 0.3930, 0, 1, 0
1, 0.26, 0, 1, 0, 0.4040, 1, 0, 0
1, 0.58, 1, 0, 0, 0.7070, 1, 0, 0
1, 0.43, 0, 0, 1, 0.4800, 0, 1, 0
0, 0.46, 1, 0, 0, 0.6440, 1, 0, 0
1, 0.60, 1, 0, 0, 0.7170, 1, 0, 0
0, 0.42, 1, 0, 0, 0.4890, 0, 1, 0
0, 0.56, 0, 0, 1, 0.5640, 0, 0, 1
0, 0.62, 0, 1, 0, 0.6630, 0, 0, 1
0, 0.50, 1, 0, 0, 0.6480, 0, 1, 0
1, 0.47, 0, 0, 1, 0.5200, 0, 1, 0
0, 0.67, 0, 1, 0, 0.8040, 0, 0, 1
0, 0.40, 0, 0, 1, 0.5040, 0, 1, 0
1, 0.42, 0, 1, 0, 0.4840, 0, 1, 0
1, 0.64, 1, 0, 0, 0.7200, 1, 0, 0
0, 0.47, 1, 0, 0, 0.5870, 0, 0, 1
1, 0.45, 0, 1, 0, 0.5280, 0, 1, 0
0, 0.25, 0, 0, 1, 0.4090, 1, 0, 0
1, 0.38, 1, 0, 0, 0.4840, 1, 0, 0
1, 0.55, 0, 0, 1, 0.6000, 0, 1, 0
0, 0.44, 1, 0, 0, 0.6060, 0, 1, 0
1, 0.33, 1, 0, 0, 0.4100, 0, 1, 0
1, 0.34, 0, 0, 1, 0.3900, 0, 1, 0
1, 0.27, 0, 1, 0, 0.3370, 0, 0, 1
1, 0.32, 0, 1, 0, 0.4070, 0, 1, 0
1, 0.42, 0, 0, 1, 0.4700, 0, 1, 0
0, 0.24, 0, 0, 1, 0.4030, 1, 0, 0
1, 0.42, 0, 1, 0, 0.5030, 0, 1, 0
1, 0.25, 0, 0, 1, 0.2800, 0, 0, 1
1, 0.51, 0, 1, 0, 0.5800, 0, 1, 0
0, 0.55, 0, 1, 0, 0.6350, 0, 0, 1
1, 0.44, 1, 0, 0, 0.4780, 0, 0, 1
0, 0.18, 1, 0, 0, 0.3980, 1, 0, 0
0, 0.67, 0, 1, 0, 0.7160, 0, 0, 1
1, 0.45, 0, 0, 1, 0.5000, 0, 1, 0
1, 0.48, 1, 0, 0, 0.5580, 0, 1, 0
0, 0.25, 0, 1, 0, 0.3900, 0, 1, 0
0, 0.67, 1, 0, 0, 0.7830, 0, 1, 0
1, 0.37, 0, 0, 1, 0.4200, 0, 1, 0
0, 0.32, 1, 0, 0, 0.4270, 0, 1, 0
1, 0.48, 1, 0, 0, 0.5700, 0, 1, 0
0, 0.66, 0, 0, 1, 0.7500, 0, 0, 1
1, 0.61, 1, 0, 0, 0.7000, 1, 0, 0
0, 0.58, 0, 0, 1, 0.6890, 0, 1, 0
1, 0.19, 1, 0, 0, 0.2400, 0, 0, 1
1, 0.38, 0, 0, 1, 0.4300, 0, 1, 0
0, 0.27, 1, 0, 0, 0.3640, 0, 1, 0
1, 0.42, 1, 0, 0, 0.4800, 0, 1, 0
1, 0.60, 1, 0, 0, 0.7130, 1, 0, 0
0, 0.27, 0, 0, 1, 0.3480, 1, 0, 0
1, 0.29, 0, 1, 0, 0.3710, 1, 0, 0
0, 0.43, 1, 0, 0, 0.5670, 0, 1, 0
1, 0.48, 1, 0, 0, 0.5670, 0, 1, 0
1, 0.27, 0, 0, 1, 0.2940, 0, 0, 1
0, 0.44, 1, 0, 0, 0.5520, 1, 0, 0
1, 0.23, 0, 1, 0, 0.2630, 0, 0, 1
0, 0.36, 0, 1, 0, 0.5300, 0, 0, 1
1, 0.64, 0, 0, 1, 0.7250, 1, 0, 0
1, 0.29, 0, 0, 1, 0.3000, 0, 0, 1
0, 0.33, 1, 0, 0, 0.4930, 0, 1, 0
0, 0.66, 0, 1, 0, 0.7500, 0, 0, 1
0, 0.21, 0, 0, 1, 0.3430, 1, 0, 0
1, 0.27, 1, 0, 0, 0.3270, 0, 0, 1
1, 0.29, 1, 0, 0, 0.3180, 0, 0, 1
0, 0.31, 1, 0, 0, 0.4860, 0, 1, 0
1, 0.36, 0, 0, 1, 0.4100, 0, 1, 0
1, 0.49, 0, 1, 0, 0.5570, 0, 1, 0
0, 0.28, 1, 0, 0, 0.3840, 1, 0, 0
0, 0.43, 0, 0, 1, 0.5660, 0, 1, 0
0, 0.46, 0, 1, 0, 0.5880, 0, 1, 0
1, 0.57, 1, 0, 0, 0.6980, 1, 0, 0
0, 0.52, 0, 0, 1, 0.5940, 0, 1, 0
0, 0.31, 0, 0, 1, 0.4350, 0, 1, 0
0, 0.55, 1, 0, 0, 0.6200, 0, 0, 1
1, 0.50, 1, 0, 0, 0.5640, 0, 1, 0
1, 0.48, 0, 1, 0, 0.5590, 0, 1, 0
0, 0.22, 0, 0, 1, 0.3450, 1, 0, 0
1, 0.59, 0, 0, 1, 0.6670, 1, 0, 0
1, 0.34, 1, 0, 0, 0.4280, 0, 0, 1
0, 0.64, 1, 0, 0, 0.7720, 0, 0, 1
1, 0.29, 0, 0, 1, 0.3350, 0, 0, 1
0, 0.34, 0, 1, 0, 0.4320, 0, 1, 0
0, 0.61, 1, 0, 0, 0.7500, 0, 0, 1
1, 0.64, 0, 0, 1, 0.7110, 1, 0, 0
0, 0.29, 1, 0, 0, 0.4130, 1, 0, 0
1, 0.63, 0, 1, 0, 0.7060, 1, 0, 0
0, 0.29, 0, 1, 0, 0.4000, 1, 0, 0
0, 0.51, 1, 0, 0, 0.6270, 0, 1, 0
0, 0.24, 0, 0, 1, 0.3770, 1, 0, 0
1, 0.48, 0, 1, 0, 0.5750, 0, 1, 0
1, 0.18, 1, 0, 0, 0.2740, 1, 0, 0
1, 0.18, 1, 0, 0, 0.2030, 0, 0, 1
1, 0.33, 0, 1, 0, 0.3820, 0, 0, 1
0, 0.20, 0, 0, 1, 0.3480, 1, 0, 0
1, 0.29, 0, 0, 1, 0.3300, 0, 0, 1
0, 0.44, 0, 0, 1, 0.6300, 1, 0, 0
0, 0.65, 0, 0, 1, 0.8180, 1, 0, 0
0, 0.56, 1, 0, 0, 0.6370, 0, 0, 1
0, 0.52, 0, 0, 1, 0.5840, 0, 1, 0
0, 0.29, 0, 1, 0, 0.4860, 1, 0, 0
0, 0.47, 0, 1, 0, 0.5890, 0, 1, 0
1, 0.68, 1, 0, 0, 0.7260, 0, 0, 1
1, 0.31, 0, 0, 1, 0.3600, 0, 1, 0
1, 0.61, 0, 1, 0, 0.6250, 0, 0, 1
1, 0.19, 0, 1, 0, 0.2150, 0, 0, 1
1, 0.38, 0, 0, 1, 0.4300, 0, 1, 0
0, 0.26, 1, 0, 0, 0.4230, 1, 0, 0
1, 0.61, 0, 1, 0, 0.6740, 1, 0, 0
1, 0.40, 1, 0, 0, 0.4650, 0, 1, 0
0, 0.49, 1, 0, 0, 0.6520, 0, 1, 0
1, 0.56, 1, 0, 0, 0.6750, 1, 0, 0
0, 0.48, 0, 1, 0, 0.6600, 0, 1, 0
1, 0.52, 1, 0, 0, 0.5630, 0, 0, 1
0, 0.18, 1, 0, 0, 0.2980, 1, 0, 0
0, 0.56, 0, 0, 1, 0.5930, 0, 0, 1
0, 0.52, 0, 1, 0, 0.6440, 0, 1, 0
0, 0.18, 0, 1, 0, 0.2860, 0, 1, 0
0, 0.58, 1, 0, 0, 0.6620, 0, 0, 1
0, 0.39, 0, 1, 0, 0.5510, 0, 1, 0
0, 0.46, 1, 0, 0, 0.6290, 0, 1, 0
0, 0.40, 0, 1, 0, 0.4620, 0, 1, 0
0, 0.60, 1, 0, 0, 0.7270, 0, 0, 1
1, 0.36, 0, 1, 0, 0.4070, 0, 0, 1
1, 0.44, 1, 0, 0, 0.5230, 0, 1, 0
1, 0.28, 1, 0, 0, 0.3130, 0, 0, 1
1, 0.54, 0, 0, 1, 0.6260, 1, 0, 0

Test data:

# people_test.txt
#
0, 0.51, 1, 0, 0, 0.6120, 0, 1, 0
0, 0.32, 0, 1, 0, 0.4610, 0, 1, 0
1, 0.55, 1, 0, 0, 0.6270, 1, 0, 0
1, 0.25, 0, 0, 1, 0.2620, 0, 0, 1
1, 0.33, 0, 0, 1, 0.3730, 0, 0, 1
0, 0.29, 0, 1, 0, 0.4620, 1, 0, 0
1, 0.65, 1, 0, 0, 0.7270, 1, 0, 0
0, 0.43, 0, 1, 0, 0.5140, 0, 1, 0
0, 0.54, 0, 1, 0, 0.6480, 0, 0, 1
1, 0.61, 0, 1, 0, 0.7270, 1, 0, 0
1, 0.52, 0, 1, 0, 0.6360, 1, 0, 0
1, 0.30, 0, 1, 0, 0.3350, 0, 0, 1
1, 0.29, 1, 0, 0, 0.3140, 0, 0, 1
0, 0.47, 0, 0, 1, 0.5940, 0, 1, 0
1, 0.39, 0, 1, 0, 0.4780, 0, 1, 0
1, 0.47, 0, 0, 1, 0.5200, 0, 1, 0
0, 0.49, 1, 0, 0, 0.5860, 0, 1, 0
0, 0.63, 0, 0, 1, 0.6740, 0, 0, 1
0, 0.30, 1, 0, 0, 0.3920, 1, 0, 0
0, 0.61, 0, 0, 1, 0.6960, 0, 0, 1
0, 0.47, 0, 0, 1, 0.5870, 0, 1, 0
1, 0.30, 0, 0, 1, 0.3450, 0, 0, 1
0, 0.51, 0, 0, 1, 0.5800, 0, 1, 0
0, 0.24, 1, 0, 0, 0.3880, 0, 1, 0
0, 0.49, 1, 0, 0, 0.6450, 0, 1, 0
1, 0.66, 0, 0, 1, 0.7450, 1, 0, 0
0, 0.65, 1, 0, 0, 0.7690, 1, 0, 0
0, 0.46, 0, 1, 0, 0.5800, 1, 0, 0
0, 0.45, 0, 0, 1, 0.5180, 0, 1, 0
0, 0.47, 1, 0, 0, 0.6360, 1, 0, 0
0, 0.29, 1, 0, 0, 0.4480, 1, 0, 0
0, 0.57, 0, 0, 1, 0.6930, 0, 0, 1
0, 0.20, 1, 0, 0, 0.2870, 0, 0, 1
0, 0.35, 1, 0, 0, 0.4340, 0, 1, 0
0, 0.61, 0, 0, 1, 0.6700, 0, 0, 1
0, 0.31, 0, 0, 1, 0.3730, 0, 1, 0
1, 0.18, 1, 0, 0, 0.2080, 0, 0, 1
1, 0.26, 0, 0, 1, 0.2920, 0, 0, 1
0, 0.28, 1, 0, 0, 0.3640, 0, 0, 1
0, 0.59, 0, 0, 1, 0.6940, 0, 0, 1
This entry was posted in Machine Learning. Bookmark the permalink.