class NeuralNetwork

A feed-forward neural network built on Numo (DFloat, NMath). Training is per-sample backpropagation with a configurable learning rate and optional momentum.

Public Class Methods

load(path)
# File lib/NeuralNet.rb, line 105
def self.load(path)
  if File.exist?(path)
    # deserialize a network previously written by #save
    t = File.binread(path)
    return Marshal.load(t)
  else
    raise Errno::ENOENT, path
  end
end
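
A minimal usage sketch (the file name is hypothetical):

  nn = NeuralNetwork.load("model.bin")   # raises Errno::ENOENT if the file is missing

Because deserialization uses Marshal.load, only load files you wrote yourself with #save; Marshal is not safe on untrusted input.
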
new(inputNodes: 0, hiddenNodes: [], outputNodes: 0, learningRate: 0.01, activation: "sigmoid", oneHot: false, reluFactor: 1, momentum: 0.0)
# File lib/NeuralNet.rb, line 8
def initialize(inputNodes: 0, hiddenNodes: [], outputNodes: 0,
  learningRate: 0.01, activation: "sigmoid", oneHot: false,
  reluFactor: 1, momentum: 0.0)
  @nInputs = inputNodes
  @nHidden = hiddenNodes
  @nHiddenLayers = hiddenNodes.length
  @nOutputs = outputNodes
  @learningRate = learningRate
  @momentum = momentum
  @hiddenWeights = []
  @hiddenBias = []
  @prevHWeightDeltas = []
  @prevHBiasDeltas = []

  # input -> first hidden layer: weights and biases drawn uniformly from [-1, 1)
  tmp1, tmp2 = @nInputs, @nHidden[0]
  @hiddenWeights[0] = DFloat.new([tmp2, tmp1]).rand * 2 - 1
  @hiddenBias[0] = DFloat.new([tmp2, 1]).rand * 2 - 1

  # Hidden Layers Update Matrix (momentum)
  @prevHWeightDeltas[0] = DFloat.zeros([tmp2,tmp1])
  @prevHBiasDeltas[0] = DFloat.zeros([tmp2,1])

  # remaining hidden layers, each sized against the previous layer's width
  for i in (1...@nHiddenLayers)
    tmp1, tmp2 = @nHidden[i-1], @nHidden[i]
    @hiddenWeights[i] = DFloat.new([tmp2, tmp1]).rand * 2 - 1
    @hiddenBias[i] = DFloat.new([tmp2, 1]).rand * 2 - 1
    @prevHWeightDeltas[i] = DFloat.zeros([tmp2, tmp1])
    @prevHBiasDeltas[i] = DFloat.zeros([tmp2, 1])
  end

  @outputWeights = DFloat.new([@nOutputs, @nHidden[@nHiddenLayers-1]]).rand * 2 - 1
  @outputBias = DFloat.new([@nOutputs, 1]).rand * 2 - 1

  # Output Layer Update Matrix (momentum)
  @prevOWeightDeltas = DFloat.zeros([@nOutputs, @nHidden[@nHiddenLayers-1]])
  @prevOBiasDeltas = DFloat.zeros([@nOutputs, 1])

  # activation functions are looked up by name via method() at call time
  @hiddenActivation = activation
  @hiddenActivationDerv = activation + "_prime"
  @reluFactor = reluFactor.to_f if activation == "relu"

  if oneHot
    # one-hot targets: softmax output with the cross-entropy shortcut delta
    @outputActivation = "softmax"
    @outDel = "softmax_out_delta"
  else
    @outputActivation = @hiddenActivation
    @outputActivationDerv = @hiddenActivationDerv
    @outDel = "_out_delta"
  end
end
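
A minimal construction sketch (layer sizes and hyperparameters are illustrative):

  # 2 inputs -> hidden layers of 4 and 3 nodes -> 1 output
  nn = NeuralNetwork.new(inputNodes: 2, hiddenNodes: [4, 3], outputNodes: 1,
                         learningRate: 0.1, activation: "sigmoid")

Weights and biases start uniformly in [-1, 1); passing oneHot: true switches the output layer to softmax with the cross-entropy delta.
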

Public Instance Methods

predict(data)
# File lib/NeuralNet.rb, line 93
def predict(data)
  x = DFloat[data].transpose
  activations = [x]
  # feed forward
  for i in (0...@nHiddenLayers)
    x1 = method(@hiddenActivation).call(@hiddenWeights[i].dot(activations[-1])+@hiddenBias[i])
    activations.push(x1)
  end
  out = method(@outputActivation).call(@outputWeights.dot(activations[-1])+@outputBias).flatten.to_a
  out
end
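
A usage sketch, assuming the two-input network built under new above:

  out = nn.predict([0.3, 0.7])   # => flat Array of @nOutputs Floats
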
save(path)
# File lib/NeuralNet.rb, line 114
def save(path)
  # serialize the whole object graph with Marshal
  File.binwrite(path, Marshal.dump(self))
end
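
A round-trip sketch (the path is hypothetical):

  nn.save("model.bin")
  restored = NeuralNetwork.load("model.bin")
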
train!(data, label)
# File lib/NeuralNet.rb, line 61
def train!(data,label)
  x = DFloat[data].transpose
  y = DFloat[label].transpose
  activations = [x]
  # feed forward
  for i in (0...@nHiddenLayers)
    x1 = method(@hiddenActivation).call(@hiddenWeights[i].dot(activations[-1])+@hiddenBias[i])
    activations.push(x1)
  end
  output = method(@outputActivation).call(@outputWeights.dot(activations[-1])+@outputBias)
  # backpropagation
  diff = output - y
  outdelta = method(@outDel).call(output, diff)

  @prevOBiasDeltas = @momentum * @prevOBiasDeltas + outdelta
  @prevOWeightDeltas = @momentum * @prevOWeightDeltas + outdelta.dot(activations[-1].transpose)

  @outputBias -= @prevOBiasDeltas
  @outputWeights -= @prevOWeightDeltas

  # propagate the error back through each hidden layer
  # (note: deltas are propagated through the freshly updated weights)
  delta = @outputWeights.transpose.dot(outdelta)
  (@nHiddenLayers-1).downto(0) do |i|
    delta = delta * (method(@hiddenActivationDerv).call(activations[i+1]))
    @prevHWeightDeltas[i] = @momentum * @prevHWeightDeltas[i] + delta.dot(activations[i].transpose)
    @prevHBiasDeltas[i] = @momentum * @prevHBiasDeltas[i] + delta

    @hiddenWeights[i] -= @prevHWeightDeltas[i]
    @hiddenBias[i] -= @prevHBiasDeltas[i]
    delta = @hiddenWeights[i].transpose.dot(delta)
  end
end
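
A minimal training sketch on XOR (epoch count and hyperparameters are illustrative, not tuned):

  xor = [[[0.0, 0.0], [0.0]], [[0.0, 1.0], [1.0]],
         [[1.0, 0.0], [1.0]], [[1.0, 1.0], [0.0]]]
  nn = NeuralNetwork.new(inputNodes: 2, hiddenNodes: [4], outputNodes: 1,
                         learningRate: 0.5, momentum: 0.9)
  10_000.times { xor.each { |data, label| nn.train!(data, label) } }
  nn.predict([1.0, 0.0])   # should approach [1.0]
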

Private Instance Methods

_out_delta(output, diff)
# File lib/NeuralNet.rb, line 150
def _out_delta(output,diff)
  method(@outputActivationDerv).call(output) * diff * @learningRate
end
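
Assuming a squared-error loss, this is the standard output-layer delta, pre-scaled by the learning rate:

  \delta_{out} = \eta \, f'(o) \odot (o - y)

where o is the output, y the label, \eta the learning rate, and \odot the elementwise product.
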
relu(x)
# File lib/NeuralNet.rb, line 119
def relu(x)
  # (x > 0) yields a Numo::Bit mask, so negative entries are zeroed
  (x * (x > 0)) * @reluFactor
end
relu_prime(y)
# File lib/NeuralNet.rb, line 123
def relu_prime(y)
  @reluFactor * (y > 0)
end
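
With \alpha = reluFactor, this pair implements a scaled ReLU:

  f(x) = \alpha \max(0, x), \qquad f'(x) = \alpha \, [x > 0]

The default \alpha = 1 gives the standard ReLU. Since y = f(x) is positive exactly where x is, the derivative can be computed from the stored activation y.
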
sigmoid(x)
# File lib/NeuralNet.rb, line 137
def sigmoid(x)
  return (NMath.exp(-x) + 1)**-1  # 1 / (1 + e^(-x))
end
sigmoid_prime(y)
# File lib/NeuralNet.rb, line 141
def sigmoid_prime(y)
  return y * (1 - y)
end
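
sigmoid_prime takes the already-activated output y rather than the pre-activation, exploiting the identity

  \sigma'(x) = \sigma(x)\,(1 - \sigma(x)) = y\,(1 - y)

which is why train! passes the stored activations to the derivative functions.
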
softmax(x)
# File lib/NeuralNet.rb, line 127
def softmax(x)
  v = NMath.exp(x)
  return v / v.sum
end
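
The listing exponentiates x directly, which can overflow for large inputs. A common stabilization (a sketch, not part of this library) subtracts the maximum first; softmax is invariant under that shift:

  def stable_softmax(x)  # hypothetical helper, not in the gem
    v = NMath.exp(x - x.max)  # shifting by the max leaves the result unchanged
    v / v.sum
  end
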
softmax_out_delta(output, diff)

Output delta for softmax paired with cross-entropy loss; the activation derivative cancels, leaving only the scaled difference (see the note after the listing).

# File lib/NeuralNet.rb, line 146
def softmax_out_delta(output,diff)
  diff * @learningRate
end
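
The shortcut follows from combining softmax with the cross-entropy loss L = -\sum_i y_i \log o_i: the Jacobian product collapses to

  \frac{\partial L}{\partial z} = o - y

so the delta is just diff scaled by the learning rate, and softmax_prime is never needed on the output layer.
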
softmax_prime(y)
# File lib/NeuralNet.rb, line 132
def softmax_prime(y)
  phi = y.dot(DFloat.ones(1, @nOutputs))
  return phi * (DFloat.eye(@nOutputs) - phi.transpose)
end
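
With y the softmax output as a column vector, phi tiles y across @nOutputs columns, so the elementwise product builds the full softmax Jacobian:

  \frac{\partial y_i}{\partial x_j} = y_i (\delta_{ij} - y_j)

that is, entry (i, j) of phi * (I - phi^T).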