Neural Network code in Julia

I am currently trying to implement a Neural Net in Julia, with the goal of eventually implementing a stacked autoencoder. My code seems to work, but I would appreciate any constructive criticism. If there exists a style guide for Julia, I am not concerned with that; however, any other comments would be very much welcome. I would also like to be able to write an implementation that can be extended to more complicated architectures without making significant alterations to the basics of the code. This is not that in any way, but ideas on how to do this would be very helpful.

type ANN2


#
# Neural Network type...
#


# define vars
weights::Dict          # layer index => weight matrix
bias::Dict             # layer index => bias vector
As::Dict               # activations per layer (As[0] holds the input)
Ns::Dict               # pre-activation values per layer
Fs::Dict               # activation derivatives per layer
Ss::Dict               # backpropagated sensitivities per layer
weightdelta::Dict      # weight updates (accumulated or overwritten)
biasdelta::Dict        # bias updates (accumulated or overwritten)
shape::Array{Int64,1}  # layer sizes, input layer included
numlayers::Int64       # number of weight layers
averror::Float64       # average error

# define methods
forward::Function
calcuate_deltas::Function
init::Function
setshape::Function
sgm::Function
updateone::Function
updateepoch::Function
calculate_error::Function

# Constructor
function ANN2()
    this = new()
    
    this.weights = Dict{Int64,Any}()
    this.bias = Dict{Int64,Any}()
    this.As = Dict{Int64,Any}()
    this.Ns = Dict{Int64,Any}()
    this.Fs = Dict{Int64,Any}()
    this.weightdelta = Dict{Int64,Any}()
    this.biasdelta = Dict{Int64,Any}()
    this.Ss = Dict{Int64,Any}()
    this.numlayers = 0
    
    # Set the shape of the network
    this.setshape = function(shape)
        this.shape = shape
        this.numlayers = size(this.shape)[1] - 1
        return nothing
    end
    
    # initialise weights and bias
    this.init = function()
        for (ind,(a,b)) in enumerate(zip(this.shape[1:end-1],this.shape[2:end]))
            this.weights[ind] = rand(b,a)
            this.bias[ind] = rand(b)
        end
        return nothing
    end
    
    # Calculate output of network given one input
    this.forward = function (input::Array{Float64,1})
        this.As[0] = input 
        for i = 1:this.numlayers                
            this.Ns[i] = this.weights[i]*this.As[i-1] + this.bias[i]
            this.As[i] = this.sgm(this.Ns[i])
            this.Fs[i] = this.As[i].*(1-this.As[i])
        end
        return this.As[this.numlayers]
    end

    # calculate weight and bias updates
    # if avg is true then updates are accumulated 
    # if avg is false then updates are overwritten
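    # (Recursion implemented below, in the notation of the fields above:
    #    output layer (i == numlayers):  Ss[i] = Fs[i] .* (As[i] - target)
    #    hidden layers:                  Ss[i] = Fs[i] .* (weights[i+1]' * Ss[i+1])
    #    deltas:  weightdelta[i] = rate .* (Ss[i] * As[i-1]')  and
    #             biasdelta[i]   = rate .* Ss[i], accumulated when avg is true)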
    this.calcuate_deltas = function (input::Array{Float64,1},target::Array{Float64,1},rate::Float64,avg::Bool)
        this.forward(input)
        for i in reverse(1:this.numlayers)
            if i == this.numlayers
                this.Ss[i] = this.Fs[i].*(this.As[i] - target)
                if avg
                    this.weightdelta[i] = this.weightdelta[i]+rate.*(this.Ss[i]*this.As[i-1]')
                    this.biasdelta[i] = this.biasdelta[i]+rate.*this.Ss[i]
                else
                    this.weightdelta[i] = rate.*(this.Ss[i]*this.As[i-1]')
                    this.biasdelta[i] = rate.*this.Ss[i]
                end
            else
                this.Ss[i] = this.Fs[i].*(this.weights[i+1]'*this.Ss[i+1])
                if avg
                    this.weightdelta[i] = this.weightdelta[i]+rate.*(this.Ss[i]*this.As[i-1]')
                    this.biasdelta[i] = this.biasdelta[i]+rate.*this.Ss[i]
                else
                    this.weightdelta[i] = rate.*(this.Ss[i]*this.As[i-1]')
                    this.biasdelta[i] = rate.*this.Ss[i]
                end
            end
        end
        return nothing
    end
    
    # calculate new weights and bias from one input target pair
    this.updateone = function(input::Array{Float64,1},target::Array{Float64,1},rate::Float64)
        this.calcuate_deltas(input,target,rate,false)
        for i in 1:this.numlayers
            this.weights[i] = this.weights[i] - this.weightdelta[i]
            this.bias[i] = this.bias[i] - this.biasdelta[i]
        end
        return nothing
    end
    
    # calculate new weights and bias from training set
    # randomly sample from training set n (cases) input target pairs
    # update weights and bias by averaging updates for each pair
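    # (Note: the initial updateone call below applies one non-averaged
    #  update and, as a side effect, seeds weightdelta/biasdelta so the
    #  avg=true accumulation in the loop has existing entries to add to.)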
    this.updateepoch = function(cases::Int64,inputs::Dict,targets::Dict,rate::Float64)
        this.updateone(inputs[1],targets[1],rate)
        for i in 1:cases
            ind = rand(1:length(inputs))
            input = inputs[ind]
            target = targets[ind]
            this.calcuate_deltas(input,target,rate,true)
        end
        
        for i in 1:this.numlayers
            this.weightdelta[i] = (1/cases).*this.weightdelta[i]
            this.weights[i] = this.weights[i] - this.weightdelta[i]
            this.biasdelta[i] = (1/cases).*this.biasdelta[i]
            this.bias[i] = this.bias[i] - this.biasdelta[i]
        end
    end
    
    # sigmoid function
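    # (sgm(x) = 1/(1+exp(-x)) applied element-wise; its derivative
    #  sgm(x).*(1-sgm(x)) is what Fs[i] stores during forward)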
    this.sgm = function(x::Array{Float64,1})
        return 1 ./ (1 + exp(-x))
    end
    
    # calculate current error for one input target pair
    this.calculate_error = function(input::Array{Float64,1},target::Array{Float64,1})
        this.forward(input)
        return (this.As[this.numlayers] - target)'*(this.As[this.numlayers] - target)
    end
    
    return this
end

end
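
For context, here is a minimal usage sketch (the data is purely illustrative, and the call pattern simply follows the methods defined above):

# Minimal usage sketch: a 2-3-1 network trained on a few made-up pairs.
net = ANN2()
net.setshape([2, 3, 1])
net.init()

# illustrative input/target pairs
inputs  = Dict{Int64,Any}()
targets = Dict{Int64,Any}()
inputs[1] = [0.0, 0.0]; targets[1] = [0.0]
inputs[2] = [0.0, 1.0]; targets[2] = [1.0]
inputs[3] = [1.0, 0.0]; targets[3] = [1.0]
inputs[4] = [1.0, 1.0]; targets[4] = [0.0]

# train for a number of epochs, sampling 4 pairs per epoch
# (epoch count and learning rate picked arbitrarily)
for epoch in 1:5000
    net.updateepoch(4, inputs, targets, 0.5)
end

println(net.forward([0.0, 1.0]))
println(net.calculate_error([0.0, 1.0], [1.0]))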