Logistic Regression
var LogisticRegression = function() {
  this.N = 0;        // Number of samples
  this.D = 0;        // Number of dimensions (features), including the bias term
  this.K = 0;        // Number of classes
  this.classes = []; // Class labels to classify
  this.weights = []; // Weight matrix, K x D
  this.input = [];   // Feature vectors, e.g. [[1, 0, 1, 0, ...], ...]
  this.output = [];  // Label objects at init, rewritten to 1-of-K vectors by generateKOutput
};
// NOTE:
// Do more input samples allow a smaller learning rate and fewer iterations?
// Steps
// The model keeps a single weight matrix W (classes x features).
// We adjust W until the gradient dW is minimal (close to 0, below a threshold).
// 1. Assign small random weights for each feature of each class, W[i][j] = rand(-0.2, 0.2)  [e.g. 10 x 512]
// 2. Initialize dW -> 0 (gradient accumulator)
// 3. For every input sample (n)
//      For every class (i)
//        calculated_output[n][i] = 0   // for every input we compute a score for every class
//        For every feature|dimension of the input (j)
//          calculated_output[n][i] = calculated_output[n][i] + W[i][j]*X[n][j]
//      Translate calculated_output into the 0-1 range (softmax):
//        Y[n][i] = exp(calculated_output[n][i]) / sum over k of exp(calculated_output[n][k])
//      For every class (i) and every feature|dimension (j)
//        dW[i][j] = dW[i][j] + (actual_output[n][i] - Y[n][i]) * X[n][j]
// 4. For every class (i)
//      For every dimension|feature (j)
//        W[i][j] = W[i][j] + eta * dW[i][j]   // eta is the learning rate
// 5. Repeat from step 2 until every dW[i][j] is close to zero
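// For example, with K = 3 classes and raw scores calculated_output[n] = [1.0, 2.0, 0.5]
// for one sample (illustrative numbers):
//   exp(1.0) ~ 2.718, exp(2.0) ~ 7.389, exp(0.5) ~ 1.649, sum ~ 11.756
// so the softmax probabilities Y[n] are roughly [0.23, 0.63, 0.14], which sum to 1.
// If the sample actually belongs to the second class (actual_output[n] = [0, 1, 0]),
// the gradient contribution for class i = 1 and feature j is (1 - 0.63) * X[n][j].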
LogisticRegression.prototype = {
  // Compute the raw (unnormalized) score for each class for a single
  // space-separated feature string; the largest score is the predicted class.
  run: function(input) {
    var self = this;
    var Y = [];
    // Prepend the bias term "1" and parse the features into numbers
    input = ("1 " + input).split(" ").map(Number);
    for(var i = 0; i < self.K; i++) {
      Y.push(0);
      for(var j = 0; j < self.D; j++) {
        Y[i] = Y[i] + input[j] * self.weights[i][j];
      }
    }
    return Y;
  }
  , train: function(data, iterations) {
    var self = this
      , learningRate = 0.00001
      , threshold = 0.001;
    // Default to 1000 passes when no iteration count is given
    iterations = iterations || 1000;
    // Initialize N, D, K, the training samples and the random weights
    self.init(data);
    for(var z = 0; z < iterations; z += 1) {
      var coutput = []
        , dW = []
        , converged = true;
      // Initialize the gradient accumulator dW (K x D) to zero
      for(var i = 0; i < self.K; i++) {
        dW.push([]);
        for(var j = 0; j < self.D; j++) {
          dW[i].push(0);
        }
      }
      // One pass over all training samples
      for(var n = 0; n < self.N; n++) {
        coutput.push([]);
        // Raw score per class: dot product of the class weights and the input
        for(var i = 0; i < self.K; i++) {
          coutput[n].push(0);
          for(var j = 0; j < self.D; j++) {
            coutput[n][i] = coutput[n][i] + self.weights[i][j] * self.input[n][j];
          }
        }
        // Softmax denominator: sum of the exponentiated scores
        var sum_coutput = 0;
        for(var i = 0; i < self.K; i++) {
          sum_coutput = sum_coutput + Math.exp(coutput[n][i]);
        }
        // Translate the scores into 0-1 probabilities and accumulate the gradient
        for(var i = 0; i < self.K; i++) {
          coutput[n][i] = Math.exp(coutput[n][i]) / sum_coutput;
          for(var j = 0; j < self.D; j++) {
            dW[i][j] = dW[i][j] + (self.output[n][i] - coutput[n][i]) * self.input[n][j];
          }
        }
      }
      // Step 4: apply the accumulated gradient once per pass
      for(var i = 0; i < self.K; i++) {
        for(var j = 0; j < self.D; j++) {
          self.weights[i][j] = self.weights[i][j] + learningRate * dW[i][j];
        }
      }
      // Stop early once every entry of dW is close to zero
      for(var i = 0; i < self.K && converged; i++) {
        for(var j = 0; j < self.D; j++) {
          if(Math.abs(dW[i][j]) > threshold) {
            converged = false;
            break;
          }
        }
      }
      if(converged) {
        console.log("logistic regression, ready after =>", z);
        break;
      }
    }
  }
  , init: function(data) {
    var self = this
      , n;
    self.N = n = data.length; // Number of samples
    // Build the input matrix X, one row per sample, with a leading bias term
    for(var i = 0; i < n; i++) {
      data[i].input = "1 " + data[i].input;
      self.input.push(data[i].input.split(" ").map(Number)); // N x D
      self.output.push(data[i].output); // label object, e.g. {one: 1}
    }
    // Number of dimensions (features), including the bias
    self.D = self.input[0].length;
    // Rewrite the outputs in 1-of-K encoding
    self.generateKOutput();
    // Generate random initial weights for W
    self.generateWeightsMatrix();
    console.log(self.D, self.N, self.K);
  }
  , generateKOutput: function() {
    // Init vars
    var self = this
      , output = []
      , input = self.output
      , classes = self.classes = []
      , n = self.N;
    // Collect the distinct class labels (the first key of each label object)
    for(var i = 0; i < n; i++) {
      var key = Object.keys(input[i])[0];
      // If the class has not been seen yet, record it
      if(classes.indexOf(key) === -1) classes.push(key);
    }
    // Number of classes
    self.K = classes.length;
    // Restructure the outputs in 1-of-K encoding
    for(var i = 0; i < n; i++) {
      output[i] = [];
      for(var j = 0; j < self.K; j++) {
        if(Object.keys(input[i])[0] === classes[j]) {
          output[i].push(1);
        } else {
          output[i].push(0);
        }
      }
    }
    // Replace the label objects with the encoded vectors
    self.output = output;
  }
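  // Example of the 1-of-K encoding above (illustrative values): with
  // classes = ["zero", "one"], a sample labelled {one: 1} becomes [0, 1]
  // and a sample labelled {zero: 1} becomes [1, 0].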
  , generateWeightsMatrix: function() {
    // Fill the K x D weight matrix with small random values in (-0.2, 0.2)
    var self = this;
    for(var i = 0; i < self.K; i++) {
      self.weights[i] = [];
      for(var j = 0; j < self.D; j++) {
        self.weights[i][j] = Math.random() * 0.4 - 0.2;
      }
    }
  }
  , toJSON: function() {
  }
  , fromJSON: function(json) {
  }
}
module.exports = LogisticRegression;
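A minimal usage sketch (the file name, feature strings and class labels below are illustrative assumptions; the data format follows init and generateKOutput above):

var LogisticRegression = require("./logistic-regression");

var lr = new LogisticRegression();

// Each sample: a space-separated feature string and a label object
// whose first key names the class.
var data = [
  { input: "1 0 1 0", output: { one: 1 } }
, { input: "0 1 0 1", output: { zero: 1 } }
, { input: "1 1 1 0", output: { one: 1 } }
, { input: "0 0 0 1", output: { zero: 1 } }
];

// Train for up to 1000 passes (stops earlier if the gradient is near zero)
lr.train(data, 1000);

// run() returns one raw score per class, in the order of lr.classes;
// the index of the largest score is the predicted class.
var scores = lr.run("1 0 1 0");
console.log(lr.classes, scores);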