neural network
open System

type Class = Virus | Noise

type Datum = { Image : float[]; Class : Class }

type NeuralNetwork(inputSize, hiddenSize, outputSize, patternSize, maxT, eps, alpha, beta, W0, high, low) =
  let mutable eta = 0.005
  let inputLayer = Array.init (inputSize+1) (fun _ -> 0.0)
  let hiddenLayer = Array.init (hiddenSize+1) (fun _ -> 0.0)
  let outputLayer = Array.init outputSize (fun _ -> 0.0)
  let weight1 = Array2D.init hiddenSize (inputSize+1) (fun _ _ -> 0.0)
  let weight2 = Array2D.init outputSize (hiddenSize+1) (fun _ _ -> 0.0)
  let dWeight1 = Array2D.init hiddenSize (inputSize+1) (fun _ _ -> 0.0)
  let dWeight2 = Array2D.init outputSize (hiddenSize+1) (fun _ _ -> 0.0)
  let preDWeight1 = Array2D.init hiddenSize (inputSize+1) (fun _ _ -> 0.0)
  let preDWeight2 = Array2D.init outputSize (hiddenSize+1) (fun _ _ -> 0.0)
  let random = new Random()
  let load_data() =
    let head = "C:\\virus\\"
    let raw = ".raw"
    let isNoise i = 1 <= i && i <= 400
    (* Select the training images here. In the virus folder, images 1-200 are noise type 1,
       201-400 are noise type 2, 401-700 are virus type 1, and 701-900 are virus type 2. *)
    [|
      for v = 1 to 905 do
        let fileName = sprintf "%s%d%s" head v raw
        if System.IO.File.Exists(fileName) then
          let image = System.IO.File.ReadAllBytes(fileName)
          let data = [|
            for k in 0 .. inputSize - 1 do
              yield float(image.[k]) / 256.0 |]
          yield { Image = data; Class = if isNoise v then Noise else Virus }
    |]
    |> Array.sortBy (fun _ -> random.NextDouble())
    |> Seq.take patternSize
    |> Array.ofSeq
  let originalData = load_data()
  // Target output for each class (single output unit: 0 = noise, 1 = virus).
  let zeta = function
    | Noise -> 0.0
    | Virus -> 1.0
  // Logistic activation with gain beta.
  let sigmoid u = 1.0 / (1.0 + System.Math.Exp(-beta*u))
  // Run f i j over all index pairs i in 0..x, j in 0..y.
  let loop x y f =
    for i in 0 .. x do
      for j in 0 .. y do
        f i j
  // For each i in 0..x, pass the sum of f i j over j in 0..y to g.
  let loopSum x y f g =
    let mutable sum = 0.0
    for i in 0 .. x do
      sum <- 0.0
      for j in 0 .. y do
        sum <- sum + f i j
      g i sum
  // Initialise the weights uniformly in [-W0, W0] and clear the weight-change accumulators.
  member this.WeightInit() =
    let ranran() = random.NextDouble() * 2.0 * W0 - W0
    loop (hiddenSize-1) inputSize <| fun j k ->
      weight1.[j, k] <- ranran()
      dWeight1.[j, k] <- 0.0
    loop (outputSize-1) hiddenSize <| fun i j ->
      weight2.[i, j] <- ranran()
      dWeight2.[i, j] <- 0.0
  // Remember the previous weight changes (for the momentum term) and reset the accumulators.
  member this.DWeightInit() =
    loop (hiddenSize-1) inputSize <| fun j k ->
      preDWeight1.[j, k] <- dWeight1.[j, k]
      dWeight1.[j, k] <- 0.0
    loop (outputSize-1) hiddenSize <| fun i j ->
      preDWeight2.[i, j] <- dWeight2.[i, j]
      dWeight2.[i, j] <- 0.0
  // Copy one image into the input layer; the last unit is the constant bias input.
  member this.InputLayerSet(datum : Datum) =
    for k in 0 .. inputSize-1 do
      inputLayer.[k] <- datum.Image.[k]
    inputLayer.[inputSize] <- 1.0
  // Forward pass: propagate the input through the hidden layer to the output layer.
  member this.Forward() =
    loopSum (hiddenSize-1) inputSize (fun j k ->
      inputLayer.[k] * weight1.[j, k]) <| fun j sum ->
        hiddenLayer.[j] <- sigmoid sum
    hiddenLayer.[hiddenSize] <- 1.0
    loopSum (outputSize-1) hiddenSize (fun i j ->
      hiddenLayer.[j] * weight2.[i, j]) <| fun i sum ->
        outputLayer.[i] <- sigmoid sum
  // Backward pass: compute the error deltas and accumulate the weight gradients.
  member this.Backward(datum : Datum) =
    let delta1 = Array.init (hiddenSize+1) (fun _ -> 0.0)
    let delta2 = Array.init outputSize (fun i ->
      beta * outputLayer.[i] * (1.0 - outputLayer.[i]) * (zeta datum.Class - outputLayer.[i]))
    loopSum (hiddenSize-1) (outputSize-1) (fun j i ->
      weight2.[i, j] * delta2.[i]) <| fun j sum ->
        delta1.[j] <- beta * hiddenLayer.[j] * (1.0 - hiddenLayer.[j]) * sum
    loop (outputSize-1) hiddenSize <| fun i j ->
      dWeight2.[i, j] <- dWeight2.[i, j] + delta2.[i] * hiddenLayer.[j]
    loop (hiddenSize-1) inputSize <| fun j k ->
      dWeight1.[j, k] <- dWeight1.[j, k] + delta1.[j] * inputLayer.[k]
  // Sum of squared errors over the output units for the current pattern.
  member this.CalcError(datum : Datum) =
    seq {
      for i = 0 to outputSize - 1 do
        yield (zeta datum.Class - outputLayer.[i]) * (zeta datum.Class - outputLayer.[i]) }
    |> Seq.sum
  // Apply the accumulated gradients: new change = eta * gradient + alpha * previous change (momentum).
  member this.ModifyWeight() =
    loop (outputSize-1) hiddenSize <| fun i j ->
      dWeight2.[i, j] <- eta * dWeight2.[i, j] + alpha * preDWeight2.[i, j]
      weight2.[i, j] <- weight2.[i, j] + dWeight2.[i, j]
    loop (hiddenSize-1) inputSize <| fun j k ->
      dWeight1.[j, k] <- eta * dWeight1.[j, k] + alpha * preDWeight1.[j, k]
      weight1.[j, k] <- weight1.[j, k] + dWeight1.[j, k]
  // Classify the test data and count: a = noise recognised as noise, b = virus missed,
  // c = noise flagged as virus, d = virus recognised as virus.
  member this.Verification(testData : Datum[]) =
    let a = ref 0
    let b = ref 0
    let c = ref 0
    let d = ref 0
    testData
    |> Array.iter (fun ({ Class = cls } as datum) ->
      this.InputLayerSet(datum)
      this.Forward()
      printfn "%A" (cls, outputLayer.[0])
      match cls with
      | Noise ->
        if outputLayer.[0] <= 0.4 then incr a else incr c
      | Virus ->
        if outputLayer.[0] >= 0.6 then incr d else incr b)
    (float !a, float !b, float !c, float !d)
  // Batch backpropagation with momentum and a "bold driver" learning rate:
  // eta is increased (x high) when the mean error drops and reduced (x low) otherwise.
  member this.BackPropagation(data : Datum[]) =
    this.WeightInit()
    let mutable E = 0.0
    let mutable t = 0
    let mutable broken = false
    while not broken && t < maxT do
      printfn "%A" (t, E)
      this.DWeightInit()
      let Esum =
        seq {
          for datum in data do
            this.InputLayerSet(datum)
            this.Forward()
            this.Backward(datum)
            yield this.CalcError(datum) }
        |> Seq.sum
      eta <-
        if E - Esum / float(outputSize * data.Length) > 0.0 then
          eta * high
        else
          eta * low
      this.ModifyWeight()
      E <- Esum / float(outputSize * data.Length)
      if E < eps then
        broken <- true
      else
        t <- t + 1
    t
  // n-fold cross-validation: split the data into n disjoint folds, train on n-1 folds,
  // verify on the remaining one, and average the results over the folds.
  member this.CrossValidation(n : int) =
    let size = originalData.Length
    let folds =
      originalData
      |> Seq.chunkBySize (size / n)
      |> Seq.mapi (fun i x -> (i, x))
      |> Array.ofSeq
    let results =
      [ for (i, testData) in folds do
          let trainData =
            folds
            |> Seq.filter (fun (j, _) -> j <> i)
            |> Seq.map snd
            |> Seq.concat
            |> Array.ofSeq
          let t = this.BackPropagation(trainData)
          let (a, b, c, d) = this.Verification(testData)
          yield (t, (a+d)/(a+b+c+d), a/(a+b), a/(a+c)) ]
    let t = (results |> Seq.map (fun (t,_,_,_) -> t) |> Seq.sum) / results.Length
    let x = (results |> Seq.map (fun (_,x,_,_) -> x) |> Seq.sum) / float results.Length
    let y = (results |> Seq.map (fun (_,_,y,_) -> y) |> Seq.sum) / float results.Length
    let z = (results |> Seq.map (fun (_,_,_,z) -> z) |> Seq.sum) / float results.Length
    (t, x, y, z)
[<EntryPoint>]
let main _ =
  let hiddenSize = 7
  let high = 1.05
  let low = 0.7
  let iterationSize = 1
  let now = System.DateTime.Now
  let logFileName = sprintf "%02d%02d%02d%02d.csv" now.Month now.Day now.Hour now.Minute
  let xs = Array.Parallel.init iterationSize (fun i ->
    let nn = new NeuralNetwork(32*32, hiddenSize, 1, 100, 1000, 1.0e-4, 0.8, 1.0, 0.5, high, low)
    let result = nn.CrossValidation(10)
    printfn "%d done" i
    result)
  // use writer = new System.IO.StreamWriter(logFileName)
  // Printf.fprintf writer "hidden %d, eta-up %f, eta-down %f\n" hiddenSize high low
  for i in 0 .. iterationSize - 1 do
    let (time, accuracy, precision, recall) = xs.[i]
    // Printf.fprintf writer "%d,%f\n" time accuracy
    printf "run %d: %d iterations, accuracy: %f, precision: %f, recall: %f, F-value: %f\n"
      (i+1) time accuracy precision recall (2.0 * precision * recall / (precision + recall))
  0
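For reference, the update rules that the NeuralNetwork class above implements can be written compactly. With sigmoid gain β, target ζ (0 for Noise, 1 for Virus), adaptive learning rate η, momentum coefficient α, and sums over p running over the training patterns of one epoch:

$$\sigma(u) = \frac{1}{1 + e^{-\beta u}}, \qquad \delta^{(2)}_i = \beta\, o_i (1 - o_i)(\zeta - o_i), \qquad \delta^{(1)}_j = \beta\, h_j (1 - h_j) \sum_i w^{(2)}_{ij} \delta^{(2)}_i$$

$$\Delta w^{(2)}_{ij}(t) = \eta \sum_p \delta^{(2)}_i h_j + \alpha\, \Delta w^{(2)}_{ij}(t-1), \qquad \Delta w^{(1)}_{jk}(t) = \eta \sum_p \delta^{(1)}_j x_k + \alpha\, \Delta w^{(1)}_{jk}(t-1), \qquad w \leftarrow w + \Delta w$$

After each epoch η itself is rescaled: multiplied by high (1.05) when the mean squared error over the data set decreased, and by low (0.7) otherwise.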
open System

// Rejection sampling: draw from the density proportional to f on [rangeMin, rangeMax],
// where maxValue is an upper bound of f on that range.
type RejectionMethod(f, rangeMin, rangeMax, maxValue) =
  let random = new Random()
  let rangeSize = rangeMax - rangeMin
  member this.RandomSeq() =
    seq {
      let x = random.NextDouble()
      let y = random.NextDouble()
      let z = rangeSize * x + rangeMin
      // accept the candidate z with probability f(z) / maxValue
      if y < f(z) / maxValue then
        yield z
      yield! this.RandomSeq() }

let f x = 1.0 / (1.0 + System.Math.Exp(-x))
let rangeMin = -10.0
let rangeMax = 10.0
let sigmoid = new RejectionMethod(f, rangeMin, rangeMax, 1.0)
sigmoid.RandomSeq()
|> Seq.take 1000
|> Seq.iter (printfn "%f")
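In other words, each recursive step draws a uniform candidate $z = (\mathrm{rangeMax} - \mathrm{rangeMin})\,x + \mathrm{rangeMin}$ with $x, y \sim U(0,1)$ and keeps it only when $y < f(z)/M$, where $M$ (maxValue) must bound $f$ on the range; accepted values then follow the density proportional to $f$. The script above prints 1000 samples distributed proportionally to the logistic function on $[-10, 10]$.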
open System.Drawing
open System.Windows.Forms

// Draw a single grey-scale dot with intensity a at (x, y).
let drawDot (g : Graphics) (a : int) (x : int) (y : int) =
  g.DrawLine(new Pen(Color.FromArgb(a, a, a)), x, y, x+1, y+1)

// Displays one 32x32 raw image from the virus data set as grey-scale dots.
type VirusVisualizerForm() as this =
  inherit Form()
  do
    this.Size <- new Size(100, 100)
    this.Text <- "VirusVisualizer"
    let label1 = new Label()
    label1.Size <- new Size(100, 100)
    let bytes = System.IO.File.ReadAllBytes(@"C:\virus\897.raw")
    label1.Paint.Add(fun e ->
      let g = e.Graphics
      for i in 0 .. 32-1 do
        for j in 0 .. 32-1 do
          drawDot g (int bytes.[i*32+j]) (30+i) (30+j))
    this.Controls.Add(label1)

Application.Run(new VirusVisualizerForm())
Deep learning is a method that trains multi-layer generative models, such as neural networks, with a combination of unsupervised and supervised learning. It behaves like feature extraction and classification rolled into one, and supposedly (?) performs better than an SVM.
http://deeplearning.net/
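As a rough illustration of that unsupervised-plus-supervised recipe, here is a minimal, hypothetical F# sketch (not part of the gist) of greedy layer-wise pretraining with small sigmoid autoencoders. The function names (pretrainLayer, encode), the layer sizes, the learning rate, the epoch count and the random stand-in data are all assumptions made for the example; in a real pipeline the pretrained encoder weights would seed a supervised network such as NeuralNetwork above before backpropagation fine-tuning.

// Hypothetical sketch: greedy layer-wise pretraining with sigmoid autoencoders.
// All sizes and hyperparameters below are illustrative assumptions.
open System

let rng = Random()
let sigmoid u = 1.0 / (1.0 + exp (-u))

// Train one autoencoder layer (visible -> hidden -> visible) on `data` by
// gradient descent on the squared reconstruction error; return the encoder weights.
let pretrainLayer visible hidden epochs (eta : float) (data : float[][]) =
  let enc = Array2D.init hidden visible (fun _ _ -> rng.NextDouble() * 0.1 - 0.05)
  let dec = Array2D.init visible hidden (fun _ _ -> rng.NextDouble() * 0.1 - 0.05)
  for _ in 1 .. epochs do
    for x in data do
      // encode the input and reconstruct it from the hidden code
      let h = Array.init hidden (fun j ->
        sigmoid (Seq.sum (seq { for k in 0 .. visible-1 -> enc.[j, k] * x.[k] })))
      let r = Array.init visible (fun k ->
        sigmoid (Seq.sum (seq { for j in 0 .. hidden-1 -> dec.[k, j] * h.[j] })))
      // deltas: reconstruction error at the output, backpropagated to the hidden layer
      let dOut = Array.init visible (fun k -> (x.[k] - r.[k]) * r.[k] * (1.0 - r.[k]))
      let dHid = Array.init hidden (fun j ->
        h.[j] * (1.0 - h.[j]) * Seq.sum (seq { for k in 0 .. visible-1 -> dec.[k, j] * dOut.[k] }))
      for k in 0 .. visible-1 do
        for j in 0 .. hidden-1 do
          dec.[k, j] <- dec.[k, j] + eta * dOut.[k] * h.[j]
      for j in 0 .. hidden-1 do
        for k in 0 .. visible-1 do
          enc.[j, k] <- enc.[j, k] + eta * dHid.[j] * x.[k]
  enc  // the decoder is discarded; only the encoder is kept for stacking

// Push a data set through an encoder to obtain the next level of features.
let encode (w : float[,]) (data : float[][]) =
  let hidden, visible = Array2D.length1 w, Array2D.length2 w
  data |> Array.map (fun x ->
    Array.init hidden (fun j ->
      sigmoid (Seq.sum (seq { for k in 0 .. visible-1 -> w.[j, k] * x.[k] }))))

// Greedy stacking on random stand-in data (32*32 inputs, as in the gist).
let inputs = Array.init 50 (fun _ -> Array.init (32*32) (fun _ -> rng.NextDouble()))
let w1 = pretrainLayer (32*32) 32 3 0.1 inputs
let w2 = pretrainLayer 32 8 3 0.1 (encode w1 inputs)
printfn "pretrained encoders: %dx%d and %dx%d"
  (Array2D.length1 w1) (Array2D.length2 w1) (Array2D.length1 w2) (Array2D.length2 w2)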