Created
December 11, 2017 23:27
-
-
Save SnowMasaya/0924c28ebc6ea64209b80fef946fa087 to your computer and use it in GitHub Desktop.
'Define by Run'型の深層学習フレームワークが自然言語処理に向いている理由 ref: https://qiita.com/GushiSnow/items/aa660c7228b7024076a8
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
いい夢見ろよ😴的な? 笑 さっ風呂入ろ ♨ ️ | |
南港に沈めたら解決 | |
チロルチョコ | |
: |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
いい夢見ろよ😴的な? 笑 さっ風呂入ろ♨ ️ | |
南港に沈めたら解決!<unk><unk><unk> | |
チロルチョコ!!<unk><unk><unk><unk> | |
: |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
class Net(nn.Module):
    """Two-branch CNN classifier over 1-channel images, 10 output classes.

    Demonstrates 'define-by-run': ``forward`` inspects the input height at
    runtime and routes through a deeper conv stack for large images.

    Size assumptions baked into the Linear layers:
      * small branch: 28x28 input -> conv(k5)+pool/2 twice -> 20x4x4 = 320
      * large branch: 52x52 input -> conv(k5)+pool/2 three times -> 40x3x3 = 360
    Other input sizes will fail at the ``view`` — TODO confirm callers only
    feed these two sizes.
    """

    def __init__(self):
        super(Net, self).__init__()
        # Shared first convolution (1 -> 10 channels).
        self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
        # Small-input branch (expects 28x28).
        self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
        self.conv2_drop = nn.Dropout2d()
        self.fc1 = nn.Linear(320, 50)
        # Classifier head shared by both branches (50 -> 10 classes).
        self.fc2 = nn.Linear(50, 10)
        # Large-input branch (expects 52x52): one extra conv+pool stage.
        self.conv1_2 = nn.Conv2d(10, 20, kernel_size=5)
        self.conv2_2 = nn.Conv2d(20, 40, kernel_size=5)
        self.fc1_2 = nn.Linear(360, 50)
        # Height threshold that selects the branch at runtime.
        self.over_size = 28

    def forward(self, x):
        """Return per-class log-probabilities, shape (batch, 10).

        x: (batch, 1, H, W) tensor; branch chosen from H at runtime.
        """
        _, _, h, w = x.size()
        if h > self.over_size:
            # Deep branch: three conv+pool stages; 360 = 40*3*3 requires
            # the final feature map to be 3x3 (i.e. a 52x52 input).
            x = F.relu(F.max_pool2d(self.conv1(x), 2))
            x = F.relu(F.max_pool2d(self.conv1_2(x), 2))
            x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2_2(x)), 2))
            x = x.view(-1, 360)
            x = F.relu(self.fc1_2(x))
        else:
            # Shallow branch: two conv+pool stages; 320 = 20*4*4 (28x28 input).
            x = F.relu(F.max_pool2d(self.conv1(x), 2))
            x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
            x = x.view(-1, 320)
            x = F.relu(self.fc1(x))
        x = F.dropout(x, training=self.training)
        x = self.fc2(x)
        # dim=1 made explicit: implicit-dim log_softmax is deprecated and
        # resolves to dim=1 for 2D input, so behavior is unchanged.
        return F.log_softmax(x, dim=1)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment