This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import torch | |
import time | |
import os | |
import argparse | |
import shutil | |
import sys | |
# Build the command-line parser for this matrix-multiplication benchmark script.
# NOTE(review): this fragment is truncated — the customary
# `return parser.parse_args()` is not visible in this chunk; confirm against
# the full file before relying on the return value.
def parse_args(): | |
parser = argparse.ArgumentParser(description='Matrix multiplication') | |
# --gpus: number of GPUs to run on (required, parsed as int).
parser.add_argument('--gpus', help='gpu amount', required=True, type=int) |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# -*- coding:utf-8 -*- | |
# NOTE(review): fragment is truncated mid-loop — the reciprocal-rank
# accumulation and the final `return` are not visible in this chunk.
def MRR(ranked_list, ground_truth): | |
"""Mean Reciprocal Rank: averages 1/rank of the first relevant hit per list.""" | |
# Running sum of reciprocal ranks across all queries.
rr = 0. | |
for i in range(len(ranked_list)): | |
for j in range(len(ranked_list[i])): | |
# Earlier version matched only the single top ground-truth item:
# if ground_truth[i][0] == ranked_list[i][j]: | |
# Current version counts a hit on ANY ground-truth item for query i.
if ranked_list[i][j] in ground_truth[i]: |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
" Vundle plugin-manager bootstrap.
" NOTE(review): fragment is truncated — the matching `call vundle#end()` and
" `filetype plugin indent on` are not visible in this chunk.
set rtp+=~/.vim/bundle/Vundle.vim | |
call vundle#begin() | |
" Vundle manages itself.
Plugin 'VundleVim/Vundle.vim' | |
" File-system tree explorer.
Plugin 'scrooloose/nerdtree' | |
" Bundle of color schemes.
Plugin 'flazz/vim-colorschemes' | |
" Tab-key insert-mode completion.
Plugin 'ervandew/supertab' | |
" Python autocompletion via jedi.
Plugin 'davidhalter/jedi-vim' | |
" HTML/CSS abbreviation expansion.
Plugin 'mattn/emmet-vim' | |
" Syntax checking framework.
Plugin 'vim-syntastic/syntastic' | |
" PEP 8 checking via flake8.
Plugin 'nvie/vim-flake8' |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Soft attention over the time axis of a sequence tensor (Keras functional API).
# Relies on module-level TIME_STEPS and SINGLE_ATTENTION_VECTOR and on Keras
# names (Permute, Reshape, Dense, Lambda, K, RepeatVector) imported elsewhere
# in the file — none of them are visible in this chunk.
# NOTE(review): fragment is truncated — the final step that applies `a_probs`
# to `inputs` (typically a Multiply/merge) and the return are not visible here.
def attention_3d_block(inputs): | |
# inputs.shape = (batch_size, time_steps, input_dim) | |
input_dim = int(inputs.shape[2]) | |
# Swap to (batch, input_dim, time_steps) so Dense acts along the time axis.
a = Permute((2, 1))(inputs) | |
a = Reshape((input_dim, TIME_STEPS))(a) # this line is not useful. It's just to know which dimension is what. | |
# Per-feature softmax over the TIME_STEPS positions → attention weights.
a = Dense(TIME_STEPS, activation='softmax')(a) | |
if SINGLE_ATTENTION_VECTOR: | |
# Collapse to one shared attention vector, then broadcast it back to
# every feature dimension so all features share the same weights.
a = Lambda(lambda x: K.mean(x, axis=1), name='dim_reduction')(a) | |
a = RepeatVector(input_dim)(a) | |
# Back to (batch, time_steps, input_dim) to align with `inputs`.
a_probs = Permute((2, 1), name='attention_vec')(a) |