Implementing ReLU using MKL-DNN
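This snippet runs ReLU forward and backward through the DNN primitives in Intel MKL (dnnReLUCreateForward_F32 / dnnReLUCreateBackward_F32) on a 1 x 100 x 64 tensor, and verifies both results against a naive OpenMP reference implementation.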
#include <stdio.h>
#include <stdlib.h>
#include "mkl_dnn.h"

// Error-checking helper. The gist calls this but does not define it; this
// minimal version aborts on any non-success status.
#define checkMKLDNNErrors(call)                                   \
    do {                                                          \
        dnnError_t status = (call);                               \
        if (status != E_SUCCESS) {                                \
            printf("MKL-DNN error %d in %s at line %d\n",         \
                   (int)status, __FILE__, __LINE__);              \
            exit(1);                                              \
        }                                                         \
    } while (0)

// Naive OpenMP reference: output[i] = max(data[i], 0).
void relu_impl_ref(float* output, const float* data, size_t size) {
#pragma omp parallel for
    for (size_t i = 0; i < size; i++) {
        output[i] = data[i] < 0 ? 0 : data[i];
    }
}
// Compare two buffers element-wise. Exact float equality is safe here:
// ReLU either copies the input bit-for-bit or writes exactly zero.
void check_result_equal(const float* data1, const float* data2, size_t size) {
    for (size_t i = 0; i < size; i++) {
        if (data1[i] != data2[i])
            printf("data1 and data2 not equal at idx %zu in %s at line %d\n",
                   i, __FILE__, __LINE__);
    }
}
// Fill data with uniform pseudo-random values in [-0.5, 0.5).
void naive_rand(float* data, size_t size) {
    for (size_t i = 0; i < size; i++) {
        data[i] = rand() / (RAND_MAX + 1.0) - 0.5;
    }
}
int main() {
    dnnLayout_t pLayout = NULL, pLayout_diff = NULL;
    // Plain (non-blocked) layout: size[0] is the innermost dimension, so
    // strides {1, 1, 100} describe a dense 1 x 100 x 64 tensor (6400 floats).
    size_t dimension = 3;
    size_t size[3] = { 1, 100, 64 };
    size_t strides[3] = { 1, 1, 100 * 1 };
    dnnPrimitiveAttributes_t attributes = NULL;
    checkMKLDNNErrors(dnnPrimitiveAttributesCreate_F32(&attributes));
    checkMKLDNNErrors(dnnLayoutCreate_F32(&pLayout, dimension, size, strides));
    checkMKLDNNErrors(dnnLayoutCreate_F32(&pLayout_diff, dimension, size, strides));
    dnnPrimitive_t pReLu_Forward = NULL, pReLu_Backward = NULL;
    // Negative slope 0.0 gives the standard ReLU rather than a leaky variant.
    checkMKLDNNErrors(dnnReLUCreateForward_F32(&pReLu_Forward, attributes, pLayout, 0.0));
    checkMKLDNNErrors(dnnReLUCreateBackward_F32(&pReLu_Backward, attributes, pLayout_diff, pLayout, 0.0));
    // Each primitive takes its inputs/outputs from an array indexed by
    // resource type:
    //   dnnResourceNumber  == 32
    //   dnnResourceSrc     == 0
    //   dnnResourceDst     == 1
    //   dnnResourceDiffSrc == 4
    //   dnnResourceDiffDst == 7
    float* resources[dnnResourceNumber] = { NULL };
    float* resources_backward[dnnResourceNumber] = { NULL };
    float* data_in = (float*)malloc(sizeof(float) * 64 * 100 * 1);
    naive_rand(data_in, 64 * 100 * 1);
    float* data_out = (float*)malloc(sizeof(float) * 64 * 100 * 1);
    float* data_diff = (float*)malloc(sizeof(float) * 64 * 100 * 1);
    float* data_check = (float*)malloc(sizeof(float) * 64 * 100 * 1);
    resources[dnnResourceSrc] = data_in;   // ReLU forward input
    resources[dnnResourceDst] = data_out;  // ReLU forward output
    resources_backward[dnnResourceSrc] = data_in;       // backward reads the forward input to decide where gradients pass
    resources_backward[dnnResourceDiffDst] = data_out;  // ReLU backward input (incoming gradient)
    resources_backward[dnnResourceDiffSrc] = data_diff; // ReLU backward output (propagated gradient)
    checkMKLDNNErrors(dnnExecute_F32(pReLu_Forward, (void**)resources));
    relu_impl_ref(data_check, data_in, 64 * 100 * 1);
    check_result_equal(data_out, data_check, 64 * 100 * 1);
    checkMKLDNNErrors(dnnExecute_F32(pReLu_Backward, (void**)resources_backward));
    // Backward computes data_diff[i] = (data_in[i] > 0) ? data_out[i] : 0.
    // Since data_out is already 0 wherever data_in <= 0, data_diff must equal
    // data_out exactly, so the forward output doubles as the expected result.
    check_result_equal(data_diff, data_out, 64 * 100 * 1);
    // Delete the primitives before the layouts; otherwise deleting the layouts fails.
    checkMKLDNNErrors(dnnDelete_F32(pReLu_Forward));
    checkMKLDNNErrors(dnnDelete_F32(pReLu_Backward));
    checkMKLDNNErrors(dnnLayoutDelete_F32(pLayout));
    checkMKLDNNErrors(dnnLayoutDelete_F32(pLayout_diff));
    checkMKLDNNErrors(dnnPrimitiveAttributesDestroy_F32(attributes));
    free(data_in);
    free(data_out);
    free(data_diff);
    free(data_check);
    return 0;
}
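For reference, the backward check above relies on the ReLU gradient rule: the incoming gradient passes through wherever the forward input was positive and is zeroed elsewhere. A naive reference for the backward pass (not part of the original gist, written here to mirror relu_impl_ref) would look like this:

// Hypothetical reference for the backward pass (not in the original gist):
// diff_src[i] = (src[i] > 0) ? diff_dst[i] : 0.
void relu_backward_impl_ref(float* diff_src, const float* diff_dst,
                            const float* src, size_t size) {
#pragma omp parallel for
    for (size_t i = 0; i < size; i++) {
        diff_src[i] = src[i] > 0 ? diff_dst[i] : 0.0f;
    }
}

Because the gist feeds the forward output data_out back in as dnnResourceDiffDst, this reference would reproduce data_out exactly, which is why check_result_equal(data_diff, data_out, ...) is a valid test. To build, one possible compile line (assuming the file is saved as relu_mkl.c and MKL's single dynamic library is on the linker path; adjust for your installation):

    gcc -fopenmp relu_mkl.c -lmkl_rt -o relu_mkl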