@awjuliani
Created October 13, 2016
import tensorflow as tf
import numpy as np
import tensorflow.contrib.slim as slim
total_layers = 25 # Specify how deep we want our network
units_between_stride = total_layers // 5 # integer division so range() below receives an int
def resUnit(input_layer, i):
    # Pre-activation residual unit: (BN -> ReLU -> conv) twice, then add the skip connection.
    with tf.variable_scope("res_unit" + str(i)):
        part1 = slim.batch_norm(input_layer, activation_fn=None)
        part2 = tf.nn.relu(part1)
        part3 = slim.conv2d(part2, 64, [3, 3], activation_fn=None)
        part4 = slim.batch_norm(part3, activation_fn=None)
        part5 = tf.nn.relu(part4)
        part6 = slim.conv2d(part5, 64, [3, 3], activation_fn=None)
        output = input_layer + part6
        return output
tf.reset_default_graph()

# 32x32x3 images (CIFAR-10 sized) and integer class labels in [0, 10).
input_layer = tf.placeholder(shape=[None, 32, 32, 3], dtype=tf.float32, name='input')
label_layer = tf.placeholder(shape=[None], dtype=tf.int32)
label_oh = slim.layers.one_hot_encoding(label_layer, 10)

# Initial convolution, followed by 5 stages of residual units separated by strided convolutions.
layer1 = slim.conv2d(input_layer, 64, [3, 3], normalizer_fn=slim.batch_norm, scope='conv_' + str(0))
for i in range(5):
    for j in range(units_between_stride):
        layer1 = resUnit(layer1, j + (i * units_between_stride))
    # Downsample between stages with a strided convolution.
    layer1 = slim.conv2d(layer1, 64, [3, 3], stride=[2, 2], normalizer_fn=slim.batch_norm, scope='conv_s_' + str(i))
top = slim.conv2d(layer1,10,[3,3],normalizer_fn=slim.batch_norm,activation_fn=None,scope='conv_top')
output = slim.layers.softmax(slim.layers.flatten(top))
# Cross-entropy loss; the epsilon goes inside the log to avoid log(0).
loss = tf.reduce_mean(-tf.reduce_sum(label_oh * tf.log(output + 1e-10), reduction_indices=[1]))
trainer = tf.train.AdamOptimizer(learning_rate=0.001)
update = trainer.minimize(loss)
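
# A minimal usage sketch (not part of the original gist): run a few training
# steps on random, CIFAR-10-shaped dummy data. In practice you would feed real
# image batches and labels in place of the np.random arrays below.
init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)
    for step in range(100):
        batch_x = np.random.rand(64, 32, 32, 3).astype(np.float32)    # dummy images
        batch_y = np.random.randint(0, 10, size=64).astype(np.int32)  # dummy labels
        _, loss_val = sess.run([update, loss],
                               feed_dict={input_layer: batch_x, label_layer: batch_y})
        if step % 10 == 0:
            print('step %d, loss %.4f' % (step, loss_val))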