-
Notifications
You must be signed in to change notification settings - Fork 0
/
fnn.py
71 lines (51 loc) · 2.25 KB
/
fnn.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
import tensorflow as tf
import numpy as np
from scipy.stats import norm
def fnn(x, output_dim):
    """Two-layer feed-forward network: ReLU hidden layer (10 -> 20), sigmoid output (20 -> output_dim).

    Note: uses tf.Variable directly, so every call creates a fresh,
    independent set of weights (no sharing between calls).

    Args:
        x: input tensor of shape (batch, 10).
        output_dim: width of the sigmoid output layer.

    Returns:
        (output tensor of shape (batch, output_dim), [w1, w2]).
    """
    # weights and biases
    w1 = tf.Variable(tf.random_normal([10, 20], stddev=0.35), name="weights1")
    b1 = tf.Variable(tf.zeros([20]), name="biases1")
    w2 = tf.Variable(tf.random_normal([20, output_dim], stddev=0.35), name="weights2")
    # BUG FIX: the output bias must match output_dim, not the hidden width (20);
    # with output_dim != 20 the add in the sigmoid layer below would be
    # shape-incompatible.
    b2 = tf.Variable(tf.zeros([output_dim]), name="biases2")
    # nn operators
    y1 = tf.nn.relu(tf.matmul(x, w1) + b1)
    y2 = tf.nn.sigmoid(tf.matmul(y1, w2) + b2)
    return y2, [w1, w2]
# Defining the computational graph
x1 = tf.placeholder(tf.float32, shape=(1, 10))
y1, w1 = fnn(x1, 1)
# The second network has different weights and biases
# (fnn creates fresh tf.Variables on every call, so nothing is shared).
x2 = tf.placeholder(tf.float32, shape=(1, 10))
y2, w2 = fnn(x2, 1)
# Initializing the session
with tf.Session() as sess:
    # tf.initialize_all_variables() is deprecated; tf.global_variables_initializer()
    # is the supported TF1 equivalent.
    sess.run(tf.global_variables_initializer())
    # Feeding and Fetching data
    theta1, theta2 = sess.run([w1, w2], {x1: np.random.random([1, 10]), x2: np.random.random([1, 10])})
    print(theta1)
    print(theta2)
# function for creating nn layers
def linear(x, out_dim, name, activation_fn=None):
    """Fully-connected layer created under variable scope `name`.

    Variables are obtained via tf.get_variable, so the layer's weights can be
    shared across calls with scope.reuse_variables().

    Args:
        x: 2-D input tensor of shape (batch, in_dim).
        out_dim: number of output units.
        name: variable-scope name for this layer.
        activation_fn: optional elementwise activation applied to the affine output.

    Returns:
        (output tensor of shape (batch, out_dim), [w, b]).
    """
    with tf.variable_scope(name):
        w = tf.get_variable(name='weights', shape=[x.get_shape()[1], out_dim], dtype=tf.float32, initializer=tf.random_normal_initializer())
        b = tf.get_variable(name='biases', shape=[out_dim], dtype=tf.float32, initializer=tf.constant_initializer(0.0))
        out = tf.matmul(x, w) + b
        # Idiom fix: compare against None with `is not`, not `!=`.
        if activation_fn is not None:
            out = activation_fn(out)
        return out, [w, b]
# Computational Graph
with tf.variable_scope("ffn") as scope:
    x1 = tf.placeholder(tf.float32, shape=(1, 10))
    y11, theta11 = linear(x1, 10, name="h", activation_fn=tf.nn.relu)
    # BUG FIX: the output layer must consume this graph's hidden activations
    # (y11), not the stale `y1` tensor left over from the earlier fnn example
    # (shape (1, 1)) — that would build "out" with the wrong input width and
    # conflict with the reused variables below.
    y12, theta12 = linear(y11, 1, name="out", activation_fn=tf.nn.sigmoid)
    scope.reuse_variables()  # second tower shares the "h"/"out" weights
    x2 = tf.placeholder(tf.float32, shape=(1, 10))
    y21, theta21 = linear(x2, 10, name="h", activation_fn=tf.nn.relu)
    # BUG FIX: likewise feed the second tower's hidden output y21, not y1.
    y22, theta22 = linear(y21, 1, name="out", activation_fn=tf.nn.sigmoid)
# Initializing the session
with tf.Session() as sess:
    # tf.initialize_all_variables() is deprecated; use the TF1 replacement.
    sess.run(tf.global_variables_initializer())
    # Feeding and Fetching data
    theta1, theta2 = sess.run([theta12, theta22], {x1: np.random.random([1, 10]), x2: np.random.random([1, 10])})
    # Shared scope means both fetches resolve to the same underlying variables.
    print(theta1[0])
    print(theta2[0])