
I am stacking up a custom number of fully connected layers using TensorFlow.

import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf

class Layer: #define Layer - a Layer computes weight * x + b
    def weight_variable(shape, name): #weight is get_variable 
        return tf.get_variable(name=name, dtype=tf.float32, shape=shape ,initializer=tf.contrib.layers.xavier_initializer_conv2d())
    def bias_variable(shape, name): #bias is also get_variable
        return tf.get_variable(name=name, dtype=tf.float32, shape=shape, initializer=tf.constant_initializer(0))       
    def full_layer(x, W_shape):
        W = Layer.weight_variable(W_shape, "W")
        b = Layer.bias_variable(W_shape[-1], "b")
        full = tf.add(tf.matmul(x,W),b,name="full")
        return full


class Random_aprrox(Layer):
    def __init__(self, N, sigma, bias, slope, learning_rate):
        super().__init__() #when instantiated, the Layer class being inherited into the class Random_aprrox 
        self.N = N
        self.sigma = sigma
        self.bias = bias
        self.slope = slope
        self.x0 = np.array([i for i in range(self.N)])
        self.noise_0 = np.random.normal(self.bias, self.sigma, [self.N])
        self.y0 = self.x0*self.slope+self.noise_0
        self.learning_rate = learning_rate
        print(self.x0, self.y0)
        print(plt.plot(self.x0, self.y0, "rx"))

    def graph_(self, stack_numb):
        #vacanize graph
        tf.reset_default_graph() 

        #make rooms for x and y 
        x = tf.placeholder(shape=[1,None], dtype=tf.float32,name= 'x')
        y = tf.placeholder(shape=[1,None], dtype=tf.float32, name='y')


        ##name stacks
        stack_names = []
        for i in range(0, stack_numb):
            stack_names.append(''.join(["F", str(i+1)]))

        ##graph stacks        
        if stack_numb == 0:
            print("nothing to stack")
        else:
            with tf.variable_scope(stack_names[0]):
                F1 = Layer.full_layer(x, [1, 100]) #first layer 
                R1 = tf.nn.relu(F1)

        for i in range(1, stack_numb):
            with tf.variable_scope(stack_names[i]):
                F2 = Layer.full_layer(R1, [100, 100])
                R2 = tf.nn.relu(F2)

        with tf.variable_scope("F3"):
            F3 = Layer.full_layer(R2, [100, 100])
            R3 = tf.nn.relu(F3)
        with tf.variable_scope("F4"):
            F4 = Layer.full_layer(R3, [100, 100])
            R4 = tf.nn.relu(F4)
        with tf.variable_scope("F5"):
            F5 = Layer.full_layer(R4, [100, 100])
            R5 = tf.nn.relu(F5)
        with tf.variable_scope("F6"):
            F6 = Layer.full_layer(R5, [100, 100])
            R6 = tf.nn.relu(F6)
        with tf.variable_scope("F7"):
            F7 = Layer.full_layer(R6, [100, 100])
            R7 = tf.nn.relu(F7)
        with tf.variable_scope("F8"):
            F8 = Layer.full_layer(R7, [100, 100])
            R8 = tf.nn.relu(F8)
        with tf.variable_scope("F9"):
            F9 = Layer.full_layer(R8, [100, 100])
            R9 = tf.nn.relu(F9)
        with tf.variable_scope("F10"):
            F10 = Layer.full_layer(R9, [100, 100])
            R10 = tf.nn.relu(F10)
        with tf.variable_scope("F11"):
            F11 = Layer.full_layer(R10, [100, 1])

At the bottom of the code above, you can see ten layers repeated that differ only in name.

I'd like to express that with a loop, so that given a number of stacks, the layers the user intended are stacked up automatically.

How can I do this?

1 Answer

Loops work just fine in TensorFlow:

import numpy as np
import tensorflow as tf

class Layer: #define Layer - a Layer computes weight * x + b
    def weight_variable(shape, name): #weight is get_variable 
        return tf.get_variable(name=name, dtype=tf.float32, shape=shape ,initializer=tf.contrib.layers.xavier_initializer_conv2d())
    def bias_variable(shape, name): #bias is also get_variable
        return tf.get_variable(name=name, dtype=tf.float32, shape=shape, initializer=tf.constant_initializer(0))       
    def full_layer(x, W_shape):
        W = Layer.weight_variable(W_shape, "W")
        b = Layer.bias_variable(W_shape[-1], "b")
        full = tf.add(tf.matmul(x,W),b,name="full")
        return full

layers = [tf.placeholder(tf.float32, [1, 100])]  # the input tensor seeds the list

# some code here

for i in range(2, 11):
    with tf.variable_scope("F" + str(i)):             # one scope per layer: F2 .. F10
        f = Layer.full_layer(layers[-1], [100, 100])  # feed the previous layer's output forward
        layers.append(tf.nn.relu(f))
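
For reference, here is a minimal sketch of how that loop could be folded back into the question's graph_ method, assuming the Layer class defined above, a hidden width of 100, and an input placeholder of shape [None, 1] (one scalar feature per row); build_stack and the hidden argument are names introduced here for illustration, not part of the original code.

    import tensorflow as tf

    def build_stack(x, stack_numb, hidden=100):
        # first layer maps the scalar input up to the hidden width
        with tf.variable_scope("F1"):
            out = tf.nn.relu(Layer.full_layer(x, [1, hidden]))
        # remaining hidden layers; a fresh scope per layer avoids get_variable name clashes
        for i in range(2, stack_numb + 1):
            with tf.variable_scope("F" + str(i)):
                out = tf.nn.relu(Layer.full_layer(out, [hidden, hidden]))
        # final linear layer down to a single output (no ReLU, like the original F11)
        with tf.variable_scope("F" + str(stack_numb + 1)):
            out = Layer.full_layer(out, [hidden, 1])
        return out

With x = tf.placeholder(tf.float32, [None, 1], name='x'), calling build_stack(x, stack_numb=10) should reproduce the F1..F11 graph written out by hand in the question.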