How to embed variable creation in a loop

Question

I am stacking custom fully connected layers with TensorFlow.

import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf

class Layer: # a Layer encapsulates the operation weight * x + b
    @staticmethod
    def weight_variable(shape, name): # the weight is a get_variable
        return tf.get_variable(name=name, dtype=tf.float32, shape=shape, initializer=tf.contrib.layers.xavier_initializer_conv2d())
    @staticmethod
    def bias_variable(shape, name): # the bias is also a get_variable
        return tf.get_variable(name=name, dtype=tf.float32, shape=shape, initializer=tf.constant_initializer(0))
    @staticmethod
    def full_layer(x, W_shape):
        W = Layer.weight_variable(W_shape, "W")
        b = Layer.bias_variable(W_shape[-1], "b")
        full = tf.add(tf.matmul(x, W), b, name="full")
        return full
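
# Note: full_layer creates variables literally named "W" and "b" via
# tf.get_variable, so each call must run inside its own tf.variable_scope;
# calling it twice in the same scope raises a "variable already exists"
# error in TF 1.x. A minimal sketch (the scope name demo_scope is hypothetical):
#   with tf.variable_scope("demo_scope"):
#       out = Layer.full_layer(x, [1, 100])  # creates demo_scope/W, demo_scope/b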


class Random_aprrox(Layer):
    def __init__(self, N, sigma, bias, slope, learning_rate):
        super().__init__() # initialize the inherited Layer base class
        self.N = N
        self.sigma = sigma
        self.bias = bias
        self.slope = slope
        self.x0 = np.array([i for i in range(self.N)])
        self.noise_0 = np.random.normal(self.bias, self.sigma, [self.N])
        self.y0 = self.x0*self.slope+self.noise_0
        self.learning_rate = learning_rate
        print(self.x0, self.y0)
        print(plt.plot(self.x0, self.y0, "rx"))

    def graph_(self, stack_numb):
        # clear the default graph
        tf.reset_default_graph()

        # placeholders for x and y; shape [None, 1] so matmul(x, W) works with W of shape [1, 100]
        x = tf.placeholder(shape=[None, 1], dtype=tf.float32, name='x')
        y = tf.placeholder(shape=[None, 1], dtype=tf.float32, name='y')


        ## build the scope names F1..F{stack_numb}
        stack_names = []
        for i in range(0, stack_numb):
            stack_names.append("F" + str(i + 1))

        ## stack the graph
        if stack_numb == 0:
            print("nothing to stack")
        else:
            with tf.variable_scope(stack_names[0]):
                F1 = Layer.full_layer(x, [1, 100]) # first layer
                R1 = tf.nn.relu(F1)

        # my loop attempt -- note it always feeds R1 and only rebinds F2/R2
        for i in range(1, stack_numb):
            with tf.variable_scope(stack_names[i]):
                F2 = Layer.full_layer(R1, [100, 100])
                R2 = tf.nn.relu(F2)

        with tf.variable_scope("F3"):
            F3 = Layer.full_layer(R2, [100, 100])
            R3 = tf.nn.relu(F3)
        with tf.variable_scope("F4"):
            F4 = Layer.full_layer(R3, [100, 100])
            R4 = tf.nn.relu(F4)
        with tf.variable_scope("F5"):
            F5 = Layer.full_layer(R4, [100, 100])
            R5 = tf.nn.relu(F5)
        with tf.variable_scope("F6"):
            F6 = Layer.full_layer(R5, [100, 100])
            R6 = tf.nn.relu(F6)
        with tf.variable_scope("F7"):
            F7 = Layer.full_layer(R6, [100, 100])
            R7 = tf.nn.relu(F7)
        with tf.variable_scope("F8"):
            F8 = Layer.full_layer(R7, [100, 100])
            R8 = tf.nn.relu(F8)
        with tf.variable_scope("F9"):
            F9 = Layer.full_layer(R8, [100, 100])
            R9 = tf.nn.relu(F9)
        with tf.variable_scope("F10"):
            F10 = Layer.full_layer(R9, [100, 100])
            R10 = tf.nn.relu(F10)
        with tf.variable_scope("F11"):
            F11 = Layer.full_layer(R10, [100, 1])

In the code above, you can see that the ten layer blocks at the bottom differ only in their names.

I would like to express this with a loop, so that for a given stack count it automatically stacks as many layers as the user wants.

How can I do this?

Tags: python, loops, variables, tensorflow
1 Answer

Loops work just fine in TensorFlow:

import numpy as np
import tensorflow as tf

class Layer: # a Layer encapsulates the operation weight * x + b
    @staticmethod
    def weight_variable(shape, name): # the weight is a get_variable
        return tf.get_variable(name=name, dtype=tf.float32, shape=shape, initializer=tf.contrib.layers.xavier_initializer_conv2d())
    @staticmethod
    def bias_variable(shape, name): # the bias is also a get_variable
        return tf.get_variable(name=name, dtype=tf.float32, shape=shape, initializer=tf.constant_initializer(0))
    @staticmethod
    def full_layer(x, W_shape):
        W = Layer.weight_variable(W_shape, "W")
        b = Layer.bias_variable(W_shape[-1], "b")
        full = tf.add(tf.matmul(x, W), b, name="full")
        return full

layers = [tf.placeholder(tf.float32, [1, 100])] # seed the list with the network input

# some code here

for i in range(2, 11):
    with tf.variable_scope("F" + str(i)):        # a fresh scope per layer keeps "W"/"b" names unique
        f = Layer.full_layer(layers[-1], [100, 100])
        layers.append(tf.nn.relu(f))             # each layer consumes the previous list entry
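
Applied to the question's graph_ method, the same pattern can build the whole stack. A minimal sketch, assuming TF 1.x, the Layer class above, an input x of shape [None, 1], and that stack_numb counts the hidden blocks (the helper name build_stack is hypothetical):

def build_stack(x, stack_numb):
    with tf.variable_scope("F1"):
        h = tf.nn.relu(Layer.full_layer(x, [1, 100]))       # input layer: [None, 1] -> [None, 100]
    for i in range(2, stack_numb + 2):
        with tf.variable_scope("F" + str(i)):               # one fresh scope per hidden block
            h = tf.nn.relu(Layer.full_layer(h, [100, 100]))
    with tf.variable_scope("F" + str(stack_numb + 2)):
        return Layer.full_layer(h, [100, 1])                # linear output head, no ReLU

With stack_numb = 9 this reproduces the hand-written F1..F11 graph from the question.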