|
|
|
"""
|
|
|
|
import pickle
|
|
|
|
# if self.NUM_GPUS > 1 :
|
|
|
|
"""
|
|
|
|
This function is designed to accommodate the uses of the sub-classes outside of a strict dependency model.
|
|
|
|
Because prediction and training can happen independently
|
|
|
|
"""
|
|
|
|
# suffix = "-".join(column) if isinstance(column,list)else column
|
|
|
|
mean, var = tf.nn.moments(inputs, shift, keep_dims=True)
|
|
|
|
shape = inputs.shape[1].value
|
|
|
|
grad_and_var = (grad, v)
|
|
|
|
h2 = tf.nn.relu(h1)
|
|
|
|
x = x + h2
|
|
|
|
tmp_dim = dim
|
|
|
|
i = len(self.G_STRUCTURE) - 1
|
|
|
|
#
|
|
|
|
# This seems to be an extra hidden layer:
|
|
|
|
# Its goal is to map continuous values to discrete values (pre-trained to do this)
|
|
|
|
kernel = self.get.variables(name='W_' + str(i), shape=[tmp_dim, self.G_STRUCTURE[-1]])
|
|
|
|
h1 = self.normalize(inputs=tf.matmul(x, kernel), name='cbn' + str(i),
|
|
|
|
labels=label, n_labels=self.NUM_LABELS)
|
|
|
|
h2 = tf.nn.tanh(h1)
|
|
|
|
x = x + h2
|
|
|
|
# This seems to be the output layer
|
|
|
|
#
|
|
|
|
kernel = self.get.variables(name='W_' + str(i+1), shape=[self.Z_DIM, self.X_SPACE_SIZE])
|
|
|
|
bias = self.get.variables(name='b_' + str(i+1), shape=[self.X_SPACE_SIZE])
|
|
|
|
x = tf.nn.sigmoid(tf.add(tf.matmul(x, kernel), bias))
|
|
|
|
return x
|
|
|
|
x = self.normalize(inputs=x, name='cln' + str(i), shift=1,labels=label, n_labels=self.NUM_LABELS)
|
|
|
|
self.discriminator = Discriminator(**args)
|
|
|
|
self._REAL = args['real']
|
|
|
|
)
|
|
|
|
|
|
|
|
tf.compat.v1.get_variable_scope().reuse_variables()
|
|
|
|
|
|
|
|
sess.run(iterator_d.initializer,
|
|
|
|
|
|
|
|
else:
|
|
|
|
if self._LABEL is not None :
|
|
|
|
# #
|
|
|
|
|
|
|
|
# N = ii.sum()
|
|
|
|
# missing_values = self.MISSING_VALUES if self.MISSING_VALUES else self.values
|
|
|
|
# missing = np.random.choice(missing_values,N)
|
|
|
|
# # missing = []
|
|
|
|
# #
|
|
|
|
# # @TODO:
|
|
|
|
# # Log the findings here in terms of ratio, missing, candidate count
|
|
|
|
# # print ([np.max(ratio),len(missing),len(found),i])
|
|
|
|
# i = np.where(ii == 0)[0]
|
|
|
|
# print (df)
|