
"""
import os
import pickle
import numpy as np
import pandas as pd
import tensorflow as tf
# os.environ['CUDA_VISIBLE_DEVICES'] = str(self.GPU_CHIPS[0])
if args['real'].shape[0] < PROPOSED_BATCH_PER_GPU :
# if self.logger :
'Z_DIM':self.Z_DIM,
elif not os.path.exists(path):
# WGAN-GP gradient penalty: push the critic's gradient norm along the
# interpolates x_hat towards 1
grad = tf.gradients(y_hat, [x_hat])[0]
slopes = tf.sqrt(tf.reduce_sum(tf.square(grad), 1))
gradient_penalty = tf.reduce_mean((slopes - 1.) ** 2)
# all_regs = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
all_regs = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.REGULARIZATION_LOSSES)
# Wasserstein distance estimate plus weighted penalty and regularization terms
w_distance = -tf.reduce_mean(y_hat_real) + tf.reduce_mean(y_hat_fake)
loss = w_distance + 10 * gradient_penalty + sum(all_regs)
# tf.add_to_collection('dlosses', loss)
tf.compat.v1.add_to_collection('dlosses', loss)
return w_distance, loss
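# --- Sketch, not part of the original file ----------------------------------
# A self-contained version of the critic loss computed above, making the
# interpolation that produces x_hat explicit. The callable `critic` and the
# tensors `real`/`fake` are assumptions; the original wires these up elsewhere
# in the class and hard-codes the penalty weight 10.
def wgan_gp_critic_loss(critic, real, fake, penalty_weight=10.0):
    """Return (wasserstein_distance, total_loss) for one batch (graph mode)."""
    eps = tf.random.uniform([tf.shape(real)[0], 1], 0.0, 1.0)
    x_hat = eps * real + (1.0 - eps) * fake           # random interpolates
    y_hat = critic(x_hat)
    grad = tf.gradients(y_hat, [x_hat])[0]
    slopes = tf.sqrt(tf.reduce_sum(tf.square(grad), 1))
    gradient_penalty = tf.reduce_mean((slopes - 1.0) ** 2)
    w_distance = -tf.reduce_mean(critic(real)) + tf.reduce_mean(critic(fake))
    return w_distance, w_distance + penalty_weight * gradient_penalty
# -----------------------------------------------------------------------------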
"""
# print (total_loss)
return total_loss, w
def input_fn(self):
"""
This function seems to produce
"""
features_placeholder = tf.compat.v1.placeholder(shape=self._REAL.shape, dtype=tf.float32)
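# --- Sketch, not part of the original file ----------------------------------
# A minimal TF1-compat input pipeline built around a placeholder like the one
# defined above: slice it into rows, shuffle, batch, and repeat. The batch
# size and shuffle buffer below are assumptions, not values from this class.
def make_dataset(features_placeholder, batch_size=64):
    dataset = tf.data.Dataset.from_tensor_slices(features_placeholder)
    dataset = dataset.shuffle(buffer_size=1000).batch(batch_size).repeat()
    # The iterator must be initialized with a feed_dict that supplies the
    # placeholder, e.g. sess.run(it.initializer, {features_placeholder: real})
    return tf.compat.v1.data.make_initializable_iterator(dataset)
# -----------------------------------------------------------------------------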
with tf.compat.v1.variable_scope(tf.compat.v1.get_variable_scope()):
# Report and record the epoch's Wasserstein distance estimate (negated running
# sum averaged over the critic steps) together with the elapsed time
format_str = 'epoch: %d, w_distance = %f (%.1f)'
print(format_str % (epoch, -w_sum/(self.STEPS_PER_EPOCH*2), duration))
# print (dir (w_distance))
logs.append({"epoch":epoch,"distance":-w_sum/(self.STEPS_PER_EPOCH*2) })
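# --- Sketch, not part of the original file ----------------------------------
# The `logs` entries collected above could be persisted with the pickle module
# imported at the top; the file name 'train_logs.pkl' is an assumption.
def save_logs(logs, path='train_logs.pkl'):
    """Serialize the list of {'epoch': ..., 'distance': ...} records."""
    with open(path, 'wb') as handle:
        pickle.dump(logs, handle)
# -----------------------------------------------------------------------------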
"""
# Round the generated matrix to integers, row by row, before collecting it
candidates.append(np.array([np.round(row).astype(int) for row in _matrix]))
# return candidates[0] if len(candidates) == 1 else candidates
return candidates
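# --- Sketch, not part of the original file ----------------------------------
# The post-processing above as a standalone helper: round each generated float
# matrix to integers and collect the results. The commented-out return hints
# at an earlier behaviour of unwrapping a single candidate, kept here behind a
# flag; `samples` is an assumed name for the generator output.
def round_candidates(samples, unwrap_single=False):
    candidates = [np.array(np.round(matrix).astype(int)) for matrix in samples]
    if unwrap_single and len(candidates) == 1:
        return candidates[0]
    return candidates
# -----------------------------------------------------------------------------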
def _apply(self,**args):
# if len(found) == CANDIDATE_COUNT:
# if INDEX > 0 :
# df.columns = columns
# df = df[columns[0]].append(pd.Series(missing))
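# --- Sketch, not part of the original file ----------------------------------
# The commented-out lines above suggest re-attaching column names and padding
# the first column with missing values. pandas.Series.append is deprecated; a
# current equivalent uses pd.concat. The names `columns` and `missing` come
# from the comments; the helper itself is hypothetical.
def pad_first_column(df, columns, missing):
    df.columns = columns
    return pd.concat([df[columns[0]], pd.Series(missing)], ignore_index=True)
# -----------------------------------------------------------------------------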