|
|
|
"""
|
|
|
|
import pickle
|
|
|
|
self.get.suffix = lambda : "-".join(self.ATTRIBUTES['synthetic']) if isinstance(self.ATTRIBUTES['synthetic'],list) else self.ATTRIBUTES['synthetic']
|
|
|
|
self.logger = args['logger'] if 'logger' in args and args['logger'] else None
|
|
|
|
self.init_logs(**args)
|
|
|
|
|
|
|
|
def init_logs(self, **args):
    """
    Create the logging directory layout and record the paths on the instance.

    Builds the following tree (every level created via ``self.mkdir``):

        <log_dir>/train/<CONTEXT>
        <log_dir>/output/<CONTEXT>

    Keyword arguments:
        :logs   optional base directory for logs (defaults to ``'logs'``)

    Side effects:
        Sets ``self.log_dir``, ``self.train_dir`` and ``self.out_dir``.
    """
    # Prefer dict.get over the 'key in dict' / subscript pair (single lookup).
    self.log_dir = args.get('logs', 'logs')
    self.mkdir(self.log_dir)

    # One subtree per purpose: training artifacts vs generated output.
    for key in ['train', 'output']:
        self.mkdir(os.sep.join([self.log_dir, key]))
        self.mkdir(os.sep.join([self.log_dir, key, self.CONTEXT]))

    self.train_dir = os.sep.join([self.log_dir, 'train', self.CONTEXT])
    self.out_dir = os.sep.join([self.log_dir, 'output', self.CONTEXT])
|
|
|
|
for key in attr :
|
|
|
|
root = []
|
|
|
|
labels=label, n_labels=self.NUM_LABELS)
|
|
|
|
h2 = tf.nn.tanh(h1)
|
|
|
|
x = x + h2
|
|
|
|
# This seems to be the output layer
|
|
|
|
#
|
|
|
|
kernel = self.get.variables(name='W_' + str(i+1), shape=[self.Z_DIM, self.X_SPACE_SIZE])
|
|
|
|
bias = self.get.variables(name='b_' + str(i+1), shape=[self.X_SPACE_SIZE])
|
|
|
|
x = tf.nn.sigmoid(tf.add(tf.matmul(x, kernel), bias))
|
|
|
|
return x
|
|
|
|
# self.log (real_shape=list(self._REAL.shape),label_shape = self._LABEL.shape,meta_data=self.meta)
|
|
|
|
def load_meta(self, column):
    """
    Delegate a meta-data load request to this object's dependents.

    The call is forwarded first to the parent class, then to both
    sub-networks (generator and discriminator).

    :param column: name of the column whose meta data should be loaded
    """
    super().load_meta(column)
    for dependent in (self.generator, self.discriminator):
        dependent.load_meta(column)
|
|
|
|
def loss(self,**args):
    """
    This function will compute a "tower" loss of the generated candidate against real data

    Training will consist in having both generator and discriminators

    Keyword arguments:
        :scope  scope under which the loss is built (presumably a TF variable/name scope — TODO confirm)
        :stage  training stage identifier
        :real   batch of real data samples the candidates are compared against
        :label  labels associated with the real batch
    """

    # Unpack the required keyword arguments; all four are mandatory
    # (a missing key raises KeyError here rather than deeper in the graph).
    scope = args['scope']
    stage = args['stage']
    real = args['real']
    label = args['label']
|
|
|
|
|
|
|
|
def network(self,**args):
|
|
|
|
stage = args['stage']
|
|
|
|
for _ in range(2):
|
|
|
|
_, w = sess.run([train_d, w_distance])
|
|
|
|
w_sum += w
|
|
|
|
sess.run(train_g)
|
|
|
|
duration = time.time() - start_time
|
|
|
|
|
|
|
|
assert not np.isnan(w_sum), 'Model diverged with loss = NaN'
|
|
|
|
|
|
|
|
format_str = 'epoch: %d, w_distance = %f (%.1f)'
|
|
|
|
print(format_str % (epoch, -w_sum/(self.STEPS_PER_EPOCH*2), duration))
|
|
|
|
# print (dir (w_distance))
|
|
|
|
|
|
|
|
logs.append({"epoch":epoch,"distance":-w_sum/(self.STEPS_PER_EPOCH*2) })
|
|
|
|
|
|
|
|
#
|
|
|
|
|
|
|
|
# df = pd.DataFrame(np.round(f)).astype(np.int32)
|
|
|
|
candidates.append (np.round(_matrix).astype(np.int64))
|
|
|
|
# return candidates[0] if len(candidates) == 1 else candidates
|
|
|
|
|
|
|
|
return candidates
|
|
|
|
|
|
|
|
def _apply(self,**args):
|
|
|
|
# # break
|
|
|
|
# if INDEX > 0 :
|
|
|
|
# info =dict(info ,**{"selected":INDEX, "ratio": ratio[INDEX] })
|
|
|
|
# else :
|