"""
5 years ago
import os
import json
import pickle

import tensorflow as tf
self.get.suffix = lambda : "-".join(self.ATTRIBUTES['synthetic']) if isinstance(self.ATTRIBUTES['synthetic'],list) else self.ATTRIBUTES['synthetic']
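#
# The suffix tags every artifact (meta files, checkpoints) with the synthetic
# column(s): a list such as ['race','gender'] becomes "race-gender", a plain
# string is used as-is.
#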
self.logger = args['logger'] if 'logger' in args and args['logger'] else None
self.init_logs(**args)

def init_logs(self, **args):
    self.log_dir = args['logs'] if 'logs' in args else 'logs'
    self.mkdir(self.log_dir)
    #
    #
    for key in ['train', 'output']:
        self.mkdir(os.sep.join([self.log_dir, key]))
        self.mkdir(os.sep.join([self.log_dir, key, self.CONTEXT]))

    self.train_dir = os.sep.join([self.log_dir, 'train', self.CONTEXT])
    self.out_dir = os.sep.join([self.log_dir, 'output', self.CONTEXT])
    if self.logger:
        #
        # We will clear the logs from the data-store
        #
        column = self.ATTRIBUTES['synthetic']
        db = self.logger.db
        if db[column].count() > 0:
            db.backup.insert({'name': column, 'logs': list(db[column].find())})
            db[column].drop()
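#
# Resulting layout on disk (illustrative, assuming logs='logs' and CONTEXT='demo'):
#   logs/train/demo    checkpoints written by the saver during training
#   logs/output/demo   meta-<suffix>.json files written by log_meta
#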
def load_meta(self, column):
    """
    This function is designed to accommodate the use of the sub-classes outside of a strict dependency model,
    because prediction and training can happen independently.
    """
    # suffix = "-".join(column) if isinstance(column,list) else column
    suffix = self.get.suffix()
    _name = os.sep.join([self.out_dir, 'meta-' + suffix + '.json'])
    if os.path.exists(_name):
        attr = json.loads(open(_name).read())
        for key in attr:
            value = attr[key]
            setattr(self, key, value)
        self.train_dir = os.sep.join([self.log_dir, 'train', self.CONTEXT])
        self.out_dir = os.sep.join([self.log_dir, 'output', self.CONTEXT])
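#
# Hypothetical usage sketch (the instance name `net` is assumed): restoring
# hyper-parameters before prediction, from a meta file written by log_meta:
#
#   net.load_meta('race')  # re-applies CONTEXT, Z_DIM, ... via setattr
#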
def log_meta(self, **args):
    _object = {
        # '_id':'meta',
        'CONTEXT': self.CONTEXT,
        'ATTRIBUTES': self.ATTRIBUTES,
        'BATCHSIZE_PER_GPU': self.BATCHSIZE_PER_GPU,
        'Z_DIM': self.Z_DIM,
        'X_SPACE_SIZE': self.X_SPACE_SIZE,
        'D_STRUCTURE': self.D_STRUCTURE,
        'G_STRUCTURE': self.G_STRUCTURE,
        'NUM_GPUS': self.NUM_GPUS,
        'NUM_LABELS': self.NUM_LABELS,
        'MAX_EPOCHS': self.MAX_EPOCHS,
        'ROW_COUNT': self.ROW_COUNT
    }
    if args and 'key' in args and 'value' in args:
        key = args['key']
        value = args['value']
        _object[key] = value  # NOTE: was `object[key]`, which names the builtin and would fail
    # suffix = "-".join(self.column) if isinstance(self.column,list) else self.column
    suffix = self.get.suffix()
    _name = os.sep.join([self.out_dir, 'meta-' + suffix])
    with open(_name + '.json', 'w') as f:
        f.write(json.dumps(_object))
    return _object
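#
# Hypothetical usage sketch ('epochs' is an assumed extra key/value pair):
#
#   meta = net.log_meta(key='epochs', value=200)
#   # writes <out_dir>/meta-<suffix>.json with the hyper-parameters above
#   # plus {'epochs': 200}
#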
def mkdir(self, path):
    root = []
    for loc in path.split(os.sep):
        root.append(loc)
        if not os.path.exists(os.sep.join(root)):
            os.mkdir(os.sep.join(root))
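#
# mkdir builds the path one segment at a time so intermediate directories are
# created as needed, e.g. mkdir('logs/train/demo') creates 'logs', then
# 'logs/train', then 'logs/train/demo' (akin to os.makedirs with exist_ok=True).
#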
loss = -tf.reduce_mean(y_hat_fake) + sum(all_regs)
#tf.add_to_collection('glosses', loss)
tf.compat.v1.add_to_collection('glosses', loss)
return loss, loss
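#
# Generator objective (WGAN): minimize -E[D(G(z))], i.e. push the critic's
# score on fake samples up; graph-level regularization losses are added on top.
#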
def load_meta(self, column):
    super().load_meta(column)
    self.discriminator.load_meta(column)
def network(self, **args):
    """
    This function will build the network that will generate the synthetic candidates
    :inputs matrix of data that we need
    :dim dimensions of ...
    """
    x = args['inputs']
    tmp_dim = self.Z_DIM if 'dim' not in args else args['dim']
    label = args['label']
# interpolate between real and fake samples for the gradient penalty
# (epsilon is assumed to be a random uniform draw made earlier in this function)
x_hat = real + epsilon * (fake - real)
y_hat_fake = self.network(inputs=fake, label=label)
y_hat_real = self.network(inputs=real, label=label)
y_hat = self.network(inputs=x_hat, label=label)

# per-sample L2 norm of the critic's gradient at the interpolated points
grad = tf.gradients(y_hat, [x_hat])[0]
slopes = tf.sqrt(tf.reduce_sum(tf.square(grad), 1))
gradient_penalty = tf.reduce_mean((slopes - 1.) ** 2)
# all_regs = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
all_regs = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.REGULARIZATION_LOSSES)
w_distance = -tf.reduce_mean(y_hat_real) + tf.reduce_mean(y_hat_fake)
loss = w_distance + 10 * gradient_penalty + sum(all_regs)
# tf.add_to_collection('dlosses', loss)
tf.compat.v1.add_to_collection('dlosses', loss)
return w_distance, loss
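#
# Critic objective (WGAN-GP, Gulrajani et al. 2017):
#   L_D = E[D(fake)] - E[D(real)] + 10 * E[(||grad D(x_hat)||_2 - 1)^2] + regs
# where x_hat is a random interpolation between real and fake samples and the
# penalty weight 10 is the value hard-coded above.
#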
total_loss = tf.add_n(losses, name='total_loss')
tower_grads = []
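#
# tower_grads accumulates per-GPU gradients; in the usual multi-tower TF1
# pattern (assumed here, the averaging code is not part of this fragment) they
# are averaged across towers before a single apply_gradients step.
#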
logs.append({"epoch":epoch,"distance":-w_sum/(self.STEPS_PER_EPOCH*2) })
if epoch % self.MAX_EPOCHS == 0:
# suffix = "-".join(self.ATTRIBUTES['synthetic']) if isinstance(self.ATTRIBUTES['synthetic'],list) else self.ATTRIBUTES['synthetic']
suffix = self.get.suffix()
_name = os.sep.join([self.train_dir,suffix])
# saver.save(sess, self.train_dir, write_meta_graph=False, global_step=epoch)
saver.save(sess, _name, write_meta_graph=False, global_step=epoch)
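#
# With TF1's Saver and global_step=epoch this writes files such as
# <train_dir>/<suffix>-<epoch>.index and .data-* (e.g. race-100.index,
# race-100.data-00000-of-00001); write_meta_graph=False skips the .meta file.
#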
#
#
if self.logger:
    ratio = []
    info['ratio'] = __ratio
if self.logger:
    #
    # We should train upon this data