"""
(c) 2019 Data Maker, hiplab.mc.vanderbilt.edu

version 1.0.0

This package serves as a proxy to the overall usage of the framework.
It is designed to generate synthetic data from an original dataset using deep learning techniques.

@TODO:
    - Make GPU and EPOCHS configurable
"""
import os
import json
from datetime import datetime, timedelta

import pandas as pd
import numpy as np

#
# The imports below are internal to the framework; the exact module paths are
# assumptions inferred from how the names are used in this file.
#
import transport        # used as transport.factory.instance(...)
from data import gan    # used as gan.Train(...) and gan.Predict(...)

ROUND_UP = 2
# _BINARY = ContinuousToDiscrete.binary(X,BIN_SIZE)
#

#
# NOTE: the Trainer class and method signatures below are assumed from context
# (the bodies reference self, and the train() factory at the end of this file
# returns a Trainer); the statements inside them are original unless marked.
#
class Trainer(Learner):
    """
    Trains a GAN on the encoded input data, then prepares a Generator that can
    decode the model's synthetic output.
    """
    def __init__(self,**_args):
        super().__init__(**_args)
        if 'gpu' in _args :
            # Remember which GPU training should run on (minimal reconstruction;
            # the original statements in this branch were elided)
            self.gpu = int(_args['gpu'])
        else:
            self.gpu = None
        # Other attributes used below (store, info, candidates, autopilot,
        # _df, _encoder, network_args) are assumed to be set by Learner.
        # f = open(os.sep.join([_args['logs'],'output',_args['context'],'map.json']))

    def run(self):
        # Assumed initialization, mirroring Generator.run() below
        _args = self.network_args
        #
        # Train the GAN on the encoded data
        #
        gTrain = gan.Train(**_args)
        gTrain.apply()

        # Persist the encoder's column map next to the training logs so a
        # Generator can later decode the synthetic (binary) output
        writer = transport.factory.instance(provider='file',context='write',path=os.sep.join([gTrain.out_dir,'map.json']))
        writer.write(self._encoder._map,overwrite=True)
        writer.close()
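        # For illustration only: the three transport calls above amount to a
        # plain JSON dump of the column map (a sketch, not the framework's
        # actual code path):
        #
        #   with open(os.sep.join([gTrain.out_dir,'map.json']),'w') as f:
        #       json.dump(self._encoder._map, f)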

        #
        # @TODO: At this point we need to generate some other objects
        #
        _args = {"network_args":self.network_args,"store":self.store,"info":self.info,"candidates":self.candidates,"data":self._df}
        if self.gpu :
            _args['gpu'] = self.gpu
        g = Generator(**_args)
        # g.run()

        # NOTE: this attribute assignment shadows the generate() method below;
        # once run() has executed, self.generate refers to the Generator object.
        self.generate = g
        if self.autopilot :
            self.generate.run()

    def generate (self):
        if self.autopilot :
            print( "Autopilot is set ... No need to call this function")
        else:
            raise Exception( "Autopilot has not been set. Wait till training is finished, then use the is_alive function on the process object.")
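    # Hypothetical usage sketch for this class (argument names are taken from
    # the _args dictionary built in run() above; values are placeholders):
    #
    #   t = Trainer(network_args={...}, store={...}, info={...}, autopilot=True)
    #   t.run()    # trains the GAN and, with autopilot on, generates output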

class Generator (Learner):
    def __init__(self,**_args):
        super().__init__(**_args)
        #
        # We need to load the mapping information for the space we are working with ...
        #
        self.network_args['candidates'] = int(_args['candidates']) if 'candidates' in _args else 1
        filename = os.sep.join([self.network_args['logs'],'output',self.network_args['context'],'map.json'])
        file = open(filename)
        self._map = json.loads(file.read())
        file.close()
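        # Equivalent idiomatic read (sketch only; behavior is identical):
        #
        #   with open(filename) as f:
        #       self._map = json.load(f)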

    def run(self):
        self.initalize()    # note: spelling follows the (elided) Learner base class
        #
        # The values will be returned because we provided _map information in the constructor
        #
        values,_matrix = self._encoder.convert()
        _args = self.network_args
        _args['map'] = self._map
        _args['values'] = np.array(values)
        _args['row_count'] = self._df.shape[0]

        # Run the trained GAN in prediction mode and decode each candidate
        # matrix back into a dataframe
        gHandler = gan.Predict(**_args)
        gHandler.load_meta(columns=None)
        _iomatrix = gHandler.apply()
        _candidates = [ self._encoder.revert(matrix=_item) for _item in _iomatrix]
        self.post(_candidates)
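        # The flow above is a round-trip through the encoder:
        #   original df --convert()--> binary matrix --gan.Predict--> candidate matrices
        #   candidate matrix --revert()--> synthetic dataframe (one per candidate)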

    def make_date(self,**_args):
        """
        Builds a formatted date, optionally followed by one date per delta
        (in days) found in offset. NOTE: the method name, signature, and the
        extraction of year/month/day/offset are assumed; the original
        definition was elided. The rest of the body is original.
        """
        year,month,day = int(_args['year']),int(_args['month']),int(_args['day'])
        offset = _args['offset'] if 'offset' in _args else None
        _date = datetime(year=year,month=month,day=day)
        FORMAT = _args['format'] if 'format' in _args else '%Y-%m-%d'
        r = []
        if offset :
            # Start from the seed date and walk forward by each delta (days)
            r = [_date.strftime(FORMAT)]
            for _delta in offset :
                _date = _date + timedelta(_delta)
                r.append(_date.strftime(FORMAT))
            return r
        else:
            return _date.strftime(FORMAT)
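    # Example of the offset behavior (hypothetical call, assuming the
    # signature sketched above):
    #
    #   self.make_date(year=2019, month=1, day=1, offset=[30, 30])
    #   # -> ['2019-01-01', '2019-01-31', '2019-03-02']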

#
# NOTE: the factory's name and signature below are assumed; only its docstring
# parameters and return statement survived in this file.
#
def train (**_args):
    """
    :param candidates   (default 1) number of candidates to generate
    :param info         {columns,sql,from}
    :param autopilot    generate output automatically once training completes
    :param batch        (default 2k) size of the batch
    """
    return Trainer(**_args)
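
# Hypothetical usage sketch (parameter values are placeholders; `info` follows
# the {columns,sql,from} shape documented above):
#
#   trainer = train(info={'columns':['gender','race'],'sql':'...','from':'observation'},
#                   candidates=2, autopilot=True, batch=2000)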