bug fix ...

dev
Steve L. Nyemba 4 years ago
parent 6e45704252
commit d54758aac3

@@ -144,20 +144,22 @@ def generate(**_args):
     """
     partition = _args['partition'] if 'partition' in _args else None
     if not partition :
-        LOG_DIR = os.sep.join([_args['logs'],'output',_args['context']])
+        MAP_FOLDER = os.sep.join([_args['logs'],'output',_args['context']])
         # f = open(os.sep.join([_args['logs'],'output',_args['context'],'map.json']))
     else:
-        LOG_DIR = os.sep.join([_args['logs'],'output',_args['context'],str(partition)])
+        MAP_FOLDER = os.sep.join([_args['logs'],'output',_args['context'],str(partition)])
         # f = open(os.sep.join([_args['logs'],'output',_args['context'],str(partition),'map.json']))
-    f = open(os.sep.join([LOG_DIR,'map.json']))
+    f = open(os.sep.join([MAP_FOLDER,'map.json']))
     _map = json.loads(f.read())
     f.close()
+    #
+    #
     # if 'file' in _args :
     #     df = pd.read_csv(_args['file'])
     # else:
     #     df = _args['data'] if not isinstance(_args['data'],str) else pd.read_csv(_args['data'])
     args = {"context":_args['context'],"max_epochs":_args['max_epochs'],"candidates":_args['candidates']}
-    args['logs'] = LOG_DIR if 'logs' in _args else 'logs'
+    args['logs'] = _args['logs'] if 'logs' in _args else 'logs'
     args ['max_epochs'] = _args['max_epochs']
     # args['matrix_size'] = _matrix.shape[0]
     args['batch_size'] = 2000
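
In short, the hunk above renames the folder that map.json is read from (formerly LOG_DIR, now MAP_FOLDER) and stops pointing args['logs'] at that per-partition output folder, taking the value straight from _args['logs'] instead. A minimal sketch of what the corrected lookup computes, not part of the commit; load_map is a hypothetical helper name used only for illustration:

import json
import os

def load_map(logs, context, partition=None):
    # map.json lives under <logs>/output/<context>, or under
    # <logs>/output/<context>/<partition> when a partition is given
    parts = [logs, 'output', context]
    if partition is not None:
        parts.append(str(partition))
    with open(os.sep.join(parts + ['map.json'])) as f:
        return json.loads(f.read())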
@@ -177,7 +179,6 @@ def generate(**_args):
     lparams = {'columns':None}
     if partition :
         lparams['partition'] = partition
     handler.load_meta(**lparams)
     #
     # Let us now format the matrices by reverting them to a data-frame with values
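
This hunk only touches the call that loads column metadata before the matrices are turned back into a data frame: lparams is expanded into keyword arguments, so the partition key is passed only when one was supplied. A self-contained sketch of that calling pattern; Handler here is a stand-in, not the project's class:

class Handler:
    # stand-in with a permissive signature; only the keyword-expansion pattern matters
    def load_meta(self, columns=None, partition=None):
        print('loading metadata', 'columns =', columns, 'partition =', partition)

handler = Handler()
partition = 3                    # would be None when no partition is given
lparams = {'columns': None}
if partition:
    lparams['partition'] = partition
handler.load_meta(**lparams)     # load_meta(columns=None, partition=3)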

@@ -244,7 +244,7 @@ class Components :
         writer = factory.instance(**_args['store'])
         _df = _args['data']
         if _schema :
-            columns = _df.columns.tolist
+            columns = []
             for _item in _schema :
                 name = _item['name']
                 _type = str
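
The one-line change above fixes a common pandas slip: _df.columns.tolist (without parentheses) assigns the bound method itself, not a list of column names. The commit replaces it with an empty list, presumably so the loop over _schema can rebuild the column list from the declared field names. A short illustration with a made-up frame and schema, not the project's code:

import pandas as pd

_df = pd.DataFrame({'name': ['a'], 'age': [1]})

broken = _df.columns.tolist      # bound method object, not a list of names
fixed  = _df.columns.tolist()    # ['name', 'age']

# one plausible way the loop could rebuild the list (the rest of the loop is not shown in the hunk)
_schema = [{'name': 'name', 'type': 'STRING'}, {'name': 'age', 'type': 'INTEGER'}]
columns = []
for _item in _schema:
    columns.append(_item['name'])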

@@ -5,7 +5,7 @@ import sys
 def read(fname):
     return open(os.path.join(os.path.dirname(__file__), fname)).read()
 args = {"name":"data-maker",
-    "version":"1.4.7.3",
+    "version":"1.4.7.4",
     "author":"Vanderbilt University Medical Center","author_email":"steve.l.nyemba@vanderbilt.edu","license":"MIT",
     "packages":find_packages(),"keywords":["healthcare","data","transport","protocol"]}
 args["install_requires"] = ['data-transport@git+https://dev.the-phi.com/git/steve/data-transport.git','tensorflow==1.15','pandas','pandas-gbq','pymongo']
