|
|
|
#!/usr/bin/env python3
|
|
|
|
# bounds = Components.split(df,MAX_ROWS,PART_SIZE)
|
|
|
|
# Log information about the training process for this partition, when logging is enabled.
|
|
|
|
#
|
|
|
|
# NOTE(review): tail of a training routine whose `def` line lies outside this
# chunk; `df`, `partition`, `_args`, and `logger` are presumably bound in the
# elided lines above — confirm against the full file.

# Collect training metadata: dataset dimensions, the partition being trained,
# and where its logs are written.
info = {"rows":df.shape[0],"cols":df.shape[1], "partition":partition,"logs":_args['logs']}

# Record the training event (module/action plus the metadata above) via the
# project logger.
logger.write({"module":"train","action":"train","input":info})

# Kick off the actual model training with the assembled keyword arguments.
data.maker.train(**_args)

pass  # NOTE(review): redundant after the statements above; safe to delete.
|
|
|
|
|
|
|
|
# @staticmethod
|
|
|
|
# NOTE(review): this chunk lost its indentation and many interior lines were
# elided by extraction — `bounds`, `_args`, `data_comp`, `df`, and `generator`
# are presumably bound in the missing lines (see the commented-out
# `Components.split` reference earlier in the file); confirm against the full
# source before modifying.
def generate(self,args):
"""
Generate synthetic data for one partition and persist it.

Reads from `args`: 'context', 'logs', 'partition', 'notify', and optionally
'file'; writes CSV files under the computed output folder, or uploads to
BigQuery when 'file' is absent. (Original docstring was truncated:
"store it to a given,".)
"""
# Progress/audit records go to a Mongo collection named after the context.
logger = factory.instance(type='mongo.MongoWriter',args={'dbname':'aou','doc':args['context']})
# Defaults when the caller omits 'logs' or 'partition'.
log_folder = args['logs'] if 'logs' in args else 'logs'
partition = args['partition'] if 'partition' in args else ''
# Lower row bound of this partition; `bounds` (and the matching upper bound)
# comes from elided lines — TODO confirm.
lbound = int(bounds[int(partition)].left)
# NOTE(review): mixes `args['notify']` with `_args['context']` — verify
# `_args` is in scope here and carries the same context value as `args`.
prefix = args['notify']+'.'+_args['context']
# e.g. "<notify>.<context>_<partition>_io", collapsing double underscores.
table = '_'.join([prefix,partition,'io']).replace('__','_')
folder = os.sep.join([args['logs'],args['context'],partition,'output'])
if 'file' in args :
# File mode: write both the synthetic ("io") and full ("full_io") frames
# to CSV under the output folder.
_fname = os.sep.join([folder,table.replace('_io','_full_io.csv')])
_pname = os.sep.join([folder,table])+'.csv'
data_comp.to_csv( _pname,index=False)
_args['data'].to_csv(_fname,index=False)
else:
# BigQuery mode.
# NOTE(review): hard-coded, user-specific key path — should come from
# configuration, not source code.
credentials = service_account.Credentials.from_service_account_file('/home/steve/dev/aou/accounts/curation-prod.json')
_pname = os.sep.join([folder,table+'.csv'])
_fname = table.replace('_io','_full_io')
# BUG(review), left unchanged because the elided context is needed to fix
# it safely: (1) `credentials='credentials'` passes a literal string
# instead of the Credentials object built above; (2) `destination_table`
# is given a filesystem path ending in ".csv", but pandas `to_gbq`
# expects "dataset.table" (probably `table` was intended); (3) the pandas
# keyword is `chunksize`, not `chunk_size`.
data_comp.to_gbq(if_exists='replace',destination_table=_pname,credentials='credentials',chunk_size=50000)
data_comp.to_csv(_pname,index=False)
# Replay the (presumably partition-sliced) frame `df` to the downstream
# generator via a zero-argument reader callable — TODO confirm `df` origin.
args['reader'] = lambda: df
generator.generate(args)
else:
# NOTE(review): this `else:` has no visible matching `if` — its condition
# was elided by extraction.
generator.generate(args)
# Components.generate(args)
|
|
|
|
# NOTE(review): continuation of an if/elif dispatch whose earlier branches and
# whose body both lie outside this chunk.
elif 'listen' in args :
#
# Start a worker that listens on a message queue for incoming jobs.
|