#!/usr/bin/env python3
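# KEYS: assumed to hold configuration key constants for the pipeline; the class body is not shown in this fragment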
class KEYS :
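# record the BigQuery read, including the SQL used to pull the data, through the logger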
logger.write({"module":"bigquery","action":"read","input":{"sql":SQL}})
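# branch on whether an 'autopilot' flag was passed in the arguments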
if 'autopilot' in args :
else:
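# keep only the columns that contain at least one distinct value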
if df[name].unique().size > 0 :
columns.append(name)
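# reset the column holders; x_cols is assumed to collect candidate feature columns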
x_cols = []
_columns = None
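# The commented-out block below wrote data_comp and _args['data'] to the partial and complete BigQuery tables under a lock, then logged the row counts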
# else:
#     Components.lock.acquire()
#     data_comp.to_gbq(if_exists='append',destination_table=partial,credentials=credentials,chunksize=90000)
#     _args['data'].to_gbq(if_exists='append',destination_table=complete,credentials=credentials,chunksize=90000)
#     Components.lock.release()
# _id = 'dataset'
# info = {"full":{_id:_fname,"rows":_args['data'].shape[0]},"partial":{"path":_pname,"rows":data_comp.shape[0]} }
# if partition :
#     info ['partition'] = int(partition)
# logger.write({"module":"generate","action":"write","input":info} )
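# branch on whether an explicit 'matrix_size' was provided in the arguments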
if 'matrix_size' in args :
else:
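# hand the assembled arguments off to the training agent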
agent.train(**args)
#
# If we have any jobs we should wait till they finish
#
# jobs = [job for job in jobs if job.is_alive()]