data-maker/pipeline.py


#!/usr/bin/env python3
class KEYS:
logger.write({"module": "bigquery", "action": "read", "input": {"sql": SQL}})
if 'autopilot' in args:
    # ...
else:
    # ...
if df[name].unique().size > 0:
job = bq.QueryJobConfig()
schema = [{"name": _item.name, "type": _item.field_type} for _item in schema]
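# ---------------------------------------------------------------------
# Illustrative sketch (not part of pipeline.py): one way the read above
# is typically assembled with google-cloud-bigquery. The credential path,
# the table id, and the SQL string are assumptions for the example.
from google.cloud import bigquery as bq

def read_table(sql, credentials_path, table_id):
    client = bq.Client.from_service_account_json(credentials_path)
    job = bq.QueryJobConfig()                      # default query settings
    df = client.query(sql, job_config=job).to_dataframe()
    table = client.get_table(table_id)             # table metadata
    # Same shape as the schema list built above: name/type per field
    schema = [{"name": f.name, "type": f.field_type} for f in table.schema]
    return df, schema
# ---------------------------------------------------------------------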
x_cols = args['continuous']
"real":{"min":real_df[_col].min().astype(float),"max":real_df[_col].max().astype(float),"mean":real_df[_col].mean().astype(float),"sd":real_df[_col].values.std().astype(float),"missing": real_df[_col].where(_df[_col] == -1).dropna().count().astype(float),"zeros":real_df[_col].where(_df[_col] == 0).dropna().count().astype(float)}
# if 'dump' in args:
#     print(_args['data'].head())
# else:
#     Components.lock.acquire()
#     data_comp.to_gbq(if_exists='append', destination_table=partial, credentials=credentials, chunksize=90000)
#     _args['data'].to_gbq(if_exists='append', destination_table=complete, credentials=credentials, chunksize=90000)
#     Components.lock.release()
# _id = 'dataset'
# info = {"full": {_id: _fname, "rows": _args['data'].shape[0]}, "partial": {"path": _pname, "rows": data_comp.shape[0]}}
# if partition:
#     info['partition'] = int(partition)
# logger.write({"module": "generate", "action": "write", "input": info})
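# ---------------------------------------------------------------------
# Minimal sketch of the append pattern in the commented block above,
# using pandas-gbq through DataFrame.to_gbq. The table name, credentials
# object, and lock are assumptions for the example.
from multiprocessing import Lock

lock = Lock()

def append_to_bigquery(df, destination_table, credentials):
    lock.acquire()                        # serialize concurrent writers
    try:
        df.to_gbq(destination_table,
                  if_exists='append',     # add rows to the existing table
                  credentials=credentials,
                  chunksize=90000)        # upload in chunks, as above
    finally:
        lock.release()
# ---------------------------------------------------------------------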
if 'matrix_size' in args:
    # ...
# The choice of the chip (compute device) will be made internally
agent = Components()
agent.train(**args)
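# ---------------------------------------------------------------------
# Hedged sketch of what choosing "the chip" internally could look like:
# pinning a GPU index through CUDA_VISIBLE_DEVICES before the framework
# initializes. The 'gpu' key is an assumption, not shown in the excerpt.
import os

def select_chip(args):
    if 'gpu' in args:
        os.environ['CUDA_VISIBLE_DEVICES'] = str(args['gpu'])  # pin one GPU
    else:
        os.environ['CUDA_VISIBLE_DEVICES'] = ''                # CPU only
# ---------------------------------------------------------------------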
#
# If we have any jobs still running, we should wait until they finish
#
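# ---------------------------------------------------------------------
# Sketch of the wait described above, assuming the jobs are
# multiprocessing.Process instances (the excerpt does not show how they
# are created).
import time
from multiprocessing import Process

def run_and_wait(targets):
    jobs = [Process(target=t) for t in targets]
    for job in jobs:
        job.start()
    while any(job.is_alive() for job in jobs):  # poll until all jobs exit
        time.sleep(2)
# ---------------------------------------------------------------------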