Fixed an issue around data-type/casting misbehavior with pandas and missing values

dev
Steve L. Nyemba 5 years ago
parent 821cec8dd7
commit 3dde3bf4ef

@@ -168,6 +168,7 @@ class Components :
         df = args['reader']() if 'reader' in args else args['data']
         if 'slice' in args and 'max_rows' in args['slice']:
             max_rows = args['slice']['max_rows']
             if df.shape[0] > max_rows :
                 print (".. slicing ")
@@ -175,6 +176,7 @@ class Components :
                 df = df.iloc[i]
         # bounds = Components.split(df,MAX_ROWS,PART_SIZE)
         # if partition != '' :
         #   columns = args['columns']
@@ -182,7 +184,7 @@ class Components :
         #   df = pd.DataFrame(df[ int (partition) ],columns = columns)
         # max_rows = int(args['partition_max_rows']) if 'partition_max_rows' in args else 1000000
         # N = np.divide(df.shape[0],max_rows).astype(int) + 1
-        info = {"parition":int(partition),"gpu":_args["gpu"],"rows":int(df.shape[0]),"cols":int(df.shape[1]),"part_size":int(PART_SIZE)}
+        info = {"parition":int(partition),"gpu":_args["gpu"],"rows":int(df.shape[0]),"cols":int(df.shape[1]),"space":df[args['columns'][0]].unique().size, "part_size":int(PART_SIZE)}
         logger.write({"module":"generate","action":"partition","input":info})
         _args['partition'] = int(partition)
         _args['continuous']= args['continuous'] if 'continuous' in args else []
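
Note: the one functional change in this hunk adds a "space" entry to the partition log, i.e. the number of distinct values in the first column being synthesized. A minimal sketch of what that metric captures, using a made-up frame and column name (the real pipeline reads args['columns'][0]):

    import pandas as pd

    # Hypothetical example frame; the column name is illustrative only.
    df = pd.DataFrame({"race": ["white", "black", "asian", "white", "black"]})

    # "space" = count of distinct values in the first synthesized column.
    space = df["race"].unique().size
    print(space)   # 3
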
@@ -256,7 +258,7 @@ class Components :
             data_comp.to_gbq(if_exists='append',destination_table=partial,credentials=credentials,chunksize=90000)
             INSERT_FLAG = 'replace' if 'partition' not in args or 'segment' not in args else 'append'
+            print (_args['data'].dtypes)
             _args['data'].to_gbq(if_exists='append',destination_table=complete,credentials=credentials,chunksize=90000)
             Components.lock.release()
         _id = 'dataset'
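
For context, the commit title points at a well-known pandas behavior: an integer column is silently upcast to float64 as soon as it contains a missing value, so the dtypes printed just before the to_gbq call can disagree with an INTEGER column in the destination table. The sketch below only illustrates that upcast and one common workaround (pandas' nullable Int64 dtype); it is not the fix applied in this commit.

    import pandas as pd

    # Without missing values the column is a plain integer column.
    print(pd.DataFrame({"age": [34, 51, 27]}).dtypes)   # age    int64

    # One missing value upcasts the whole column to float64,
    # which would then load as FLOAT rather than INTEGER in BigQuery.
    df = pd.DataFrame({"age": [34, None, 27]})
    print(df.dtypes)                                     # age    float64

    # A common workaround: the nullable integer dtype keeps the column integral.
    df["age"] = df["age"].astype("Int64")
    print(df.dtypes)                                     # age    Int64
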
