@@ -34,7 +34,7 @@ from datetime import datetime
import copy
import requests
import time

from healthcareio.x12 import Parser

PATH = os.sep.join([os.environ.get('HOME'),'.healthcareio','config.json'])
STORE_URI = 'http://healthcareio.the-phi.com/store/healthcareio'
@@ -82,7 +82,7 @@ def meta(config) :
            key = _cache['key']
            if 'map' in config[key]:
                config[key]['map'][field] = -100
    add_index = {} #-- tells, per table, whether the _index attribute should be added
    for prefix in config :
        # if 'map' in config[prefix] :
        #     label = list(set(['label','field']) & set(config[prefix].keys()))
@@ -95,13 +95,26 @@ def meta(config) :
        if '@ref' in config[prefix] : #and set(['label','field','map']) & set(config[prefix]['@ref'].keys()):
            for subprefix in config[prefix]['@ref'] :
                _entry = config[prefix]['@ref'][subprefix]
                _id = list(set(['label','field']) & set(_entry.keys()))
                _id = _id[0]
                table = _entry[_id]
                add_index[table] = 1 if _id == 'label' else 0
                if 'map' in _entry :
                    _info += get_field(_entry)
                else:
                    _info += list(_entry.keys())
        if set(['label','field','map']) & set(config[prefix].keys()):
            _entry = config[prefix]
            _id = list(set(['label','field']) & set(config[prefix].keys()))
            if _id :
                _id = _id[0]
                table = config[prefix][_id]
                add_index[table] = 1 if _id == 'label' else 0
            if 'map' in _entry :
                _info += get_field(_entry)
@@ -110,7 +123,7 @@ def meta(config) :
    #
    # We need to organize the fields appropriately here
    #
    # print (_info)
    fields = {"main":[],"rel":{}}
    for row in _info :
        if type(row) == str :
@@ -118,16 +131,36 @@ def meta(config) :
            fields['main'] = list(set(fields['main']))
            fields['main'].sort()
        else :
            _id = list(set(add_index.keys()) & set(row.keys()))
            if _id :
                _id = _id[0]
                if add_index[_id] == 1 :
                    row[_id] += ['_index']
                if _id not in fields['rel']:
                    fields['rel'][_id] = row[_id]
                else:
                    fields['rel'][_id] += row[_id]
            else:
                _id = list(row.keys())[0]
                fields['rel'][_id] = row[_id] if _id not in fields['rel'] else fields['rel'][_id] + row[_id]
    return fields
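
#
# For orientation, a sketch of the shape meta() returns (field names are
# illustrative, not from a real configuration): 'main' drives the parent-table
# projection and 'rel' maps each child table to its extracted fields, with
# '_index' appended when the table was identified by 'label'.
#
#   {"main":["claim_id","payer_id"],
#    "rel":{"procedures":["code","charge_amount","_index"]}}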
def create (**_args) :
    skip = [] if 'skip' not in _args else _args['skip']
    fields = ([_args['key']] if 'key' in _args else []) + _args['fields']
    fields = ['_id'] + list(set(fields))
    fields.sort()
    #
    # NOTE: ':table' is a placeholder, expected to be resolved by the consumer
    # (e.g. the prefix/schema handling in workers.CreateSQL)
    sql = ['CREATE TABLE :table ',"(",",\n".join(["\t".join(["\t",name,"VARCHAR(125)"]) for name in fields]),")"]
    return " ".join(sql)
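
#
# For illustration, assuming fields=['claim_id','payer_id'], create() yields
# roughly the following (whitespace approximated; ':table' is left for the
# consumer to resolve):
#
#   CREATE TABLE :table  (   _id        VARCHAR(125),
#       claim_id    VARCHAR(125),
#       payer_id    VARCHAR(125) )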
def read (**_args) :
    """
@@ -141,32 +174,80 @@ def read (**_args) :
    # @TODO: Find a way to write the data into a data-store
    #   - use a dbi interface with pandas, or stream it in
    #
def init_sql(**_args):
    """
    This function builds the queries that generically read data stored as JSON in a relational (SQLite) table
    :param type     835 or 837
    :param skip     list of fields to be skipped
    """
    #
    # we should acknowledge the global variables CONFIG, CUSTOM_CONFIG
    TYPE = _args['type']
    _config = CONFIG['parser'][TYPE][0]
    TABLE_NAME = 'claims' if TYPE == '837' else 'remits'
    if TYPE in CUSTOM_CONFIG :
        _config = jsonmerge.merge(_config,CUSTOM_CONFIG[TYPE])
    #
    _info = meta(_config)
    _projectSQLite = [] #-- SQLite projection
    for field_name in _info['main'] :
        _projectSQLite += ["json_extract(data,'$."+field_name+"') "+field_name]
    _projectSQLite = ",".join(_projectSQLite) #-- wrapping up the SQLite projection on the main table
    SQL = "SELECT DISTINCT :table.id _id,:fields FROM :table, json_each(data)".replace(":fields",_projectSQLite).replace(":table",TABLE_NAME)
    r = [{"table":TABLE_NAME,"read":{"sql":SQL},"sql":create(table=TABLE_NAME,fields=_info['main'])}]
    for table in _info['rel'] :
        #
        # NOTE: _index is already part of the fields where applicable (added by meta)
        fields = _info['rel'][table] #+["_index"]
        project = [TABLE_NAME+".id _id","json_extract(data,'$.claim_id') as claim_id"]
        fn_prefix = "json_extract(x.value,'$." if '_index' not in fields else "json_extract(i.value,'$."
        for field_name in fields :
            project += [fn_prefix+field_name+"') "+field_name]
        SQL = "SELECT DISTINCT :fields FROM "+TABLE_NAME+", json_each(data) x, json_each(x.value) i where x.key = ':table'"
        SQL = SQL.replace(":table",table).replace(":fields",",".join(project))
        r += [{"table":table,"read":{"sql":SQL},"sql":create(table=table,key='claim_id',fields=fields)}]
    return r
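
#
# A sketch of the child-table query init_sql() emits, assuming a hypothetical
# 'procedures' attribute with a 'code' field (and no '_index'):
#
#   SELECT DISTINCT claims.id _id,
#          json_extract(data,'$.claim_id') as claim_id,
#          json_extract(x.value,'$.code') code
#   FROM claims, json_each(data) x, json_each(x.value) i
#   where x.key = 'procedures'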
def init(**_args):
    if 'provider' in CONFIG['store'] and CONFIG['store']['provider'] == 'sqlite' :
        return init_sql(**_args)
    else:
        return init_mongo(**_args)

def init_mongo (**_args) :
    """
    This function determines the tables to be created, as well as their type.
    :param type     835 or 837
    :param skip     list of fields to be skipped
    """
    TYPE = _args['type']
    SKIP = _args['skip'] if 'skip' in _args else []
    _config = CONFIG['parser'][TYPE][0]
    if TYPE in CUSTOM_CONFIG :
        _config = jsonmerge.merge(_config,CUSTOM_CONFIG[TYPE])
    #
    # @TODO: implement fields to be skipped ...
    #
    TABLE_NAME = 'claims' if TYPE == '837' else 'remits'
    _info = meta(_config)

    # project = dict.fromkeys(["_id","claim_id"]+_info['main'],1)
    project = {}
    for field_name in _info['main'] :
        _name = "".join(["$",field_name])
        project[field_name] = {"$ifNull":[_name,""]}
    project["_id"] = 1
    project = {"$project":project}

    r = [{"table":TABLE_NAME,"read":{"mongo":{"aggregate":TABLE_NAME,"pipeline":[project],"cursor":{},"allowDiskUse":True}},"sql":create(table=TABLE_NAME,fields=_info['main'])}]

    for table in _info['rel'] :
        #
        # NOTE: _index is added to the fields where applicable (by meta)
@@ -180,7 +261,8 @@ def init (**_args) :
        project["_id"] = 1
        pipeline = [{"$match": {table: {"$nin": [None, ""]}}},{"$unwind":"$"+table},{"$project":project}]
        cmd = {"mongo":{"aggregate":TABLE_NAME,"cursor":{},"pipeline":pipeline,"allowDiskUse":True}}
        r += [{"table":table,"read":cmd,"sql":create(table=table,key='claim_id',fields=fields)}]
    return r
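
#
# A sketch of the aggregation init_mongo() emits for a child table; the
# per-field $project entries are built in code omitted from this hunk, shown
# here on the assumption they mirror the main-table $ifNull pattern
# ('procedures' and 'code' are hypothetical names):
#
#   [{"$match":{"procedures":{"$nin":[None,""]}}},
#    {"$unwind":"$procedures"},
#    {"$project":{"code":{"$ifNull":["$procedures.code",""]},"_id":1}}]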
@@ -207,9 +289,14 @@ class Factory:
        global PATH
        global CONFIG
        global CUSTOM_CONFIG
        if 'config' in _args :
            PATH = _args['config']
        else:
            PATH = os.sep.join([os.environ.get('HOME'),'.healthcareio','config.json'])
        CONFIG = Parser.setup(PATH)
        CUSTOM_PATH = os.sep.join([os.environ.get('HOME'),'.healthcareio','custom'])
        if os.path.exists(CUSTOM_PATH) and os.listdir(CUSTOM_PATH) :
@@ -217,31 +304,49 @@ class Factory:
            CUSTOM_CONFIG = json.loads((open(CUSTOM_PATH)).read())
        _features = Factory.license(email=CONFIG['owner'])
        X12_TYPE = _args['type']
        store = copy.deepcopy(CONFIG['store']) #-- source data store
        #
        # Formatting accordingly just in case
        if 'provider' in store :
            if 'table' in store:
                store['table'] = 'claims' if X12_TYPE == '837' else 'remits'
            store['context'] = 'read'

        wstore = _args['write_store'] #-- output data store
        PREFIX = 'clm_' if X12_TYPE == '837' else 'era_'
        SCHEMA = '' if 'schema' not in wstore else wstore['schema']
        _config = CONFIG['parser'][X12_TYPE][0]
        if X12_TYPE in CUSTOM_CONFIG :
            _config = jsonmerge.merge(_config,CUSTOM_CONFIG[X12_TYPE])

        job_args = init(type=X12_TYPE) #-- the queries that will generate the objects we are interested in
        # print (json.dumps(job_args))
        _jobs = []
        for row in job_args:
            _store = copy.deepcopy(wstore)
            if 'type' in _store :
                _store['args']['table'] = row['table']
            else:
                _store['table'] = row['table']
            _pipe = [
                workers.CreateSQL(prefix=PREFIX,schema=SCHEMA,store=_store,sql=row['sql']),
                workers.Reader(prefix=PREFIX,schema=SCHEMA,store=store,read=row['read'],max_rows=250000,features=_features,table=row['table']),
                workers.Writer(prefix=PREFIX,schema=SCHEMA,store=_store)
            ]
            _jobs += [workers.Subject(observers=_pipe,name=row['table'])]
        return _jobs
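
#
# Hypothetical usage sketch; the enclosing method's name and signature are not
# shown in this hunk, so 'instance' and the job-launch call are assumptions:
#
#   _jobs = Factory.instance(type='835',config='/path/to/config.json',
#                            write_store=CONFIG['store'])
#   for _job in _jobs :
#       _job.run()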
# if __name__ == '__main__' :