|
|
|
@ -32,9 +32,10 @@ Usage :
|
|
|
|
|
from healthcareio.params import SYS_ARGS
|
|
|
|
|
from transport import factory
|
|
|
|
|
import requests
|
|
|
|
|
|
|
|
|
|
from healthcareio import analytics
|
|
|
|
|
from healthcareio import server
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from healthcareio.parser import get_content
|
|
|
|
|
import os
|
|
|
|
|
import json
|
|
|
|
@ -56,10 +57,10 @@ if not os.path.exists(PATH) :
|
|
|
|
|
import platform
|
|
|
|
|
import sqlite3 as lite
|
|
|
|
|
# PATH = os.sep.join([os.environ['HOME'],'.edi-parser'])
|
|
|
|
|
def register (**args) :
|
|
|
|
|
def signup (**args) :
|
|
|
|
|
"""
|
|
|
|
|
:email user's email address
|
|
|
|
|
:url url of the provider to register
|
|
|
|
|
:url url of the provider to signup
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
email = args['email']
|
|
|
|
@ -203,12 +204,27 @@ def upgrade(**args):
|
|
|
|
|
"""
|
|
|
|
|
url = args['url'] if 'url' in args else URL+"/upgrade"
|
|
|
|
|
headers = {"key":args['key'],"email":args["email"],"url":url}
|
|
|
|
|
def check(**_args):
    """Ask the provider whether a newer version is available.

    Builds the ``<url>/version`` endpoint from the given base url and
    returns the server's parsed JSON payload. The actual comparison
    against the locally stored version is performed by the caller
    (the ``--check-update`` branch in ``__main__``).

    :param url: base url of the provider (a trailing slash is tolerated)
    :param version: optional locally stored version record; accepted for
        backward compatibility but not used here — the caller compares.
    :return: dict parsed from the server's JSON response
    :raises requests.exceptions.RequestException: on network failure
    """
    # Normalize the base url (drop any trailing slash) then target /version.
    url = _args['url'][:-1] if _args['url'].endswith('/') else _args['url']
    url = url + "/version"
    # Use the session as a context manager so the underlying connection
    # pool is released (the original leaked the session).
    with requests.session() as http:
        r = http.get(url)
        return r.json()
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__' :
|
|
|
|
|
info = init()
|
|
|
|
|
|
|
|
|
|
if 'out-folder' in SYS_ARGS :
|
|
|
|
|
OUTPUT_FOLDER = SYS_ARGS['out-folder']
|
|
|
|
|
SYS_ARGS['url'] = SYS_ARGS['url'] if 'url' in SYS_ARGS else URL
|
|
|
|
|
|
|
|
|
|
if set(list(SYS_ARGS.keys())) & set(['signup','init']):
|
|
|
|
|
#
|
|
|
|
@ -217,10 +233,10 @@ if __name__ == '__main__' :
|
|
|
|
|
#
|
|
|
|
|
|
|
|
|
|
email = SYS_ARGS['signup'].strip() if 'signup' in SYS_ARGS else SYS_ARGS['init']
|
|
|
|
|
url = SYS_ARGS['url'] if 'url' in SYS_ARGS else 'https://healthcareio.the-phi.com'
|
|
|
|
|
url = SYS_ARGS['url'] if 'url' in SYS_ARGS else URL
|
|
|
|
|
store = SYS_ARGS['store'] if 'store' in SYS_ARGS else 'sqlite'
|
|
|
|
|
db='healthcareio' if 'db' not in SYS_ARGS else SYS_ARGS['db']
|
|
|
|
|
register(email=email,url=url,store=store,db=db)
|
|
|
|
|
signup(email=email,url=url,store=store,db=db)
|
|
|
|
|
# else:
|
|
|
|
|
# m = """
|
|
|
|
|
# usage:
|
|
|
|
@ -244,11 +260,17 @@ if __name__ == '__main__' :
|
|
|
|
|
if 'file' in SYS_ARGS :
|
|
|
|
|
files = [SYS_ARGS['file']] if not os.path.isdir(SYS_ARGS['file']) else []
|
|
|
|
|
if 'folder' in SYS_ARGS and os.path.exists(SYS_ARGS['folder']):
|
|
|
|
|
names = os.listdir(SYS_ARGS['folder'])
|
|
|
|
|
files += [os.sep.join([SYS_ARGS['folder'],name]) for name in names if not os.path.isdir(os.sep.join([SYS_ARGS['folder'],name]))]
|
|
|
|
|
for root,_dir,f in os.walk(SYS_ARGS['folder']) :
|
|
|
|
|
|
|
|
|
|
if f :
|
|
|
|
|
files += [os.sep.join([root,name]) for name in f]
|
|
|
|
|
|
|
|
|
|
# names = os.listdir(SYS_ARGS['folder'])
|
|
|
|
|
# files += [os.sep.join([SYS_ARGS['folder'],name]) for name in names if not os.path.isdir(os.sep.join([SYS_ARGS['folder'],name]))]
|
|
|
|
|
else:
|
|
|
|
|
#
|
|
|
|
|
# raise an erro
|
|
|
|
|
# raise an error
|
|
|
|
|
|
|
|
|
|
pass
|
|
|
|
|
#
|
|
|
|
|
# if the user has specified to resume, we should look into the logs and pull the files processed and those that haven't
|
|
|
|
@ -256,40 +278,13 @@ if __name__ == '__main__' :
|
|
|
|
|
if 'resume' in SYS_ARGS :
|
|
|
|
|
store_config = json.loads( (open(os.sep.join([PATH,'config.json']))).read() )
|
|
|
|
|
files = proxy.get.resume(files,store_config )
|
|
|
|
|
print (["Found ",len(files)," files unprocessed"])
|
|
|
|
|
# print (["Found ",len(files)," files unprocessed"])
|
|
|
|
|
#
|
|
|
|
|
# @TODO: Log this here so we know what is being processed or not
|
|
|
|
|
SCOPE = None
|
|
|
|
|
|
|
|
|
|
if files : #and ('claims' in SYS_ARGS['parse'] or 'remits' in SYS_ARGS['parse']):
|
|
|
|
|
# logger = factory.instance(type='disk.DiskWriter',args={'path':os.sep.join([info['out-folder'],SYS_ARGS['parse']+'.log'])})
|
|
|
|
|
# if info['store']['type'] == 'disk.DiskWriter' :
|
|
|
|
|
# info['store']['args']['path'] += (os.sep + 'healthcare-io.json')
|
|
|
|
|
# elif info['store']['type'] == 'disk.SQLiteWriter' :
|
|
|
|
|
# # info['store']['args']['path'] += (os.sep + 'healthcare-io.db3')
|
|
|
|
|
# pass
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# if info['store']['type'] == 'disk.SQLiteWriter' :
|
|
|
|
|
# info['store']['args']['table'] = SYS_ARGS['parse'].strip().lower()
|
|
|
|
|
# _info = json.loads(json.dumps(info['store']))
|
|
|
|
|
# _info['args']['table']='logs'
|
|
|
|
|
# else:
|
|
|
|
|
# #
|
|
|
|
|
# # if we are working with no-sql we will put the logs in it (performance )?
|
|
|
|
|
|
|
|
|
|
# info['store']['args']['doc'] = SYS_ARGS['parse'].strip().lower()
|
|
|
|
|
# _info = json.loads(json.dumps(info['store']))
|
|
|
|
|
# _info['args']['doc'] = 'logs'
|
|
|
|
|
# logger = factory.instance(**_info)
|
|
|
|
|
|
|
|
|
|
# writer = factory.instance(**info['store'])
|
|
|
|
|
|
|
|
|
|
#
|
|
|
|
|
# we need to have batches ready for this in order to run some of these queries in parallel
|
|
|
|
|
# @TODO: Make sure it is with a persistence storage (not disk .. not thread/process safe yet)
|
|
|
|
|
# - Make sure we can leverage this on n-cores later on, for now the assumption is a single core
|
|
|
|
|
#
|
|
|
|
|
BATCH_COUNT = 1 if 'batch' not in SYS_ARGS else int (SYS_ARGS['batch'])
|
|
|
|
|
|
|
|
|
|
files = np.array_split(files,BATCH_COUNT)
|
|
|
|
@ -308,26 +303,6 @@ if __name__ == '__main__' :
|
|
|
|
|
while len(procs) > 0 :
|
|
|
|
|
procs = [proc for proc in procs if proc.is_alive()]
|
|
|
|
|
time.sleep(2)
|
|
|
|
|
# for filename in files :
|
|
|
|
|
|
|
|
|
|
# if filename.strip() == '':
|
|
|
|
|
# continue
|
|
|
|
|
# # content,logs = get_content(filename,CONFIG,CONFIG['SECTION'])
|
|
|
|
|
# #
|
|
|
|
|
# try:
|
|
|
|
|
# content,logs = parse(filename = filename,type=SYS_ARGS['parse'])
|
|
|
|
|
# if content :
|
|
|
|
|
# writer.write(content)
|
|
|
|
|
# if logs :
|
|
|
|
|
# [logger.write(dict(_row,**{"parse":SYS_ARGS['parse']})) for _row in logs]
|
|
|
|
|
# else:
|
|
|
|
|
# logger.write({"parse":SYS_ARGS['parse'],"name":filename,"completed":True,"rows":len(content)})
|
|
|
|
|
# except Exception as e:
|
|
|
|
|
# logger.write({"parse":SYS_ARGS['parse'],"filename":filename,"completed":False,"rows":-1,"msg":e.args[0]})
|
|
|
|
|
# # print ([filename,len(content)])
|
|
|
|
|
# #
|
|
|
|
|
# # @TODO: forward this data to the writer and log engine
|
|
|
|
|
# #
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@ -358,6 +333,28 @@ if __name__ == '__main__' :
|
|
|
|
|
pointer = lambda : server.app.run(host='0.0.0.0',port=PORT,debug=DEBUG,threaded=False)
|
|
|
|
|
pthread = Process(target=pointer,args=())
|
|
|
|
|
pthread.start()
|
|
|
|
|
elif 'check-update' in SYS_ARGS :
|
|
|
|
|
_args = {"url":SYS_ARGS['url']}
|
|
|
|
|
try:
|
|
|
|
|
if os.path.exists(os.sep.join([PATH,'config.json'])) :
|
|
|
|
|
SYS_ARGS['config'] = json.loads((open(os.sep.join([PATH,'config.json']))).read())
|
|
|
|
|
else:
|
|
|
|
|
SYS_ARGS['config'] = {}
|
|
|
|
|
if 'version' in SYS_ARGS['config'] :
|
|
|
|
|
_args['version'] = SYS_ARGS['config']['version']
|
|
|
|
|
version = check(**_args)
|
|
|
|
|
_version = {"current":0.0}if 'version' not in SYS_ARGS['config'] else SYS_ARGS['config']['version']
|
|
|
|
|
if _version['current'] != version['current'] :
|
|
|
|
|
print ()
|
|
|
|
|
print ("You need to upgrade your system to version to ",version['current'])
|
|
|
|
|
print ("\t- signup (for new configuration)")
|
|
|
|
|
print ("\t- use pip to upgrade the codebase")
|
|
|
|
|
else:
|
|
|
|
|
print ()
|
|
|
|
|
print ("You are running the current configuraiton version ",_version.current)
|
|
|
|
|
except Exception as e:
|
|
|
|
|
print (e)
|
|
|
|
|
pass
|
|
|
|
|
|
|
|
|
|
elif 'export' in SYS_ARGS:
|
|
|
|
|
#
|
|
|
|
@ -373,11 +370,15 @@ if __name__ == '__main__' :
|
|
|
|
|
cli:
|
|
|
|
|
|
|
|
|
|
healthcare-io.py --<[signup|init]> <email> --store <sqlite|mongo> [--batch <value>]
|
|
|
|
|
healthcare-io.py --parse claims --folder <path> [--batch <value>]
|
|
|
|
|
healthcare-io.py --parse remits --folder <path> [--batch <value>] [--resume]
|
|
|
|
|
|
|
|
|
|
healthcare-io.py --parse --folder <path> [--batch <value>] [--resume]
|
|
|
|
|
healthcare-io.py --check-update
|
|
|
|
|
action :
|
|
|
|
|
--signup|init signup user and get configuration file
|
|
|
|
|
--parse starts parsing
|
|
|
|
|
--check checks for updates
|
|
|
|
|
parameters :
|
|
|
|
|
--<[signup|init]> signup or get a configuration file from a parsing server
|
|
|
|
|
--folder location of the files (the program will recursively traverse it)
|
|
|
|
|
--store data store mongo or sqlite or mongodb
|
|
|
|
|
--resume will attempt to resume if there was an interruption
|
|
|
|
|
"""
|
|
|
|
|