Bug fixes and interface updates

v2.0
Steve Nyemba 11 months ago
parent a863f5e2b9
commit 39a484ee4a

@ -10,8 +10,104 @@ This code is intended to process and parse healthcare x12 837 (claims) and x12 8
The claims/output can be forwarded to a NoSQL data store like couchdb and mongodb
Usage :
Commandline :
python x12parser <action>
action:
- parser
- create.plugin
- register.plugin
Embedded :
"""
# import healthcareio
import typer
from typing import Optional
from typing_extensions import Annotated
import uuid
import os
import version
import json
import time
from healthcareio import x12
from healthcareio.x12.parser import X12Parser
# import healthcareio
# import healthcareio.x12.util
# from healthcareio.x12.parser import X12Parser
app = typer.Typer()
CONFIG_FOLDER = os.sep.join([os.environ['HOME'],'.healthcareio'])
@app.command(name='init')
def config(email:str,provider:str='sqlite') :
"""
Generate the configuration file with a default data store. For supported data-store providers visit https://hiplab.mc.vanderbilt.edu/git/hiplab/data-transport.git
:email your email
\r:provider data store provider (visit https://hiplab.mc.vanderbilt.edu/git/hiplab/data-transport.git)
"""
_db = "healthcareio"
# _PATH = os.sep.join([os.environ['HOME'],'.healthcareio'])
if not os.path.exists(CONFIG_FOLDER) :
os.mkdir(CONFIG_FOLDER)
if provider in ['sqlite','sqlite3'] :
_db = os.sep.join([CONFIG_FOLDER,_db+'.db3'])
_config = {
"store":{
"provider":provider,"database":_db,"context":"write"
},
"plugins":None,
"system":{
"uid":str(uuid.uuid4()),
"email":email,
"version":version.__version__,
"copyright":version.__author__
}
}
#
# store this on disk
f = open(os.sep.join([CONFIG_FOLDER,'config.json']),'w')
f.write(json.dumps(_config))
f.close()
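#
# For reference, with the default sqlite provider the config.json written above will
# look roughly like the sketch below; the uid, email and database path are illustrative.
# {"store": {"provider": "sqlite", "database": "<HOME>/.healthcareio/healthcareio.db3", "context": "write"},
#  "plugins": null,
#  "system": {"uid": "<uuid4>", "email": "<your-email>", "version": "2.0-RC", "copyright": "The Phi Technology LLC"}}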
@app.command(name='about')
def copyright():
for note in [version.__name__,version.__author__,version.__license__]:
print (note)
pass
@app.command()
def parse (claim_folder:str,plugin_folder:str = None,config_path:str = None):
"""
This function will parse 837 and/or 835 claims from a given claim folder, optionally using a plugin folder and a configuration path
"""
_plugins,_parents = x12.plugins.instance(path=plugin_folder)
_files = x12.util.file.Location.get(path=claim_folder,chunks=10)
_path = config_path if config_path else os.sep.join([CONFIG_FOLDER,'config.json'])
if os.path.exists(_path) :
f = open(_path)
_config = json.loads(f.read())
f.close()
_store = _config['store']
# print (len(_files))
jobs = []
for _chunks in _files:
pthread = X12Parser(plugins=_plugins,parents=_parents,files=_chunks, store=_store)
pthread.start()
jobs.append(pthread)
while jobs :
jobs = [pthread for pthread in jobs if pthread.is_alive()]
time.sleep(1)
pass
else:
pass
#
#
if __name__ == '__main__' :
app()
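For readers embedding the parser rather than calling the CLI, the parse command above boils down to roughly the sketch below; the claim-folder path and the store settings are placeholders, and the plugin path is left at its default.

from healthcareio import x12
from healthcareio.x12.parser import X12Parser

_plugins,_parents = x12.plugins.instance(path=None)
_files = x12.util.file.Location.get(path='/data/claims',chunks=10)
_store = {'provider':'sqlite','database':'healthcareio.db3','context':'write'}
for _chunk in _files :
    pthread = X12Parser(plugins=_plugins,parents=_parents,files=_chunk,store=_store)
    pthread.start()
    pthread.join()

On the command line the same flow is "python x12parser init <email>" followed by "python x12parser parse <claim-folder>", using the command names registered above.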

@ -1,5 +1,5 @@
__author__ = 'The Phi Technology LLC'
__version__ = '2.0-RC'
__license__ = """
(c) 2019 EDI Parser Toolkit,
Health Information Privacy Lab, Vanderbilt University Medical Center & The Phi Technology
@ -8,10 +8,18 @@ Steve L. Nyemba <steve.l.nyemba@vumc.org>
Khanhly Nguyen <khanhly.t.nguyen@gmail.com>
This framework is intended to parse and structure healthcare x12 837 (claims) and x12 835 (remittances) into human readable formats
- ( parse {x12} ) --> ( store as JSON ) --> ( export to database)
The supported databases are mysql, postgresql, sqlite3, mongodb, couchdb ...
More information on supported databases is available at https://hiplab.mc.vanderbilt.edu/git/hiplab/data-transport.git
Sample 835 and 837 claims (zipped) are available for download at https://x12.org/examples/
Usage :
Commandline :
python xreader.py --parse claims|remits --config <path>
Embedded :
"""
__name__ = "Healthcare/IO::Parser "+__version__

@ -21,6 +21,11 @@ import hashlib
import json
import os
import sys
# version 2.0
# import util
# from parser import X12Parser
#-- end
from itertools import islice
from multiprocessing import Process
import transport

@ -11,8 +11,8 @@ import numpy as np
import transport
import copy
# from healthcareio.x12.util import file as File, document as Document
from datetime import datetime
from healthcareio.logger import X12Logger
import time
class BasicParser (Process) :
def __init__(self,**_args):
@ -21,8 +21,26 @@ class BasicParser (Process) :
self._parents = _args['parents']
self._files = _args['files']
self._store = _args['store']
self._template = x12.util.template(plugins=self._plugins)
# self._logger = _args['logger'] if 'logger' in _args else None
self._logger = X12Logger(store = self._store)
if self._logger :
_info = { key:len(self._plugins[key].keys())for key in self._plugins}
_data = {'plugins':_info,'files': len(self._files),'model':self._template}
self._logger.log(module='BasicParser',action='init',data=_data)
def log (self,**_args):
"""
This function logs data into a specified location in JSON format
datetime,module,action,data
"""
pass
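#
# As the docstring above indicates, each record carries a module, an action and a data
# payload, e.g. {'module':'BasicParser','action':'init','data':{...}}; any timestamp or
# additional fields are added by X12Logger and are not shown in this commit.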
def apply(self,**_args):
"""
:content raw claim, i.e. CLP/CLM loops and related content
:x12 file type 837|835
:document document template with attributes pre-populated
"""
_content = _args['content']
_filetype = _args['x12']
_doc = _args['document'] #{}
@ -51,20 +69,27 @@ class BasicParser (Process) :
def run(self):
_handleContent = x12.util.file.Content()
_handleDocument = x12.util.document.Builder(plugins = self._plugins,parents=self._parents)
_template = self._template #x12.util.template(plugins=self._plugins)
#
# @TODO: when starting/initializing parsing jobs, log:
# - number of files, plugin metadata
_log = {}
for _absolute_path in self._files :
try:
_content = _handleContent.read(filename=_absolute_path)
_content,_filetype = _handleContent.split(_content)
#
# LOG: filename with claims found in it
#
# The first row is the header (it will be common to all claims)
_header = copy.deepcopy(_template[_filetype])
_header = self.apply(content=_content[0],x12=_filetype, document=_header)
_docs = []
for _rawclaim in _content[1:] :
_document = copy.deepcopy(_header) #copy.deepcopy(_template[_filetype])
@ -73,25 +98,29 @@ class BasicParser (Process) :
_document['filename'] = _absolute_path
_doc = self.apply(content=_rawclaim,x12=_filetype, document=_document)
if _doc :
#
# @TODO: Make sure the test here is the existence of the primary key
# _doc = _handleDocument.merge(_doc,_header)
_docs.append(_doc)
else:
# print (['wtf ...',_rawclaim])
pass
#
# LOG: information about the file that has just been processed.
_location = _absolute_path if type(_absolute_path) == str else 'In-Memory'
_data = {'filename':_location, 'available':len(_content[1:]),'x12':_filetype}
_args = {'module':'parse','action':'parse','data':_data}
_data['parsed'] = len(_docs)
self._logger.log(**_args)
#
# Let us submit the batch we have thus far
#
self.post(documents=_docs,x12=_filetype,filename=_location)
except Exception as e:
#
# LOG: We have filename and segment of the claim within filename
#
print (e)
def post(self,**_args):
pass
@ -107,14 +136,17 @@ class X12Parser(BasicParser):
_documents = _args['documents']
if _documents :
_store = copy.copy(self._store,**{})
TABLE = 'claims' if _args['x12'] in ['837','claims'] else 'remits'
_store['table'] = TABLE
_writer = transport.factory.instance(**_store)
_writer.write(_documents)
if getattr(_writer,'close') :
_writer.close()
#
# LOG: report what was written
_data = {'x12':_args['x12'], 'documents':len(_documents),'filename':_args['filename']}
self._logger.log(module='write',action='write',data=_data)
# def instance (**_args):
# """

@ -138,7 +138,7 @@ def instance(**_args):
_map['835'] = _handler.merge(_map['*'],_map['835'])
_map['837'] = _handler.merge(_map['*'],_map['837'])
if 'path' in _args and _args['path']:
#
# We can/will override the default modules given the user has provided a location
# _module = imp.load_source('udf',_args['path'])
@ -249,331 +249,331 @@ def filter (**_args) :
_map[x12] = {key:_item[key] for key in _found }
return _map
# def getTableName(**_args) :
# _plugins = _args['plugins']
# _meta = _args['meta']
# _x12 = _meta['x12']
# _foreignkeys = _args['tableKeys']
# _attributes = list(_meta['map'].values()) if 'map' in _meta else _meta['columns']
# if 'field' in _meta or 'container' in _meta:
# _tableName = _meta['field'] if 'field' in _meta else _meta['container']
# # _table = {_id:_attributes}
# elif 'anchor' in _meta :
# _tableName = _meta['anchor'].values()
# # for _name in _meta['anchor'].values() :
# # _table[_name] = _attributes
# elif 'parent' in _meta and _meta['parent']:
# #
# # We can have a parent with no field/container/anchor
# # We expect either a map or columns ...
# #
# _parentElement = _meta['parent']
# _parentMeta = _plugins[_x12][_parentElement].meta
# _parentTable = getTableName(plugins=_plugins,meta = _parentMeta,tableKeys=_foreignkeys)
# _tableName = list(_parentTable.keys())[0]
# # _table[_id] = _parentTable[_id] + _attributes
# _attributes = _parentTable[_tableName] + _attributes
# # print (_meta)
# else:
# #
# # baseline tables have no parent, we need to determine the name
# #
# _tableName = 'claims' if _x12 == '837' else 'remits'
# # print (_id,_attributes)
# pass
# #
# # Are there any anchors
# if _x12 == '837':
# _keys = [_foreignkeys['claims']]
# elif _x12 == '835' :
# _keys = [_foreignkeys['remits']]
# else:
# _keys = list(set(_foreignkeys.values()))
# _attr = []
# for _item in _attributes :
# if type(_item) == list :
# _attr += _item
# else:
# _attr.append(_item)
# _keys = list(set(_keys) - set(_attr))
# _attr = _keys + _attr
# # if 'container' in _meta and _meta['container'] == 'procedures' :
# # print (_attributes)
# _tableName = [_tableName] if type(_tableName) == str else _tableName
# return dict.fromkeys(_tableName,_attr)
# def _getTableName (**_args):
# """
# This function provides a list of attributes associated with an entity
# The function infers a relational structure from the JSON representation of a claim and plugin specifications
# """
# _meta = _args['meta']
# _xattr = list(_meta['map'].values()) if 'map' in _meta else _meta['columns']
# _plugins = _args['plugins']
# _foreignkeys = _args['tableKeys']
# #
# # Fix attributes, in case we have an index associated with multiple fields
# #
# _attr = []
# if 'anchor' not in _meta and not _meta['parent']:
# for _item in _xattr :
# _attr += _item if type(_item) == list else [_item]
# _name = None
# _info = {}
# _infoparent = {}
# if 'field' in _meta :
# _name = _meta['field']
# elif 'container' in _meta :
# _name = _meta['container']
# elif 'anchor' in _meta :
# _name = list(_meta['anchor'].values())
# # if _name :
# # _name = _name if type(_name) == list else [_name]
# # _info = dict.fromkeys(_name,_attr)
# if _meta['parent'] :
# _parentElement = filter(elements=[_meta['parent']],plugins=_plugins)
# _x12 = list(_parentElement.keys())[0]
# _id = list(_parentElement[_x12].keys())[0]
# _infoparent = getTableName(meta = _parentElement[_x12][_id].meta,plugins=_plugins,tableKeys=_foreignkeys)
# if _meta['x12'] == '*' :
# _name = ['claims','remits'] if not _name else _name
# _attr = list(set(_foreignkeys.values())) + _attr
# else:
# _name = 'claims' if _meta['x12'] == '837' and not _name else ('remits' if not _name and _meta['x12'] == '835' else _name)
# _id = 'claims' if _meta['x12'] == '837' else 'remits'
# if _id in _foreignkeys:
# _attr = [_foreignkeys[_id]] + _attr
# # if not _name :
# # if _meta['x12'] == '*' :
# # _name = ['claims','remits']
# # else:
# # _name = 'claims' if _meta['x12'] == '837' else 'remits'
# #
# # Let us make sure we can get the keys associated here ...
# #
# # filter (elements = [])
# _name = _name if type(_name) == list else [_name]
# _info = dict.fromkeys(_name,_attr)
# if _infoparent:
# _info = dict(_info,**_infoparent)
# return _info
# def getTableKeys(**_args):
# _plugins=_args['plugins']
# _pointer = filter(elements=['CLM'],plugins=_plugins)
# _keys = {}
# for _element in ['CLM','CLP'] :
# _pointer = filter(elements=[_element],plugins=_plugins)
# if not _pointer :
# continue
# _pointer = list(_pointer.values())[0]
# _meta = _pointer[_element].meta
# _name = _meta['map'][1] if 'map' in _meta else _meta['columns'][0]
# _id = 'claims' if _element == 'CLM' else 'remits'
# _keys[_id] = _name
# return _keys
# # print (list(_pointer.values())[0]['CLM'].meta)
# # print (_pointer.values()[0].meta)
# def sql (**_args):
# _plugins = _args['plugins']
# # _info = {'foreign':{},'keys':{'claims':None,'remits':None}}
# _documentHandler = x12.util.document.Builder(plugins=_plugins,parents=_args['parents'])
# _tableKeys = getTableKeys(plugins=_plugins)
# _schemas = {}
# for key in _plugins :
# _mpointers = _plugins[key]
# for _element in _mpointers :
# _pointer = _mpointers[_element]
# _meta = _pointer.meta
# _info = getTableName(meta=_meta,plugins=_plugins,tableKeys=_tableKeys)
# # _schemas = dict(_schemas,**_info)
# if _info :
# _schemas = _documentHandler.merge(_schemas,_info)
# # print (_info)
# return _schemas
#
# (The remaining lines of this hunk, the previously commented-out plugin-loader
# prototypes DEFAULT_PLUGINS, MODE, instance, has, get, test, inspect, load and parse,
# simply gain the same comment prefix and are otherwise unchanged.)

@ -106,11 +106,15 @@ def init(**_args):
_indexes = np.array_split(np.arange(_df.shape[0]),SEGMENTS)
jobs = []
for _ii in _indexes :
try:
_data = format(rows= _df.iloc[_ii].to_dict(orient='records'),x12=_file_type,primary_key=_pkey)
_thread = Process(target=post,args=({'store':_store['target'],'data':_data,'default':_default,'x12':_file_type},))
jobs.append(_thread)
except Exception as e:
#
# Log: segment
pass
if jobs :
jobs[0].start()
jobs[0].join()

@ -204,7 +204,6 @@ class Builder:
_parent = None
_data = {}
# _document = _args['document']
if not _pointer :
return None,None
#

@ -2,21 +2,22 @@
This is a build file for the healthcareio package
"""
from setuptools import setup, find_packages
from healthcareio import version
import os
import sys
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
args = {
"name":"healthcareio","version":version.__version__,
"author":version.__author__,
"author_email":"steve.l.nyemba@vumc.org",
"include_package_data":True,
"license":version.__license__,
"packages":find_packages(),
"keywords":["healthcare","edi","x12","analytics","835","837","data","transport","protocol"]
}
args["install_requires"] = ['typer','flask-socketio','seaborn','jinja2','jsonmerge', 'weasyprint','data-transport@git+https://healthcareio.the-phi.com/git/code/transport.git','pymongo','numpy','cloudant','pika','boto','botocore','flask-session','smart_open','smart-top@git+https://healthcareio.the-phi.com/git/code/smart-top.git@data-collector']
args['url'] = 'https://hiplab.mc.vanderbilt.edu'
args['scripts']= ['healthcareio/healthcare-io.py']
# args['entry_points'] = {
