parent
47060b5147
commit
47fb0a9993
@ -0,0 +1,37 @@
|
||||
#
# Let us create an image for healthcareio
# The image will contain the {X12} Parser and the
# FROM ubuntu:bionic-20200403
FROM ubuntu:focal
#
# Refresh the package index and upgrade the base image once.
# (The original repeated the update/upgrade pair in a second set of layers,
# which only bloats the image; the duplicate pair was removed.)
RUN ["apt-get","update","--fix-missing"]
RUN ["apt-get","upgrade","-y"]
RUN ["apt-get","-y","install","apt-utils"]
RUN ["apt-get","install","-y","mongodb","sqlite3","sqlite3-pcre","libsqlite3-dev","python3-dev","python3","python3-pip","git","python3-virtualenv","wget"]
#
# Python runtime dependencies for the parser
#
RUN ["pip3","install","--upgrade","pip"]
RUN ["pip3","install","numpy","pandas","git+https://dev.the-phi.com/git/steve/data-transport","botocore","matplotlib"]
# RUN ["pip3","install","git+https://healthcare.the-phi.com/git/code/parser.git","botocore"]
# RUN ["useradd", "-ms", "/bin/bash", "health-user"]
# USER health-user
#
# This volume is where the data will be loaded from (otherwise it is assumed the user will have it in the container somehow)
#
VOLUME ["/data","/app/healthcareio"]
WORKDIR /app
ENV PYTHONPATH="/app"
#
# This is the port from which some degree of monitoring can/will happen
EXPOSE 80
EXPOSE 27017
# wget https://healthcareio.the-phi.com/git/code/parser.git/bootup.sh
COPY bootup.sh bootup.sh
# NOTE(review): "-C" sets bash's noclobber option; bootup.sh is still executed
# as a script file argument. If "-c" (run as command string) was intended,
# confirm — as written the container does start correctly.
ENTRYPOINT ["bash","-C"]
CMD ["bootup.sh"]
# VOLUME ["/home/health-user/healthcare-io/","/home-healthuser/.healthcareio"]
# RUN ["pip3","install","git+https://healthcareio.the-phi.com/git"]
|
@ -0,0 +1,10 @@
|
||||
# Container start-up script: bring up mongodb, expose the app on PYTHONPATH
# and drop into an interactive shell (keeps the container alive).
set -e
# Start the bundled mongodb service (log/parse metadata store)
/etc/init.d/mongodb start
cd /app
# Dump the environment (appears to be diagnostic output — confirm if still needed)
export
export PYTHONPATH=$PWD
# List the app directory (diagnostic output as well, presumably)
ls
# python3 healthcareio/healthcare-io.py --signup $EMAIL --store mongo
# python3 healthcareio/healthcare-io.py --analytics --port 80 --debug
# Interactive shell is the long-running foreground process of the container
bash
|
@ -0,0 +1,170 @@
|
||||
"""
|
||||
This file serves as proxy to healthcare-io, it will be embedded into the API
|
||||
"""
|
||||
import os
|
||||
import transport
|
||||
import numpy as np
|
||||
import x12
|
||||
import pandas as pd
|
||||
import smart
|
||||
from analytics import Apex
|
||||
import time
|
||||
class get :
    """
    Read-only views over the parser's state: files still to be processed,
    running parser processes, and parsing statistics rendered as Apex charts.
    Used as a namespace; all members are class-level.
    """
    # Registry of live parser processes appended to by run()
    PROCS = []
    # Default location of the healthcareio configuration file
    PATH = os.sep.join([os.environ['HOME'],'.healthcareio','config.json'])
    @staticmethod
    def resume (files,args):
        """
        This function will determine the appropriate files to be processed by performing a simple complementary set operation against the logs
        @TODO: Support data-stores other than mongodb
        :param files list of files within a folder
        :param _args configuration
        """
        _args = args['store'].copy()
        if 'mongo' in _args['type'] :
            _args['type'] = 'mongo.MongoReader'
        reader = transport.factory.instance(**_args)
        _files = []
        try:
            # Names of files already marked completed in the "logs" collection
            pipeline = [{"$match":{"completed":{"$eq":True}}},{"$group":{"_id":"$name"}},{"$project":{"name":"$_id","_id":0}}]
            _args = {"aggregate":"logs","cursor":{},"allowDiskUse":True,"pipeline":pipeline}
            _files = reader.read(mongo = _args)
            _files = [item['name'] for item in _files]
        except Exception as e :
            # Best effort: if the log store can't be read, reprocess everything
            pass
        print (["found ",len(files),"\tProcessed ",len(_files)])
        # Complement: files on disk without a completed log entry
        return list(set(files) - set(_files))

    @staticmethod
    def processes(_args):
        """
        Summarize the parser's resource usage (CPU/RAM radial charts) and
        prune finished processes from get.PROCS.

        :param _args: unused; kept for interface parity with the other views
        :return: {"process": {"chart": [apex-config, ...], "counts": N}}
        """
        # smart.top.read returns per-process stats; keep only name/cpu/mem
        _info = pd.DataFrame(smart.top.read(name='healthcare-io.py'))[['name','cpu','mem']]
        if _info.shape[0] == 0 :
            # No running process found: chart zeros instead of failing
            _info = pd.DataFrame({"name":["healthcare-io.py"],"cpu":[0],"mem":[0]})
        # _info = pd.DataFrame(_info.groupby(['name']).sum())
        # _info['name'] = ['healthcare-io.py']
        # Rename columns to the labels shown in the UI
        m = {'cpu':'CPU','mem':'RAM','name':'name'}
        _info.columns = [m[name] for name in _info.columns.tolist()]
        _info.index = np.arange(_info.shape[0])

        charts = []
        for label in ['CPU','RAM'] :
            # One radial chart per aggregated metric
            value = _info[label].sum()
            df = pd.DataFrame({"name":[label],label:[value]})
            charts.append (
                Apex.apply(
                    {"data":df, "chart":{"type":"radial","axis":{"x":label,"y":"name"}}}
                )['apex']
            )
        #
        # This will update the counts for the processes, upon subsequent requests so as to show the change
        #
        N = 0
        lprocs = []
        for proc in get.PROCS :
            if proc.is_alive() :
                lprocs.append(proc)
        N = len(lprocs)
        get.PROCS = lprocs
        return {"process":{"chart":charts,"counts":N}}

    @staticmethod
    def files (_args):
        """
        Compute parsing progress over the files in /data: percentage completed
        (radial chart) and the claims/remits distribution (donut chart).
        Supports mongodb and SQLite log stores.

        :param _args: configuration with a 'store' entry (type + connection args)
        :return: {"files": {"counts": N, "chart": [apex-config, ...]}}
        """
        _info = smart.folder.read(path='/data')
        # Total number of files found in the data folder
        N = _info.files.tolist()[0]
        if 'mongo' in _args['store']['type'] :
            store_args = dict(_args['store'].copy(),**{"type":"mongo.MongoReader"})
            # reader = transport.factory.instance(**_args)

            # Count of distinct completed files in the logs collection
            pipeline = [{"$group":{"_id":"$name","count":{"$sum":{"$cond":[{"$eq":["$completed",True]},1,0]}} }},{"$group":{"_id":None,"count":{"$sum":"$count"}}},{"$project":{"_id":0,"status":"completed","count":1}}]
            query = {"mongo":{"aggregate":"logs","allowDiskUse":True,"cursor":{},"pipeline":pipeline}}
            # _info = pd.DataFrame(reader.read(mongo={"aggregate":"logs","allowDiskUse":True,"cursor":{},"pipeline":pipeline}))
            pipeline = [{"$group":{"_id":"$parse","claims":{"$addToSet":"$name"}}},{"$project":{"_id":0,"type":"$_id","count":{"$size":"$claims"}}}]
            _query = {"mongo":{"aggregate":"logs","cursor":{},"allowDiskUse":True,"pipeline":pipeline}} #-- distribution claims/remits

        else:
            store_args = dict(_args['store'].copy(),**{"type":"disk.SQLiteReader"})
            store_args['args']['table'] = 'logs'
            query= {"sql":"select count(distinct json_extract(data,'$.name')) as count, 'completed' status from logs where json_extract(data,'$.completed') = true"}
            _query={"sql":"select json_extract(data,'$.parse') as type,count(distinct json_extract(data,'$.name')) as count from logs group by type"} #-- distribution claim/remits
        reader = transport.factory.instance(**store_args)
        _info = pd.DataFrame(reader.read(**query))
        if not _info.shape[0] :
            _info = pd.DataFrame({"status":["completed"],"count":[0]})
        # Express the completed count as a percentage of files on disk
        # NOTE(review): divides by N — if /data is empty this raises; confirm
        # callers guarantee at least one file.
        _info['count'] = np.round( (_info['count'] * 100 )/N,2)

        charts = [Apex.apply({"data":_info,"chart":{"type":"radial","axis":{"y":"status","x":"count"}}})['apex']]
        #
        # Let us classify the files now i.e claims / remits
        #

        # pipeline = [{"$group":{"_id":"$parse","claims":{"$addToSet":"$name"}}},{"$project":{"_id":0,"type":"$_id","count":{"$size":"$claims"}}}]
        # _args = {"aggregate":"logs","cursor":{},"allowDiskUse":True,"pipeline":pipeline}
        # r = pd.DataFrame(reader.read(mongo=_args))
        r = pd.DataFrame(reader.read(**_query)) #-- distribution claims/remits
        r = Apex.apply({"chart":{"type":"donut","axis":{"x":"count","y":"type"}},"data":r})['apex']
        r['chart']['height'] = '100%'
        r['legend']['position'] = 'bottom'

        charts += [r]

        return {"files":{"counts":N,"chart":charts}}

    pass
|
||||
#
|
||||
# Process handling ....
|
||||
|
||||
|
||||
def run (_args) :
    """
    This function will run the jobs and insure as processes (as daemons).
    :param _args system configuration
    :return: the list of spawned parser processes (get.PROCS)
    """
    FILES = []
    BATCH = int(_args['args']['batch']) #-- number of processes (poorly named variable)

    # Recursively collect every file under the configured input folder
    for root,_dir,f in os.walk(_args['args']['folder']) :
        if f :
            FILES += [os.sep.join([root,name]) for name in f]
    # Drop files that the log store already marks as completed
    FILES = get.resume(FILES,_args)
    # Partition the remaining files across BATCH worker processes
    FILES = np.array_split(FILES,BATCH)

    for FILE_GROUP in FILES :

        FILE_GROUP = FILE_GROUP.tolist()
        # logger.write({"process":index,"parse":_args['parse'],"file_count":len(row)})
        # proc = Process(target=apply,args=(row,info['store'],_info,))
        parser = x12.Parser(get.PATH) #os.sep.join([PATH,'config.json']))
        parser.set.files(FILE_GROUP)
        parser.daemon = True
        parser.start()
        # Keep a handle so processes()/stop() can inspect/terminate it later
        get.PROCS.append(parser)
        # Stagger start-up so the workers don't hit the store all at once
        time.sleep(3)
    #
    # @TODO:consider submitting an update to clients via publish/subscribe framework
    #
    return get.PROCS
|
||||
def stop(_args):
    """
    Terminate every live parser process and clear the process registry.

    :param _args: system configuration (unused; kept for interface parity)
    """
    live = [job for job in get.PROCS if job.is_alive()]
    for job in live:
        job.terminate()
    get.PROCS = []
    #
    # @TODO: consider submitting an update to clients via publish/subscribe framework
|
||||
def write(src_args,dest_args,files) :
    """
    Placeholder: copy parsed records for *files* from the *src_args* store
    to the *dest_args* store. Not implemented yet.
    """
    #
    # @TODO: Support for SQLite
    pass
|
||||
def publish (src_args,dest_args,folder="/data"):
    """
    Gather the files under *folder* in preparation for export.

    :param src_args source store configuration
    :param dest_args destination store configuration
    :param folder root folder to scan (defaults to the /data volume)
    """
    FILES = []
    for root,_dir,f in os.walk(folder) :
        if f :
            FILES += [os.sep.join([root,name]) for name in f]
    #
    # @TODO: Add support for SQLite ....

    # NOTE(review): the function ends after splitting the file list into 4
    # groups — the actual export step appears not to be implemented yet.
    FILES = np.array_split(FILES,4)
@ -0,0 +1,75 @@
|
||||
/***
 * This file will handle the dialog boxes as they and their associated configurations and function binding
 */
if (!dialog){
    var dialog = {}
}

/**
 * Open the modal dialog.
 * @param title   text for the dialog's title bar
 * @param msg     text for the dialog's message area
 * @param pointer click handler bound to the confirm button
 *
 * The dialog markup is fetched from /static/dialog.html unless a copy is
 * found in sessionStorage.dialog.
 */
dialog.open = function(title,msg,pointer){
    if (sessionStorage.dialog == null){

        var http = HttpClient.instance()
        http.get(sessionStorage.io_context+'/static/dialog.html',function(x){
            var html = x.responseText
            jx.modal.show({html:html,id:'dialog'})
            $('.dialog .title').text(title)
            $('.dialog .message .text').text(msg)
            // Default to the question-mark icon until a status is set
            dialog.status.ask()
            $('.dialog .action .active-button').on('click',pointer)
            $('.dialog .title-bar .close').on('click',function(){dialog.close(0)})
            // NOTE(review): sessionStorage.dialog is never assigned here, so
            // the cached branch below may never run — confirm whether the
            // fetched html was meant to be stored.
        })
    }else{
        // Cached markup path; note it does not re-apply title/msg — verify
        // whether that is intended.
        var html = sessionStorage.dialog
        jx.modal.show({html:html,id:'dialog'})
        dialog.status.ask()
        $('.dialog .action .active-button').on('click',pointer)
        $('.dialog .title-bar .close').on('click',function(){dialog.close(0)})
    }
}
||||
/**
 * Rebind the dialog's confirm button to *pointer*; when no handler is given
 * the button simply closes the dialog.
 */
dialog.bind = function(pointer){
    var handler = (pointer == null) ? dialog.close : pointer
    var button = $('.dialog .action .active-button')
    button.off()
    button.on('click', handler)
}
|
||||
/**
 * Close the dialog after *delay* milliseconds (default 1750), provided it is
 * still on screen when the timer fires.
 */
dialog.close = function(delay){
    var wait = (delay == null) ? 1750 : delay
    setTimeout(function(){
        if ($('.dialog').length > 0){
            jx.modal.close()
        }
    }, wait)
}
|
||||
/**
 * dialog.status — helpers that drive the dialog's confirm button and the
 * FontAwesome icon shown in the message area.
 */
dialog.status = {}

// Hide the confirm button while an operation is pending
dialog.status.wait = function(){
    $('.dialog .action .active-button').hide()
}
// Show the confirm button again so the user can act
dialog.status.confirm = function(){
    $('.dialog .action .active-button').show()
}
// Spinning cog: work in progress
dialog.status.busy = function(){
    var icon = $('.dialog .message #msg-icon')
    icon.removeClass()
    icon.addClass('fas fa-cog fa-4x fa-spin')
}
// Cross mark: operation failed
dialog.status.fail = function(){
    var icon = $('.dialog .message #msg-icon')
    icon.removeClass()
    icon.addClass('fas fa-times fa-4x')
}
// Question mark: awaiting user confirmation
dialog.status.ask = function(){
    var icon = $('.dialog .message #msg-icon')
    icon.removeClass()
    icon.addClass('far fa-question-circle fa-4x')
}
// Warning triangle
dialog.status.warn = function(){
    var icon = $('.dialog .message #msg-icon')
    icon.removeClass()
    icon.addClass('fas fa-exclamation-triangle fa-4x')
}
// Check mark: operation succeeded
dialog.status.success = function(){
    var icon = $('.dialog .message #msg-icon')
    icon.removeClass()
    icon.addClass('fas fa-check fa-4x')
}
|
@ -0,0 +1,15 @@
|
||||
// Namespace guard: create the healthcare root object if it does not exist yet
if (!healthcare) {
    var healthcare = {io:{}}
}
// Public surface of the io module — the functions referenced here are
// defined in dialog.js / io.js
healthcare.io = {'dialog':dialog,'confirmed':confirmed,'reset':reset,'update':update,'run':run,'publish':publish}

/**
 * Read the batch size and folder from the input form, echo the batch size
 * into the code preview, and POST the parameters to /io/params.
 */
healthcare.io.apply = function(){
    var batch = $('.input-form .item .procs').val()
    var path  = $('.input-form .item .folder').val()
    $('.code .batch').html(batch)
    var client = HttpClient.instance()
    client.setData({"batch":batch,"resume":true,"folder":path},"application/json")
    client.post(sessionStorage.io_context+'/io/params',function(x){})
}
|
||||
|
||||
|
||||
|
@ -0,0 +1,182 @@
|
||||
/**
 * This file will depend on dialog.js (soft dependency). Some functions here will make calls to resources in dialog.js
 */

// Ask the user to confirm wiping all parsed data
var reset = function(){
    dialog.open('Healthcare/IO::Parser', 'Are you sure you would like to delete all data parsed? Click Ok to confirm',confirmed.reset)
}
// Ask the user to confirm updating the parsing configuration
var update= function(){
    dialog.open('Healthcare/IO::Parser','Update will change parsing configuration. Would you like to continue ?',confirmed.update)
}
// Ask the user to confirm launching the parser
var run = function(){
    dialog.open('Healthcare/IO::Parser','Preparing parser, confirm to continue',confirmed.run)
}
// Placeholder for a progress socket (not used in this file)
var _queue = {socket:null}
// Handlers invoked once the user confirms a dialog action
var confirmed = {}
|
||||
/**
 * Confirm-handler: kick off parsing via POST /io/run, then refresh the
 * monitor view (monitor is provided elsewhere in the app).
 */
confirmed.run = function(){
    dialog.status.busy()
    dialog.status.wait()
    $('.dialog .message .text').html('Initiating Parsing ...')
    // Small delay so the busy state is visible before the request fires
    setTimeout(function(){
        var http = HttpClient.instance()
        http.post(sessionStorage.io_context+'/io/run',function(x){
            // dialog.handler = setInterval(function(){monitor.data()},750)
            monitor.data()
            //dialog.close()

        })

    },1000)
}
|
||||
/**
 * Confirm-handler: POST /reset to clear parsed data. A 200 response with
 * body "1" signals success; anything else shows the failure icon.
 */
confirmed.reset = function(){
    var uri = sessionStorage.io_context+'/reset'
    var http= HttpClient.instance()
    dialog.status.busy()
    dialog.status.wait()
    http.post(uri,function(x){
        // Delay so the user can see the state transition
        setTimeout(function(){
            if (x.status == 200 && x.responseText == "1"){
                dialog.status.success()
                $('.dialog .message .text').html('Reset Healthcare/IO::Parser was successful!<br><div align="center">Dialog will be closing</div>')
                dialog.close()
            }else{
                dialog.status.fail()
            }

        },2000)
    })
}
|
||||
|
||||
/**
 * Confirm-handler: validate the email field, then POST it to /update.
 * A 200 response with body "1" signals success; other responses surface
 * the status code in the dialog and re-enable the confirm button.
 */
confirmed.update = function(){
    var uri = sessionStorage.io_context+'/update'
    var email = $('#email').val()
    //
    //-- validate the email
    if (email.match(/^([^\s]+)@([^\s@]+)\.(org|com|edu|io)$/i)){
        dialog.status.wait()
        dialog.status.busy()
        var http = HttpClient.instance()
        // FIX: content-type was misspelled "application/son"
        http.setData({"email":email},"application/json")
        setTimeout(function(){
            http.post(uri,function(x){
                if(x.status == 200 && x.responseText == "1"){
                    dialog.status.success()
                }else{
                    dialog.status.fail()
                    $('.dialog .message .text').html('Error code '+x.status)
                    dialog.bind()
                    dialog.status.confirm()
                    $('.dialog .title-bar .title').html("Error found")
                }
            })
        },1000)
    }else{
        dialog.status.fail()
        dialog.bind()
        $('.dialog .title-bar .title').text("Error found")
        // FIX: user-facing typo "Invvalid"
        $('.dialog .message .text').html('Invalid Email entered')
        dialog.status.confirm()
    }
}
|
||||
|
||||
/**
 * This namespace is designed to export data to either the cloud or to a database
 */
var publish={set:{}}

/**
 * POST the export configuration held in sessionStorage.export to /export,
 * showing progress in a dialog. Non-200 responses are reported after a
 * short delay so the busy state remains visible.
 */
publish.post = function(){

    if($('.jxmodal').length > 0){
        jx.modal.close()
    }
    dialog.open('Export/ETL','Please wait')
    dialog.status.busy()

    var http = HttpClient.instance()
    http.setData(JSON.parse(sessionStorage.export),"application/json")
    http.post(sessionStorage.io_context+'/export',function(x){
        if (x.status != 200){
            setTimeout(function(){
                $('.dialog .message .text').html('An error occurred with code '+x.status)
                dialog.status.fail()
                dialog.status.wait()

            },1500)

        }
        //
        // @TODO: Have progress be monitored for this bad boy i.e open the connection to socket and read in ...
        //
    })

}
|
||||
/**
 * Read the credential file chosen in the cloud-upload widget, normalize it
 * into the export payload (sessionStorage.export) and trigger publish.post().
 */
publish.set.file = function(){
    var file = $('#file')[0].files[0]
    $('.file .name').html(file.name)
    // Disable the picker while the file is being read
    var button = $('.cloud input').prop('disabled',true)
    var div = $('.cloud .file .fa-file-upload')[0]
    $(div).empty()
    $(div).addClass('fas fa-cog fa-spin')
    var reader = new FileReader()
    reader.readAsText(file)


    reader.onload = function(){
        _args = {"type":$('.cloud .id').html().trim(),"content":reader.result}
        // _args = JSON.stringify(_args)
        // Content not JSON: presumably an S3-style CSV credential file with
        // access/secret keys on the second line — confirm the expected format
        if (_args.content.match(/^\{.+/i) == null){
            content = _args.content.split('\n')[1].split(',')
            _args.content = {'bucket':'healthcareio','access_key':content[0].trim(),'secret_key':content[1].trim()}
        }
        sessionStorage.export = JSON.stringify(_args)
    }

    reader.onloadend = function(){
        // Swap the spinner for a check mark, then kick off the export
        setTimeout(function(){
            var div = $('.cloud .file .fa-cog')[0]
            $(div).empty()
            $(div).addClass('fas fa-check')
            $(div).removeClass('fa-spin')
            // jx.modal.close()

            //setTimeout(jx.modal.close,1500)
            publish.post()
        },2000)
    }
}
|
||||
publish.database = {}

/**
 * Initialize the export dialog for the storage target *id*: reset the export
 * payload, fetch the /export form, and toggle the database vs cloud panes.
 * @param id storage identifier, expected in {mongodb,couchdb,postgresql,mysql,sqlite}
 */
publish.database.init = function(id){
    //
    // we are expecting id in {mongo,couch,postgresql,mysql,sqlite}
    // @TODO: Account for cloud service brokers like dropbox, box, one-drive and google-drive
    sessionStorage.export = "{}"
    // FIX: removed `p = {'id':id}` — it leaked an unused implicit global
    var hide_id, show_id
    if (id.match(/(mongodb|postgresql|mysql|sqlite|couchdb)/i)){
        hide_id = '.store .cloud'
        show_id = '.store .database'
    }else{
        //
        // @TODO: generate an error message
        show_id = '.store .cloud'
        hide_id = '.store .database'
    }
    var http = HttpClient.instance()
    http.get(sessionStorage.io_context+'/export',function(x){
        var html = x.responseText
        jx.modal.show({'html':html,'id':'dialog'})
        // Hide the inapplicable pane, then reveal the relevant one
        $(hide_id).hide(function(){
            $(show_id).show()
        })
        $('.store .id').text(id)
    })
}
|
Loading…
Reference in new issue