@@ -8,24 +8,51 @@ NOTE: Plugins are converted to a pipeline, so we apply a pipeline when reading or writing
 from transport.plugins import PluginLoader
 import transport
 from transport import providers
-from multiprocessing import Process
+from multiprocessing import Process, RLock
 import time
 import types
 from . import registry
 from datetime import datetime
 import pandas as pd
 import os
 import sys
 import itertools
 import json
 
 class IO:
     """
     Base wrapper class for read/write and support for logs
     """
-    def __init__(self,_agent,plugins):
+    def __init__(self,_agent,plugins,_logger=None):
+        #
+        # We need to initialize the logger here ...
+        #
+        # registry.init()
+        self._logger = _logger if not type(_agent) in [IReader,IWriter] else _agent._logger #transport.get.writer(label='logger') #if registry.has('logger') else None
+        # if not _logger and hasattr(_agent,'_logger') :
+        #     self._logger = getattr(_agent,'_logger')
         self._agent = _agent
+        _date = str(datetime.now())
+        self._logTable = 'logs' #'_'.join(['logs',_date[:10]+_date[11:19]]).replace(':','').replace('-','_')
+
         if plugins :
             self._init_plugins(plugins)
         else:
             self._plugins = None
 
+    def setLogger(self,_logger):
+        self._logger = _logger
+    def log (self,**_args):
+        if self._logger :
+            _date = str(datetime.now())
+            _data = dict({'pid':os.getpid(),'date':_date[:10],'time':_date[11:19]},**_args)
+            for key in _data :
+                _data[key] = str(_data[key]) if type(_data[key]) not in [list,dict] else json.dumps(_data[key])
+            self._logger.write(pd.DataFrame([_data])) #,table=self._logTable)
+        else:
+            print ([' ********** '])
+            print (_args)
     def _init_plugins(self,_items):
         """
         This function will load pipelined functions as a plugin loader
@@ -33,6 +60,7 @@ class IO:
         registry.plugins.init()
         self._plugins = PluginLoader(registry=registry.plugins)
         [self._plugins.set(_name) for _name in _items]
+        self.log(action='init-plugins',caller='read', input =[_name for _name in _items])
         # if 'path' in _args and 'names' in _args :
         #     self._plugins = PluginLoader(**_args)
         # else:
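A minimal wiring sketch for the logger hook introduced above, assuming the logger is just another transport writer, i.e. any agent whose write() accepts a DataFrame. The provider names and file paths are illustrative; only transport.get.reader/writer, setLogger and read come from the code in this diff.

import transport

# illustrative wiring; any writer whose write() accepts a pd.DataFrame will do
_logger = transport.get.writer(provider='sqlite', database='logs.db', table='logs')
_reader = transport.get.reader(provider='sqlite', database='sample.db', table='users')
_reader.setLogger(_logger)   # or pass _logger= when constructing the reader
_df = _reader.read()         # reads now emit pid/date/time log records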
@@ -69,38 +97,74 @@ class IReader(IO):
     """
     This is a wrapper for read functionalities
     """
-    def __init__(self,_agent,pipeline=None):
-        super().__init__(_agent,pipeline)
+    def __init__(self,_agent,_plugins=None,_logger=None):
+        super().__init__(_agent,_plugins,_logger)
 
     def _stream (self,_data ):
-        for _segment in _data :
-            yield self._plugins.apply(_segment)
-        # self.log(action='streaming',object=self._agent._engine.name, input= type(_data).__name__)
+        _shape = []
+        for _segment in _data :
+            _shape.append(list(_segment.shape))
+            yield self._plugins.apply(_segment,self.log)
+
+        self.log(action='streaming',object=self._agent._engine.name, input= {'shape':_shape})
 
     def read(self,**_args):
         if 'plugins' in _args :
             self._init_plugins(_args['plugins'])
-        _data = self._agent.read(**_args)
-        if self._plugins and self._plugins.ratio() > 0 :
-            if types.GeneratorType == type(_data):
-                return self._stream(_data)
+        _data = self._agent.read(**_args)
+        _objectName = '.'.join([self._agent.__class__.__module__,self._agent.__class__.__name__])
+        if types.GeneratorType == type(_data):
+            if self._plugins :
+                return self._stream(_data)
             else:
-                _data = self._plugins.apply(_data)
-                return _data
+                _count = 0
+                for _segment in _data :
+                    _count += 1
+                    yield _segment
+                self.log(action='streaming',object=_objectName, input= {'segments':_count})
+            # return _data
         else:
+            self.log(action='read',object=_objectName, input=_data.shape)
+            if self._plugins :
+                _logs = []
+                _data = self._plugins.apply(_data,self.log)
             return _data
+        # if self._plugins and self._plugins.ratio() > 0 :
+        #     if types.GeneratorType == type(_data):
+        #         return self._stream(_data)
+        #     else:
+        #         _data = self._plugins.apply(_data)
+        #         return _data
+        # else:
+        #     self.log(action='read',object=self._agent._engine.name, input=_data.shape)
+        #     return _data
 class IWriter(IO):
-    def __init__(self,_agent,pipeline=None):
-        super().__init__(_agent,pipeline)
+    lock = RLock()
+    def __init__(self,_agent,pipeline=None,_logger=None):
+        super().__init__(_agent,pipeline,_logger)
     def write(self,_data,**_args):
         if 'plugins' in _args :
             self._init_plugins(_args['plugins'])
         if self._plugins and self._plugins.ratio() > 0 :
-            _data = self._plugins.apply(_data)
-        self._agent.write(_data,**_args)
+            _logs = []
+            _data = self._plugins.apply(_data,_logs,self.log)
+        # [self.log(**_item) for _item in _logs]
+        try:
+            # IWriter.lock.acquire()
+            self._agent.write(_data,**_args)
+        finally:
+            # IWriter.lock.release()
+            pass
 
 #
 # The ETL object in its simplest form is an aggregation of read/write objects
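For reference, the record shape IO.log produces, as a standalone sketch; the action/object/input values are illustrative.

import os, json
from datetime import datetime
import pandas as pd

_date = str(datetime.now())
_row  = dict({'pid':os.getpid(),'date':_date[:10],'time':_date[11:19]},
             action='read', object='transport.sql.SQLite', input={'rows':10,'columns':3})
# lists/dicts are serialized to JSON, everything else to str, mirroring IO.log
_row  = {_k: (json.dumps(_v) if type(_v) in [list,dict] else str(_v)) for _k,_v in _row.items()}
print (pd.DataFrame([_row]))   # the single-row frame handed to the logger's write()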
@@ -111,8 +175,13 @@ class IETL(IReader) :
     This class performs an ETL operation by inheriting a read and adding writes as pipeline functions
     """
     def __init__(self,**_args):
-        super().__init__(transport.get.reader(**_args['source']))
+        _source = _args['source']
+        _plugins = _source['plugins'] if 'plugins' in _source else None
+
+        # super().__init__(transport.get.reader(**_args['source']))
+        super().__init__(transport.get.reader(**_source),_plugins)
+
+        # _logger =
         if 'target' in _args:
             self._targets = _args['target'] if type(_args['target']) == list else [_args['target']]
         else:
@@ -121,25 +190,25 @@ class IETL(IReader) :
         #
         # If the parent is already multiprocessing
         self._hasParentProcess = False if 'hasParentProcess' not in _args else _args['hasParentProcess']
-    def run(self) :
-        """
-        We should apply the etl here, if we are in multiprocessing mode
-        """
-        _data = super().read()
-        for _kwargs in self._targets :
-            self.post(_data,**_kwargs)
-
-    def read(self,**_args):
+    # def run(self) :
+    #     """
+    #     We should apply the etl here, if we are in multiprocessing mode
+    #     """
+    #     return self.read()
+    def run(self,**_args):
         _data = super().read(**_args)
+        self._targets = [transport.get.writer(**_kwargs) for _kwargs in self._targets]
         if types.GeneratorType == type(_data):
+            _index = 0
             for _segment in _data :
-                for _kwars in self._targets :
-                    self.post(_segment,**_kwargs)
+                _index += 1
+                for _writer in self._targets :
+                    self.post(_segment,writer=_writer,index=_index)
                 time.sleep(1)
         else:
-            for _kwargs in self._targets :
-                self.post(_data,**_kwargs)
+            for _writer in self._targets :
+                self.post(_data,writer=_writer)
 
-        return _data
+        # return _data
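Because run() now owns the whole read-and-post loop, it can be handed directly to a Process when the ETL is not already nested under a parent process. A minimal sketch: the provider/database values are illustrative and the transport.iowrapper module path is an assumption based on this file.

from multiprocessing import Process
from transport.iowrapper import IETL   # module path assumed

_etl = IETL(source={'provider':'sqlite','database':'in.db','table':'events'},
            target=[{'provider':'sqlite','database':'out.db','table':'events'}],
            hasParentProcess=False)
_job = Process(target=_etl.run)   # run() reads once and posts every segment to each writer
_job.start()
_job.join()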
@@ -148,6 +217,19 @@ class IETL(IReader) :
         This function returns an instance of a process that will perform the write operation
         :_args parameters associated with writer object
         """
-        writer = transport.get.writer(**_args)
-        writer.write(_data)
-        writer.close()
+        #writer = transport.get.writer(**_args)
+        try:
+            _action = 'post'
+            _shape = dict(zip(['rows','columns'],_data.shape))
+            _index = _args['index'] if 'index' in _args else 0
+            writer = _args['writer']
+
+            writer.write(_data)
+
+        except Exception as e:
+            _action = 'post-error'
+            print (e)
+            pass
+
+        self.log(action=_action,object=writer._agent.__module__, input= {'shape':_shape,'segment':_index})
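Per target, the reworked post above reduces to a plain write on a pre-built writer, wrapped in try/except and followed by a log record. A sketch of the calling convention, with an illustrative writer configuration:

import pandas as pd
import transport

_df     = pd.DataFrame({'x':[1,2,3]})
_writer = transport.get.writer(provider='sqlite', database='out.db', table='events')
_writer.write(_df)   # what post() performs, plus a {'shape':...,'segment':...} log entry
# note: post() no longer calls writer.close(); writers are reused across segments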