DC - Bug fix by enabling file handling

data-collector
Steve L. Nyemba 7 years ago
parent d080438689
commit c93f7d9cae

@ -1,15 +1,6 @@
-{
-    "virtual-env":{
-        "class":"Sandbox",
-        "config":{
-            "3-launchpad":{"requirements":"/Users/steve/Documents/git/repair-file/required.txt","sandbox":"/Users/steve/Documents/git/sandbox"}
-        }
-    },
-    "processes":{
-        "class":"DetailProcess",
-        "config":{
-            "system":["postgresql","couchdb","httpd"]
-        }
-    }
-}
+{
+    "key":"nyemba@gmail.com",
+    "id":"maori",
+    "apps":"chrome,preview,itunes",
+    "folders":"/Users/steve/tmp"
+}

@ -1,10 +1,19 @@
""" h="""
This is a data-collector client, that is intended to perform data-collection operations and submit them to an endpoint This is a data-collector client, that is intended to perform data-collection operations and submit them to an endpoint
@required: @required:
- key application/service key - key application/service key
- id node identifier - id node identifier
usage :
python data-collector.py --path config.json
The configuration file is structured as JSON object as follows :
{
id: node identifier
key: customer's identification key
apps:"app_1,app_2,...",
folders:"path_1,path_2, ..."
}
""" """
from utils.params import PARAMS as SYS_ARGS from utils.params import PARAMS as SYS_ARGS, Logger
import requests import requests
import pickle import pickle
import json import json
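The docstring above documents the new --path option and the flat configuration layout the collector now expects. As a quick illustration (the field values below are placeholders, not taken from the commit; only the field names come from the docstring), such a config.json could be produced like this:

    import json

    # Placeholder values; only the field names are taken from the docstring above.
    sample = {
        "id": "node-01",
        "key": "customer-key",
        "apps": "app_1,app_2",
        "folders": "/tmp/path_1,/tmp/path_2"
    }
    with open("config.json", "w") as f:
        json.dump(sample, f, indent=2)

    # The collector would then be launched with:
    #   python data-collector.py --path config.json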
@ -29,16 +38,24 @@ class Collector(Thread) :
headers = {"key":SYS_ARGS["key"],"id":SYS_ARGS["id"]} #,"scope":json.dumps(scope)} headers = {"key":SYS_ARGS["key"],"id":SYS_ARGS["id"]} #,"scope":json.dumps(scope)}
headers['content-type'] = 'application/json' headers['content-type'] = 'application/json'
try: try:
Logger.log(subject='Collector',object='api',action='request',value=ENDPOINT)
url = "/".join([ENDPOINT,"init/collector"]) url = "/".join([ENDPOINT,"init/collector"])
r = requests.post(url,headers=headers,data=json.dumps(SYS_ARGS)) data = {}
for id in SYS_ARGS :
if id not in ['id','key'] :
data[id] = SYS_ARGS[id]
r = requests.post(url,headers=headers,data=json.dumps(data))
r = json.loads(r.text) r = json.loads(r.text)
self.monitor = pickle.loads(r[0]) self.monitor = pickle.loads(r[0])
self.monitor.lock = RLock() self.monitor.lock = RLock()
#:w #:w
#self.monitor.set('lock',RLock()) #self.monitor.set('lock',RLock())
Logger.log(subject='Collector',object='api',action='load',value='')
except Exception,e: except Exception,e:
print e.message
Logger.log(subject='Collector',object='api',action='error',value=str(e))
self.monitor = None self.monitor = None
def run(self): def run(self):
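With this change the request body no longer echoes the raw SYS_ARGS: "id" and "key" stay in the headers and every other argument is copied into a separate payload. A minimal standalone sketch of that filter-and-post pattern, assuming a placeholder ENDPOINT and made-up argument values:

    import json
    import requests

    SYS_ARGS = {"id": "node-01", "key": "customer-key", "apps": "app_1,app_2"}   # made-up values
    ENDPOINT = "http://localhost:8080"                                           # placeholder endpoint

    headers = {"key": SYS_ARGS["key"], "id": SYS_ARGS["id"], "content-type": "application/json"}
    # Copy everything except the two values that already travel in the headers.
    data = {name: SYS_ARGS[name] for name in SYS_ARGS if name not in ("id", "key")}
    r = requests.post("/".join([ENDPOINT, "init/collector"]), headers=headers, data=json.dumps(data))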
@ -47,7 +64,7 @@ class Collector(Thread) :
""" """
#self.monitor.start() #self.monitor.start()
Logger.log(subject='Collector',object='monitor',action='start',value='')
thread = Thread(target=self.monitor.run) thread = Thread(target=self.monitor.run)
thread.start() thread.start()
# print self.monitor.config['store'] # print self.monitor.config['store']
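The monitor obtained from the endpoint is run on its own thread so the collector itself is not blocked by it. A small sketch of that pattern with a stand-in monitor class (the real one is unpickled from the API response):

    from threading import Thread

    class Monitor:
        # Stand-in for the unpickled monitor object.
        def run(self):
            print("collecting ...")

    worker = Thread(target=Monitor().run)
    worker.start()
    worker.join()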
@ -62,6 +79,17 @@ class Collector(Thread) :
 # except Exception,e:
 #     print e
-if __name__ == '__main__' :
+if __name__ == '__main__' and 'path' in SYS_ARGS:
+    #
+    #
+    path = SYS_ARGS['path']
+    f = open(path)
+    p = json.loads(f.read())
+    f.close()
+    Logger.init('data-collector')
+    SYS_ARGS = dict(SYS_ARGS,** p)
     thread = Collector()
     thread.start()
+else:
+    print (h)
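This new entry point is the file handling the commit title refers to: the JSON file named by --path is merged into the command-line arguments before the collector starts, and without --path the usage text in h is printed instead. A short illustration of the dict(SYS_ARGS, **p) merge, with placeholder values:

    import json

    SYS_ARGS = {"path": "config.json", "id": "cli-node"}                          # parsed command-line flags
    p = json.loads('{"id": "node-01", "key": "customer-key", "apps": "app_1"}')   # stand-in for the file content
    SYS_ARGS = dict(SYS_ARGS, **p)
    print(SYS_ARGS["id"])   # prints "node-01": values from the file win over the CLI ones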

Binary file not shown.

Binary file not shown.

@ -16,4 +16,17 @@ if len(sys.argv) > 1:
         PARAMS[key] = ('/'+value).replace('//','/')
         i += 2
+import logging
+import json
+from datetime import datetime
+class Logger :
+    @staticmethod
+    def init(filename):
+        name = "-".join([filename,datetime.now().strftime('%d-%m-%Y')])+".log"
+        logging.basicConfig(filename=name,level=logging.INFO,format="%(message)s")
+    @staticmethod
+    def log(**args) :
+        args['date'] = datetime.now().strftime('%d-%m-%Y %M:%H:%S')
+        logging.info(json.dumps(args))
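The Logger added here writes one JSON document per line to a date-stamped log file, which is what the Logger.log(...) calls in data-collector.py rely on. A usage sketch, assuming the repository's utils package is importable and using an arbitrary file name:

    from utils.params import Logger

    Logger.init('demo')   # logs to demo-<dd-mm-YYYY>.log in the working directory
    Logger.log(subject='Collector', object='api', action='request', value='http://localhost:8080')
    # appends a line such as:
    #   {"subject": "Collector", "object": "api", "action": "request", "value": "...", "date": "..."}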
