diff --git a/requirements.txt b/requirements.txt
index fba81d2..10329c5 100755
--- a/requirements.txt
+++ b/requirements.txt
@@ -31,3 +31,4 @@ socketpool==0.5.3
 subprocess32==3.5.2
 urllib3==1.23
 Werkzeug==0.11.11
+XlsxWriter==1.1.2
diff --git a/src/api/index.py b/src/api/index.py
index 64ee68d..c885264 100755
--- a/src/api/index.py
+++ b/src/api/index.py
@@ -13,20 +13,21 @@
 	- Add socketio, so that each section of the dashboard updates independently
 """
-from flask import Flask, session, request, redirect, Response, stream_with_context
+from flask import Flask, session, request, redirect, Response, stream_with_context, send_file
 from flask.templating import render_template
 import requests
 from flask_session import Session
 from datetime import datetime
 import time
-
+from werkzeug.datastructures import FileStorage
+import tarfile
+from io import BytesIO
+import pandas as pd
 import sys
 import os
 import json
 import re
-# import monitor
 import uuid
-
+from cStringIO import StringIO
 # from utils.register import Register
 from utils.charting import *
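One caveat on the imports above: cStringIO is Python 2 only, which matches the rest of this codebase (requirements.txt pins subprocess32, a 2.7 backport). For reference, a minimal sketch of what the same stream imports would look like under a hypothetical Python 3 port; this is an assumption, not part of the patch:

# Python 3 equivalents of the stream imports added above (assumes a
# hypothetical 3.x port; not part of this patch). XlsxWriter emits bytes,
# so the workbook buffer would have to be a BytesIO, not a text StringIO.
from io import BytesIO, StringIO  # cStringIO does not exist on Python 3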
@@ -233,22 +234,6 @@ def get_clients():
 		# features = json.loads(session['user-plan']['metadata']['features'])
 		args['client_count'] = features['clients']
 		return render_template('dashboard/client-keys.html',**args)
-	# elif request.method == 'POST' :
-	# 	clients = request.get_json(silent=True)
-	# 	#
-	# 	# We can NOT have uncontrolled writes
-	# 	#
-	# 	# N = session['user-plan'][0]['metadata']['features']
-	# 	# clients = client[:N] if len(clients) > N else clients
-
-	# 	args = {"type":SYS_STORE['class']['write'],"args":SYS_STORE['args']}
-	# 	args['args']['uid'] = session['key']
-	# 	writer = factory.instance(**args)
-	# 	# writer.set({"clients":clients})
-	# 	writer.set(clients=clients)
-	# 	return ('',200)
-
-	# pass
 	else:
 		return ('',403)
 @app.route('/1/clients',methods=['POST'])
@@ -375,6 +360,103 @@ def client_log():
 		return "",200
 	else :
 		return "",403
+@app.route("/1/data/<action>/<format>",methods=['POST'])
+def manage_data(action,format) :
+	if 'key' not in session :
+		return ('',403)
+	elif action in ['archive','download']:
+		args = dict({"type":SYS_STORE['class']['read'],"args":SYS_STORE['args']})
+		args['args']['uid'] = session['key']
+		reader = factory.instance(**args)
+		data = reader.view('clients/logs',key=session['key'])
+		logs = [{"name":id,"data-frame":pd.DataFrame(data[id])} for id in data]
+		#
+		# We need to convert the logs into spreadsheets (one workbook, or one CSV per log)
+		# and either archive them to the cloud or return them as a download
+		if format in ['csv','xls','xlsx'] :
+			#
+			# This will go straight to the cloud: we get the token and push the files out
+			#
+			headers = {"key":session['cloud-info']['user']['access_token']}
+
+			if format in ['xls','xlsx'] :
+				filename = "smart-top-logs-" + "-".join([str(time.localtime().tm_year),str(time.localtime().tm_mon),str(time.localtime().tm_mday)])
+				stream = StringIO()
+				writer = pd.ExcelWriter(filename, engine='xlsxwriter')
+				writer.book.filename = stream
+				mimetype = "application/octet-stream"
+			else:
+				mimetype = "text/plain; charset=dropbox-cors-hack"
+			files = {}
+			ext = '.'+format
+
+			for row in logs :
+				df = row['data-frame']
+				id = row['name']
+				if format in ['xls','xlsx'] :
+					#
+					# creating sheets in the workbook (excel terminology)
+					df.to_excel(writer,sheet_name=id)
+				else:
+					filename = "smart-top-logs-"+id+"-"+"-".join([str(time.localtime().tm_year),str(time.localtime().tm_mon),str(time.localtime().tm_mday)])
+					filename = filename + ext
+					stream = StringIO()
+					df.to_csv(stream)
+					stream.seek(0)
+					files[filename] = FileStorage(stream=stream,filename=filename,name=filename,content_type='text/plain; charset=dropbox-cors-hack')
+
+			if format in ['xls','xlsx'] :
+				writer.save()
+				stream.seek(0)
+				ext = ".xlsx"
+				filename = filename+ext
+				files = {}
+				files[filename] = FileStorage(stream=stream,filename=filename,name=filename,content_type='application/octet-stream; charset=utf8')
+			if action == 'archive' :
+				service = session['cloud-info']['user']['sid']
+				headers['mimetype'] = mimetype
+				headers['folder'] = 'smart-top'
+				url = "/".join(['http://localhost/cloud-view',service,'upload'])
+				r = requests.post(url,files=files,headers=headers)
+
+				return ('',200)
+			else:
+				#
+				# Let's download the files (tarball or workbook)
+				if len(files.keys()) > 1 :
+					ostream = BytesIO()
+					tar = tarfile.open(mode='w:gz',fileobj=ostream)
+					filename = "smart-top-logs-" + "-".join([str(time.localtime().tm_year),str(time.localtime().tm_mon),str(time.localtime().tm_mday)])
+					for id in files :
+						stream = files[id].stream
+						info = tarfile.TarInfo(name=files[id].filename)
+						info.size = len(stream.getvalue())
+						tar.addfile(tarinfo=info,fileobj=BytesIO(stream.getvalue()))
+
+					tar.close()
+					ostream.seek(0)
+					return send_file(ostream,as_attachment=True,attachment_filename=filename+'.tar.gz',mimetype='application/gzip')
+				else:
+					#
+					# return the single file as-is (workbook or lone CSV)
+					file = files[filename]
+					return send_file(
+						file.stream,
+						as_attachment=True,
+						attachment_filename=filename,
+						mimetype=mimetype
+					)
+
+	return ('',404)
 @app.route("/1/log",methods=['POST'])
 def log():
 	key = request.headers['key']
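For readers unfamiliar with the writer.book.filename trick used above (pointing the xlsxwriter workbook at an in-memory stream so nothing touches disk), here is a self-contained sketch of the same pattern. It assumes Python 3, a pandas version whose ExcelWriter accepts a buffer, and Flask 2.x; the route name and sample frames are illustrative, not the patched endpoint itself:

# Minimal sketch: build a multi-sheet workbook in memory and stream it
# back, one sheet per log, as manage_data does above.
from io import BytesIO

import pandas as pd
from flask import Flask, send_file

app = Flask(__name__)

@app.route("/export/xlsx")  # illustrative route, not the patched one
def export_xlsx():
    # stand-in for the per-client log frames read from the store
    frames = {"node-1": pd.DataFrame({"cpu": [0.2, 0.4], "mem": [512, 640]})}
    stream = BytesIO()
    with pd.ExcelWriter(stream, engine="xlsxwriter") as writer:
        for name, df in frames.items():
            df.to_excel(writer, sheet_name=name)  # one sheet per log
    stream.seek(0)
    return send_file(stream, as_attachment=True,
                     download_name="logs.xlsx",  # attachment_filename= on Flask < 2.0
                     mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")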
@@ -529,48 +611,6 @@ def set_logs() :
 		key = session['key']
 	pass
-# @app.route('/1/get/logs')
-# def get_logs() :
-# 	"""
-# 		@deprecate
-# 	"""
-# 	r = {}
-# 	#session['key'] = 'c259e8b1-e2fb-40df-bf03-f521f8ee352d'
-# 	key = session['key'] if 'key' in session else None
-# 	if key is None and 'key' in request.headers:
-# 		key = request.headers['key']
-# 	if key is None :
-# 		return json.dumps(r)
-# 	else:
-# 		try:
-
-# 			#gReader = factory.instance(type=class_read,args=p)
-# 			#plan = gReader.view('plans/active',key=key)
-# 			plan = session['plan']
-# 			if plan :
-# 				dbname = plan['name']
-# 				args = str(json.dumps(p))
-# 				args = json.loads(args)
-# 				args['dbname'] = dbname
-# 				args['uid'] = key
-# 				#
-# 				# Let us persis this information (as well as the key)
-# 				#session['plan'] = plan['name']
-# 				session['store']= args
-# 				session['key'] = key
-
-# 				scope = ['app_resources','folder_info','app_status_details','app_resource_usage_details'] #,'emails','log_size']
-# 				gReader = factory.instance(type=class_read,args=args)
-# 				for id in scope :
-# 					view = ('summary/'+id).strip()
-# 					r[id] = gReader.view(view,key=key)
-# 				if 'logs' in session :
-# 					for id in session['logs'] :
-# 						r[id] = session['logs'][id]
-# 						# r[id] = r[node_id]
-# 		except Exception,e:
-# 			print (e)
-# 	return json.dumps(r)
 @app.route("/1/set/logs",methods=['PUT'])
 def update_profile():
 	try:
@@ -625,119 +665,6 @@ def get_usage_trend():
 	return json.dumps(r)
-
-# @app.route("/1/app/usage/trend")
-# def get_usage_detail():
-# 	"""
-# 		@deprecate
-# 		This function returns detailed information about usage per application monitored. It will return the 24 most recent observations in the logs
-
-# 		@param node	node identifier e.g: apps@zulu.io
-# 		@return {node_x:{app_1:{memory_usage:[],cpu_usage:[]}},...}
-# 	"""
-# 	r = {}
-# 	try:
-# 		if 'id' not in request.args and 'node' not in request.args :
-# 			id = session['default.node']
-# 		else:
-# 			id = request.args['id'] if 'id' in request.args else request.args.get('node')
-# 		if 'app' not in request.args :
-# 			if 'default.app' in session :
-# 				app_id = session['default.app']
-# 			else:
-# 				app_id = None
-# 		else:
-# 			app_id = request.args.get('app')
-# 		#
-# 		# removing trailing white spaces
-
-# 		gReader = factory.instance(type=class_read,args=p)
-# 		r = gReader.view('summary/app_resource_usage_details',key=p['uid'])
-# 		id = id.strip()
-# 		if app_id is not None :
-# 			app_id = app_id.strip()
-# 			r = r[id][app_id]
-# 		else :
-# 			r = r[id]
-
-# 	except Exception,e:
-# 		print ' *** ',(e)
-# 	return json.dumps(r)
-
-
-# @app.route('/1/app/status')
-# def app_status() :
-# 	"""
-# 		@deprecate
-# 		This function aggregates the number of crashes/running/idle instances found in the past 24 log entries
-# 		for a particular application
-# 		@param nid	node identifier e.g: app@zulu.io
-# 		@param app	application identifier e.g: kate, firefox, chrome ... specified in the configuraiton
-# 	"""
-# 	r = []
-# 	try:
-# 		nid = request.args.get('node')	# Node identifier
-# 		aid = request.args.get('app')	# application identifier
-
-# 		gReader = factory.instance(type=class_read,args=p)
-# 		r = gReader.view('summary/app_status_details',key=p['uid'])
-# 		#
-# 		#@TODO: Once the back-end enables the nodes in which the application is running, uncomment the line below
-# 		#
-# 		print[nid,aid]
-# 		r = r[nid][aid]
-
-# 	except Exception,e:
-# 		print e
-
-# 	return json.dumps(r)
-
-
-#@app.route('/get/<id>')
-#def procs(id):
-	#try:
-		#gReader = factory.instance(type=class_read,args=p)
-		#data = gReader.read()
-		#ahandler = AnalyzeAnomaly()
-		#learn = {}
-		#if 'learn' in data :
-			#for row in data['learn'] :
-				#label = row['label']
-				#learn[label] = row
-		#r = {}
-		#for label in data :
-			#if label not in ['learn','folders'] :
-				#index = len(data[label]) - 1
-				#row = data[label][index]
-				#r[label] = row
-				##
-				## Let us determine if this is a normal operation or not
-				## We will update the status of the information ...
-				##
-
-				#for row in r[label] :
-					#index = r[label].index(row)
-					#if row['label'] in learn:
-						#id = row['label']
-						#px = ahandler.predict([row],learn[id])
-						#if px :
-
-							## row['anomaly'] = px[1]==1
-							#print ""
-							#print label,' *** ',index
-							#row = dict(row,**px)
-							#r[label][index] =row
-		##
-		## @TODO:
-		## Compile a report here that will be sent to the mailing list
-		##
-
-	#except Exception, e:
-		#print e
-		#r = []
-
-	#return json.dumps(r)
-
-
 @app.route('/init/collector',methods=['POST'])
 def init_collector():
 	"""
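The archive branch of manage_data above hands the in-memory files to requests as a multipart POST. A stripped-down sketch of that call, with the cloud-view endpoint and header names taken from the patch and an illustrative payload standing in for the real logs:

# Minimal sketch of the multipart upload done by manage_data's archive
# branch. The URL, "key" token header, and "folder" header mirror the
# patch; the function name and payload are illustrative.
from io import BytesIO

import requests

def archive_to_cloud(token, service, payload=b"id,cpu\n0,0.2\n"):
    headers = {
        "key": token,               # user's cloud access token
        "mimetype": "text/plain",   # content type, passed via headers
        "folder": "smart-top",      # target folder on the user's drive
    }
    # requests expects {field: (filename, fileobj, content_type)}
    files = {"logs.csv": ("logs.csv", BytesIO(payload), "text/plain")}
    url = "/".join(["http://localhost/cloud-view", service, "upload"])
    return requests.post(url, files=files, headers=headers)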
@@ -835,65 +762,6 @@ def InitCollector():
 		print ' init (error) ',str(e)
 	return json.dumps(r)
-"""
-	This function/endpoint will assess n-virtual environments and return the results
-	@TODO: Should this be stored for future mining (I don't think so but could be wrong)
-"""
-# @app.route('/sandbox')
-# def sandbox():
-# 	global CONFIG
-
-# 	if 'sandbox' in CONFIG: #CONFIG['monitor']:
-# 		#handler = HANDLERS['sandbox']['class']
-# 		#conf = HANDLERS['sandbox']['config']
-# 		r = []
-# 		# p = Factory.instance('sandbox',CONFIG)
-# 		handler = monitor.Sandbox()
-# 		conf = CONFIG['sandbox']
-
-# 		for id in conf:
-# 			try:
-# 				handler.init(conf[id])
-# 				r.append (dict(handler.composite(),**{"label":id}))
-# 			except Exception as e:
-# 				pass
-# 	else:
-
-# 		r = []
-
-
-# 	return json.dumps(r)
-#@app.route('/trends')
-#def trends ():
-	#id = request.args.get('id')
-	#app = request.args.get('app').strip()
-	#p = CONFIG['store']['args']
-	#class_read = CONFIG['store']['class']['read']
-
-
-	#gReader = factory.instance(type=class_read,args=p)
-	#r = gReader.read()
-	#if id in r:
-		#r = r[id] #--matrix
-		#series = []
-
-		#for row in r:
-
-			#series += [item for item in row if str(item['label'])== app]
-		#if len(series) > 12 :
-			#beg = len(series) - 8
-			#series = series[beg:]
-		#return json.dumps(series)
-	#else:
-		#return "[]"
-#@app.route('/download',methods=['POST'])
-#def requirements():
-	#stream = request.form['missing']
-	#print stream
-	#stream = "\n".join(json.loads(stream))
-	#headers = {"content-disposition":"attachment; filename=requirements.txt"}
-	#return Response(stream,mimetype='text/plain',headers=headers)
-
 @app.route('/dashboard/')
 def dashboard():
 	context = PARAMS['context']
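The deleted /download stub in the hunk above set a content-disposition header on a Response by hand; the new manage_data endpoint gets the same behaviour from flask.send_file. A small sketch of the equivalence, with illustrative routes and payload:

# Hand-rolled attachment header (as in the removed stub) vs. send_file,
# which derives the same header from its arguments.
from io import BytesIO

from flask import Flask, Response, send_file

app = Flask(__name__)

@app.route("/download/manual")
def download_manual():
    headers = {"content-disposition": "attachment; filename=requirements.txt"}
    return Response("flask\npandas\n", mimetype="text/plain", headers=headers)

@app.route("/download/send-file")
def download_send_file():
    return send_file(BytesIO(b"flask\npandas\n"), as_attachment=True,
                     download_name="requirements.txt",  # attachment_filename= on Flask < 2.0
                     mimetype="text/plain")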
@@ -927,133 +795,6 @@ def user():
 		title = 'Upgrade'
 	return render_template('user.html',context=context,title=title)
-
-#"""
-	#This function is designed to trigger learning for anomaly detection
-	#@TODO: forward this to a socket i.e non-blocking socket
-#"""
-#@app.route('/anomalies/get')
-#def learn():
-	#global CONFIG
-	#p = CONFIG['store']['args']
-	#class_read = CONFIG['store']['class']['read']
-	#gReader = factory.instance(type=class_read,args=p)
-	#d = gReader.read()
-
-	#if 'learn' in d :
-		#info = d['learn']
-
-		#del d['learn']
-	#else :
-		#info = []
-	#r = []
-	#if 'id' in request.args:
-		#id = request.args['id']
-		#d = d[id]
-		#params = {}
-		#for item in info:
-
-			#label = item['label']
-			#params[label] = item
-
-		##apps = list(set(ML.Extract(['label'],d)))
-		#r = []
-		#if params :
-			##
-			## If we have parameters available
-			#p = AnomalyDetection()
-			#apps = params.keys()
-			#for name in apps :
-				#if name not in params:
-					#continue
-				#_info = params[name]
-				#try:
-					#xo = ML.Filter('label',name,d)
-				#except Exception as e:
-					#xo = []
-					##print name,e
-				#if len(xo) == 0:
-					#continue
-				#xo = [xo[ len(xo) -1]]
-
-				#value = p.predict(xo,_info)[0]
-
-				#if len(value):
-					#report = dict(_info,**{'predicton':value})
-					#r.append(report)
-
-
-
-				##print app,value
-				##if value is not None:
-				##	r.append(value)
-
-	#return json.dumps(r)
-
-
 """
 	This function returns anomalies for a given context or group of processes
 	The information returned is around precision/recall and f-score and parameters
 """
-#@app.route('/anomalies/status')
-#def anomalies_status():
-	#global CONFIG
-	#p = CONFIG['store']['args']
-	#class_read = CONFIG['store']['class']['read']
-	#gReader = factory.instance(type=class_read,args=p)
-	#d = gReader.read()
-	#if 'learn' in d :
-		#info = d['learn']
-
-		#del d['learn']
-	#else :
-		#info = []
-	#print info
-	#r = []
-	#if 'id' in request.args:
-		#id = request.args['id']
-		#r = info
-	#return json.dumps(r)
-#@app.route('/folders')
-#def get_folders():
-	#global CONFIG
-	#p = CONFIG['store']['args']
-	#class_read = CONFIG['store']['class']['read']
-	#gReader = factory.instance(type=class_read,args=p)
-	#d = gReader.read()
-	#if 'folders' in d:
-		#d = d['folders']
-		#hosts = set([row[0]['id'] for row in d])
-		#m = {}
-		#for id in hosts:
-			#for row in d:
-				#if id == row[0]['id'] :
-					#m[id] = row
-		#d = m.values()
-		#for row in d:
-			#print row[0]['id']
-		## index = len(d) - 1
-		## d = d[index]
-
-
-		## m = {}
-		## for row in d :
-
-		## 	key = row.keys()[0]
-
-		## 	row = row[key]
-		## 	if key not in m:
-		## 		r.append(row)
-		## 		m[key] = len(r) -1
-		## 	else:
-		## 		index = m[key]
-		## 		r[index] = row
-		## d = r
-	#else:
-		#d = []
-
-	#return json.dumps(d)
 @app.route("/1/plot/<id>",methods=['POST'])
 def prepare_plot(id):
 	if 'key' in session :
diff --git a/src/api/static/css/default.css b/src/api/static/css/default.css
index ecdc898..67762fc 100644
--- a/src/api/static/css/default.css
+++ b/src/api/static/css/default.css
@@ -7,7 +7,7 @@
 }
 @media only screen and (max-width: 1024px){
-	body {font-size:14px; font-weight:lighter; color:green}
+	body {font-size:14px; font-weight:lighter;}
 }
 body {
 	/* margin-left:1%; */
diff --git a/src/api/templates/account/data.html b/src/api/templates/account/data.html
index ac76665..d436529 100644
--- a/src/api/templates/account/data.html
+++ b/src/api/templates/account/data.html
@@ -14,6 +14,7 @@
+
-
Manage My Data
+
Manage My Data
-
+
Archive
-
+
Archive to your cloud account in a smart-top folder:


Using cloud storage as a device
Share and control access to your data

-
- Archive Now -
+
Format
+
+ + +
+ +
+
CSV
XLSX
+
+ +
+
+
+ Archive Now +
+
+
+ + +
Download
-
+
Download your logs in CSV or Excel format:


Conduct personal analysis
@@ -65,13 +127,27 @@ $(document).ready(function(){

-
- Download Now +
Format
+
+ + +
+ +
+
CSV
XLSX
+
+ +
+
+
+ Download Now +
+
Delete
-
+
Delete all of your logs from our servers. We recommend:


Either downloading or archiving first
diff --git a/src/models/basic.py b/src/models/basic.py
index 6c542c0..0f7841e 100644
--- a/src/models/basic.py
+++ b/src/models/basic.py
@@ -4,11 +4,16 @@
 """"
 class model :
+	"""
+	This model provides an overview of the raw data, given a list of variables; if a single variable is provided, a regression line is added.
+	The intent of this model is to let the user visualize the distribution and trend of the data as-is.
+	"""
 	def __init__(**args):
 		self.data = args['data']
 		self.node = args['node']
 		self.months = {1:"Jan",2:"Feb",3:"Mar",4:"Apr",5:"May",6:"Jun",7:"Jul",8:"Aug",9:"Sep",10:"Oct",11:"Nov",12:"Dec"}
 		self.cache = {}
+		self.set("type","scatter")
 	def can_do(self):
 		return False
 	def format_date(self,row):
diff --git a/src/models/free/apps.py b/src/models/free/apps.py
index f6bb62f..9d16bed 100644
--- a/src/models/free/apps.py
+++ b/src/models/free/apps.py
@@ -63,8 +63,8 @@ class trend(model):
 	pass
-model = keras.Sequential([keras.layers.Dense(2, activation=tf.nn.relu,input_shape=(x.shape[1],)),keras.layers.Dense(2, activation=tf.nn.relu),keras.layers.Dense(1)])
-optimizer = tf.train.RMSPropOptimizer(0.001)
-model.compile(loss='mse', optimizer=optimizer,metrics=['mae'])
\ No newline at end of file
+# model = keras.Sequential([keras.layers.Dense(2, activation=tf.nn.relu,input_shape=(x.shape[1],)),keras.layers.Dense(2, activation=tf.nn.relu),keras.layers.Dense(1)])
+# optimizer = tf.train.RMSPropOptimizer(0.001)
+# model.compile(loss='mse', optimizer=optimizer,metrics=['mae'])
\ No newline at end of file
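The keras block being commented out in apps.py was module-level code referencing an undefined x at import time, which is presumably why it is disabled rather than fixed. If it is ever revived, a minimal sketch along these lines would keep construction out of import time; it assumes the same TF 1.x-era API the snippet used, and the function name and input handling are illustrative:

# Sketch only: the commented-out experiment above, wrapped in a function
# so nothing executes at import time. Assumes TF 1.x (tf.train.RMSPropOptimizer)
# with tf.keras; the input width becomes an explicit parameter instead of a
# module-level x.
import tensorflow as tf
from tensorflow import keras

def build_regressor(n_features):
    model = keras.Sequential([
        keras.layers.Dense(2, activation=tf.nn.relu, input_shape=(n_features,)),
        keras.layers.Dense(2, activation=tf.nn.relu),
        keras.layers.Dense(1),  # single regression output
    ])
    model.compile(loss='mse',
                  optimizer=tf.train.RMSPropOptimizer(0.001),
                  metrics=['mae'])
    return model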