@@ -18,7 +18,17 @@ class ML:
         value = ML.CleanupName(value)
         #return [item[0] for item in data if item and attr in item[0] and item[0][attr] == value]
-        return [[item for item in row if item[attr] == value][0] for row in data]
+        #return [[item for item in row if item[attr] == value][0] for row in data]
+        #
+        # We are making the filtering more resilient, i.e. if an item doesn't exist we don't have to throw an exception.
+        # This is why we expanded the loops ... fully expressive but resilient.
+        #
+        r = []
+        for row in data :
+            for item in row :
+                if attr in item and item[attr] == value:
+                    r.append(item)
+        return r
     @staticmethod
     def Extract(lattr,data):
         if isinstance(lattr,basestring):
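
A minimal standalone sketch of the expanded filter introduced above, assuming rows are lists of dicts; the helper name filter_rows and the sample data are illustrative, not part of the module:

    # Items missing the attribute are skipped instead of raising, unlike the
    # commented-out one-liner, which hits KeyError/IndexError on such rows.
    def filter_rows(attr, value, data):
        r = []
        for row in data:
            for item in row:
                if attr in item and item[attr] == value:
                    r.append(item)
        return r

    data = [
        [{"name": "httpd", "cpu": 1.2}, {"cpu": 0.4}],   # second item has no "name" key
        [{"name": "mysqld", "cpu": 3.1}],
    ]
    print(filter_rows("name", "httpd", data))            # [{'name': 'httpd', 'cpu': 1.2}]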
@@ -67,7 +77,9 @@ class AnomalyDetection:
         yo = ML.Extract([label['name']],xo)
         xo = ML.Extract(features,xo)
         yo = self.getLabel(yo,label)
+        #
+        # @TODO: Ensure this can be fine-tuned; the training size matters for learning and is not obvious to define upfront.
+        #
         xo = self.split(xo)
         yo = self.split(yo)
         p = self.gParameters(xo['train'])
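
The @TODO above concerns how much of the history goes into training. A hypothetical sketch of a split with a tunable training fraction; the ratio parameter and the function shape are assumptions, not the module's actual self.split signature:

    import math

    # Keep the first `ratio` share of the rows for training, the rest for evaluation.
    def split(data, ratio=0.8):
        cut = int(math.ceil(len(data) * ratio))
        return {"train": data[:cut], "test": data[cut:]}

    xo = [[i, 2 * i] for i in range(10)]
    parts = split(xo, ratio=0.7)
    print(len(parts["train"]), len(parts["test"]))   # 7 3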
@@ -214,7 +226,28 @@ class AnomalyDetection:
         sigma = [ list(row) for row in sigma]
         return {"cov":sigma,"mean":list(u)}
+
+class AnalyzeAnomalies(AnomalyDetection):
+    """
+    This analysis function will include a predicted status, because an anomaly can either be:
+        - A downtime, i.e. end of day
+        - A spike, and thus a potential imminent crash
+    @param xo    matrix of variables
+    @param info  information about what was learnt
+    """
+    def predict(self,xo,info):
+        x = xo[len(xo)-1]
+        r = AnomalyDetection.predict(x,info)
+        #
+        # In order to determine what the anomaly is, we compute the slope (idle or crash).
+        # The slope is computed using the covariance / variance of the features.
+        #
+        N = len(info['features'])
+        xy = ML.Extract(info['features'],xo)
+        xy = np.matrix(xy)
+        vxy = [xy[:,i] for i in range(0,N)]
+        print N,vxy.shape
+        alpha = info['cov'] / vxy
+        return r
 class Regression:
     parameters = {}
     @staticmethod
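
The comments in the new predict() describe classifying the anomaly by the slope of the recent samples, and the vxy/alpha lines are still rough. For reference, a hedged sketch of the usual least-squares slope, cov(x, y) / var(x), which is one way to separate a wind-down (downtime) from a ramp-up (spike); the function name and the sign-based reading are assumptions, not the module's API:

    import numpy as np

    # Least-squares slope of y against x: cov(x, y) / var(x).
    def slope(x, y):
        x = np.asarray(x, dtype=float)
        y = np.asarray(y, dtype=float)
        return np.cov(x, y, bias=True)[0, 1] / np.var(x)

    t = np.arange(10)
    print(slope(t, 10 - t))    # -1.0 -> steady wind-down, e.g. end of day
    print(slope(t, t ** 2))    # 9.0  -> sharp ramp-up, e.g. an imminent spike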