analysis.py v 1.1.1.001

ltcptgeneral 2019-09-30 13:37:19 -05:00
parent fd991401c4
commit c135ddb856
3 changed files with 16 additions and 12 deletions


@@ -7,10 +7,12 @@
 # current benchmark of optimization: 1.33 times faster
 # setup:
-__version__ = "1.1.1.000"
+__version__ = "1.1.1.001"
 # changelog should be viewed using print(analysis.__changelog__)
 __changelog__ = """changelog:
+    1.1.1.001:
+        - regression_engine() bug fixes, now actually regresses
     1.1.1.000:
         - added regression_engine()
         - added all regressions except polynomial
@@ -157,7 +159,7 @@ import numba
 from numba import jit
 import numpy as np
 import math
-import regression
+from analysis import regression
 from sklearn import metrics
 from sklearn import preprocessing
 import torch
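
For context on the import change above: the new absolute form only resolves if analysis is a package that contains both this module and regression. The layout below is an assumption for illustration; the commit itself does not show the directory structure.

# Assumed (not shown in this commit) package layout under which
# "from analysis import regression" resolves:
#
#   analysis/
#       __init__.py
#       analysis.py      # this file
#       regression.py    # SGDTrain and the *RegKernel classes used below
#
from analysis import regression

# The kernels are then constructed exactly as in the diff, e.g.:
# regression.LinearRegKernel(len(inputs))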
@@ -224,7 +226,7 @@ def histo_analysis(hist_data):
     return basic_stats(derivative)[0], basic_stats(derivative)[3]
 @jit(forceobj=True)
-def regression_engine(device, inputs, outputs, loss = torch.nn.MSELoss(), _iterations = 10000, lr = 0.1, *args):
+def regression_engine(device, inputs, outputs, args, loss = torch.nn.MSELoss(), _iterations = 10000, lr = 0.01):
     regressions = []
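
This signature change is most likely what the changelog's "now actually regresses" refers to: with *args placed after the keyword parameters that have defaults, an ordinary call never populated args, so none of the "if <name> in args" branches below could run. A minimal sketch of the difference; the stub functions and sample values are illustrative, not the real implementation.

import torch

# Old signature: *args trails the keyword parameters with defaults.
def regression_engine_old(device, inputs, outputs, loss=torch.nn.MSELoss(), _iterations=10000, lr=0.1, *args):
    return args

# New signature: args is an explicit positional list of regression names.
def regression_engine_new(device, inputs, outputs, args, loss=torch.nn.MSELoss(), _iterations=10000, lr=0.01):
    return args

# A keyword call can never populate *args, so no regression branch would run:
print(regression_engine_old("cpu", [[1.0, 2.0]], [3.0], lr=0.05))    # -> ()
# Reaching *args requires spelling out loss, _iterations and lr positionally first:
print(regression_engine_old("cpu", [[1.0, 2.0]], [3.0], torch.nn.MSELoss(), 10000, 0.1, "linear"))  # -> ('linear',)
# The new form takes the list of regression names directly:
print(regression_engine_new("cpu", [[1.0, 2.0]], [3.0], ["linear"]))  # -> ['linear']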
@@ -234,18 +236,18 @@ def regression_engine(device, inputs, outputs, loss = torch.nn.MSELoss(), _itera
         if 'linear' in args:
-            model = regression.SGDTrain(regression.LinearRegKernel(len(inputs)), torch.tensor(inputs).to(torch.float).cuda(), torch.tensor(outputs).to(torch.float).cuda(), iterations=_iterations, learning_rate=lr, return_losses=True)
-            regressions.append([model[0].parameter, model[1][::-1][0]])
+            model = regression.SGDTrain(regression.LinearRegKernel(len(inputs)), torch.tensor(inputs).to(torch.float).cuda(), torch.tensor([outputs]).to(torch.float).cuda(), iterations=_iterations, learning_rate=lr, return_losses=True)
+            regressions.append([model[0].parameters, model[1][::-1][0]])
         if 'log' in args:
             model = regression.SGDTrain(regression.LogRegKernel(len(inputs)), torch.tensor(inputs).to(torch.float).cuda(), torch.tensor(outputs).to(torch.float).cuda(), iterations=_iterations, learning_rate=lr, return_losses=True)
-            regressions.append([model[0].parameter, model[1][::-1][0]])
+            regressions.append([model[0].parameters, model[1][::-1][0]])
         if 'exp' in args:
             model = regression.SGDTrain(regression.ExpRegKernel(len(inputs)), torch.tensor(inputs).to(torch.float).cuda(), torch.tensor(outputs).to(torch.float).cuda(), iterations=_iterations, learning_rate=lr, return_losses=True)
-            regressions.append([model[0].parameter, model[1][::-1][0]])
+            regressions.append([model[0].parameters, model[1][::-1][0]])
         #if 'poly' in args:
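
The other change in this hunk wraps outputs in a list before building the target tensor for the CUDA linear branch, which adds a leading dimension. A small shape check with illustrative values; why only this branch gets the treatment is not explained by the commit.

import torch

outputs = [1.0, 2.0, 3.0, 4.0]                       # illustrative output vector

flat = torch.tensor(outputs).to(torch.float)         # shape: torch.Size([4])
wrapped = torch.tensor([outputs]).to(torch.float)    # shape: torch.Size([1, 4])

print(flat.shape, wrapped.shape)
# Presumably the wrapped, 2-D form lines up with row-shaped predictions from the
# kernel so MSELoss compares like-shaped tensors -- an assumption, since the
# kernel's output shape is not shown in this diff.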
@@ -254,7 +256,7 @@ def regression_engine(device, inputs, outputs, loss = torch.nn.MSELoss(), _itera
         if 'sig' in args:
             model = regression.SGDTrain(regression.SigmoidalRegKernelArthur(len(inputs)), torch.tensor(inputs).to(torch.float).cuda(), torch.tensor(outputs).to(torch.float).cuda(), iterations=_iterations, learning_rate=lr, return_losses=True)
-            regressions.append([model[0].parameter, model[1][::-1][0]])
+            regressions.append([model[0].parameters, model[1][::-1][0]])
     else:
@@ -263,17 +265,17 @@ def regression_engine(device, inputs, outputs, loss = torch.nn.MSELoss(), _itera
         if 'linear' in args:
             model = regression.SGDTrain(regression.LinearRegKernel(len(inputs)), torch.tensor(inputs).to(torch.float), torch.tensor(outputs).to(torch.float), iterations=_iterations, learning_rate=lr, return_losses=True)
-            regressions.append([model[0].parameter, model[1][::-1][0]])
+            regressions.append([model[0].parameters, model[1][::-1][0]])
         if 'log' in args:
             model = regression.SGDTrain(regression.LogRegKernel(len(inputs)), torch.tensor(inputs).to(torch.float), torch.tensor(outputs).to(torch.float), iterations=_iterations, learning_rate=lr, return_losses=True)
-            regressions.append([model[0].parameter, model[1][::-1][0]])
+            regressions.append([model[0].parameters, model[1][::-1][0]])
         if 'exp' in args:
             model = regression.SGDTrain(regression.ExpRegKernel(len(inputs)), torch.tensor(inputs).to(torch.float), torch.tensor(outputs).to(torch.float), iterations=_iterations, learning_rate=lr, return_losses=True)
-            regressions.append([model[0].parameter, model[1][::-1][0]])
+            regressions.append([model[0].parameters, model[1][::-1][0]])
         #if 'poly' in args:
@@ -282,7 +284,9 @@ def regression_engine(device, inputs, outputs, loss = torch.nn.MSELoss(), _itera
         if 'sig' in args:
             model = regression.SGDTrain(regression.SigmoidalRegKernelArthur(len(inputs)), torch.tensor(inputs).to(torch.float), torch.tensor(outputs).to(torch.float), iterations=_iterations, learning_rate=lr, return_losses=True)
-            regressions.append([model[0].parameter, model[1][::-1][0]])
+            regressions.append([model[0].parameters, model[1][::-1][0]])
+    return regressions
 @jit(forceobj=True)
 def r_squared(predictions, targets): # assumes equal size inputs
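
Taken together, the fixes mean regression_engine now returns something: one [parameters, final_loss] pair per requested regression, where model[1][::-1][0] is the last entry of the recorded loss history. A hedged usage sketch, assuming the package layout guessed at earlier and that passing a non-CUDA device string selects the CPU branch; the device check itself and the data values are outside this diff.

import torch
from analysis import analysis

inputs = [[1.0, 2.0, 3.0, 4.0]]      # illustrative feature data
outputs = [2.0, 4.0, 6.0, 8.0]       # illustrative targets

# Request two of the supported regressions by name.
results = analysis.regression_engine("cpu", inputs, outputs, ["linear", "sig"],
                                      loss=torch.nn.MSELoss(), _iterations=10000, lr=0.01)

for parameters, final_loss in results:
    print(parameters, final_loss)    # fitted kernel parameters and the last recorded loss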