
analysis pkg v 1.0.0.005

art 2020-03-05 12:44:09 -06:00
parent a0c90bad2c
commit e488b4a4d1
14 changed files with 80 additions and 25 deletions

@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: analysis
Version: 1.0.0.4
Version: 1.0.0.5
Summary: analysis package developed by Titan Scouting for The Red Alliance
Home-page: https://github.com/titanscout2022/tr2022-strategy
Author: The Titan Scouting Team

@@ -7,10 +7,12 @@
# current benchmark of optimization: 1.33 times faster
# setup:
__version__ = "1.1.12.006"
__version__ = "1.1.13.000"
# changelog should be viewed using print(analysis.__changelog__)
__changelog__ = """changelog:
1.1.13.000:
- fixed all regression types so they now work properly
1.1.12.006:
- fixed a bug with a division by zero in histo_analysis
1.1.12.005:
@@ -268,6 +270,8 @@ import numba
from numba import jit
import numpy as np
import math
import scipy
from scipy import optimize # explicit submodule import so scipy.optimize.curve_fit below resolves
import sklearn
from sklearn import *
import torch
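The new fitting paths below depend on two library calls: sklearn.linear_model.LinearRegression for the 'lin' case and scipy.optimize.curve_fit for the 'log', 'exp', and 'sig' cases. A minimal sketch of both calls on synthetic data (not part of this commit) looks like this:

import numpy as np
from scipy.optimize import curve_fit
from sklearn.linear_model import LinearRegression

x = np.arange(1.0, 6.0)   # [1, 2, 3, 4, 5]
y = 2.0 * x + 1.0         # exactly linear sample data

# least-squares line fit: slope in coef_, offset in intercept_, R^2 from score()
lin = LinearRegression().fit(x.reshape(-1, 1), y)
print(lin.coef_.tolist(), lin.intercept_, lin.score(x.reshape(-1, 1), y))

# curve_fit fits any callable model; here the same line, so popt is approximately [2.0, 1.0]
popt, pcov = curve_fit(lambda t, a, b: a * t + b, x, y)
print(popt.tolist())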
@@ -346,24 +350,62 @@ def regression(ndevice, inputs, outputs, args, loss = torch.nn.MSELoss(), _itera
if 'lin' in args: # formula: ax + b
model = Regression().SGDTrain(Regression.LinearRegKernel(len(inputs)), torch.tensor(inputs).to(torch.float).to(device), torch.tensor([outputs]).to(torch.float).to(device), iterations=_iterations, learning_rate=lr, return_losses=True)
params = model[0].parameters
params[:] = map(lambda x: x.item(), params)
regressions.append((params, model[1][::-1][0]))
try:
X = np.array(inputs).reshape(-1,1)
y = np.array(outputs)
model = sklearn.linear_model.LinearRegression().fit(X, y)
ret = model.coef_.flatten().tolist()
ret.append(model.intercept_)
regressions.append((ret, model.score(X,y)))
except Exception as e:
print(e)
pass
if 'log' in args: # formula: a log (b(x + c)) + d
model = Regression().SGDTrain(Regression.LogRegKernel(len(inputs)), torch.tensor(inputs).to(torch.float).to(device), torch.tensor(outputs).to(torch.float).to(device), iterations=_iterations, learning_rate=lr, return_losses=True)
params = model[0].parameters
params[:] = map(lambda x: x.item(), params)
regressions.append((params, model[1][::-1][0]))
try:
X = np.array(inputs)
y = np.array(outputs)
def func(x, a, b, c, d):
return a * np.log(b*(x + c)) + d
popt, pcov = scipy.optimize.curve_fit(func, X, y)
regressions.append((popt.flatten().tolist(), None))
except Exception as e:
print(e)
pass
if 'exp' in args: # formula: a e ^ (b(x + c)) + d
model = Regression().SGDTrain(Regression.ExpRegKernel(len(inputs)), torch.tensor(inputs).to(torch.float).to(device), torch.tensor(outputs).to(torch.float).to(device), iterations=_iterations, learning_rate=lr, return_losses=True)
params = model[0].parameters
params[:] = map(lambda x: x.item(), params)
regressions.append((params, model[1][::-1][0]))
try:
X = np.array(inputs)
y = np.array(outputs)
def func(x, a, b, c, d):
return a * np.exp(b*(x + c)) + d
popt, pcov = scipy.optimize.curve_fit(func, X, y)
regressions.append((popt.flatten().tolist(), None))
except Exception as e:
print(e)
pass
if 'ply' in args: # formula: a + bx^1 + cx^2 + dx^3 + ...
@@ -385,12 +427,25 @@ def regression(ndevice, inputs, outputs, args, loss = torch.nn.MSELoss(), _itera
regressions.append(plys)
if 'sig' in args: # formula: a sig (b(x + c)) + d | sig() = 1/(1 + e ^ -x)
if 'sig' in args: # formula: a tanh (b(x + c)) + d
model = Regression().SGDTrain(Regression.SigmoidalRegKernelArthur(len(inputs)), torch.tensor(inputs).to(torch.float).to(device), torch.tensor(outputs).to(torch.float).to(device), iterations=_iterations, learning_rate=lr, return_losses=True)
params = model[0].parameters
params[:] = map(lambda x: x.item(), params)
regressions.append((params, model[1][::-1][0]))
try:
X = np.array(inputs)
y = np.array(outputs)
def func(x, a, b, c, d):
return a * np.tanh(b*(x + c)) + d
popt, pcov = scipy.optimize.curve_fit(func, X, y)
regressions.append((popt.flatten().tolist(), None))
except Exception as e:
print(e)
pass
return regressions
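A hypothetical call to the rewritten function (a usage sketch, not code from the repository; the first argument is a placeholder for ndevice, whose handling sits outside the hunks shown, and torch must still be installed because the default loss argument references torch.nn.MSELoss):

data_x = [1, 2, 3, 4, 5]
data_y = [3, 5, 7, 9, 11]
fits = regression(None, data_x, data_y, ["lin", "log", "exp", "sig"])  # None is a placeholder for ndevice
# each entry is (parameter_list, score):
#   'lin'              -> ([slope, intercept], R^2 from model.score)
#   'log'/'exp'/'sig'  -> ([a, b, c, d], None)
# a fit whose try block raises only prints the exception, so the returned
# list can be shorter than the number of requested regression types
for params, score in fits:
    print(params, score)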

8 binary files changed (not shown).

@@ -2,7 +2,7 @@ import setuptools
setuptools.setup(
name="analysis", # Replace with your own username
version="1.0.0.004",
version="1.0.0.005",
author="The Titan Scouting Team",
author_email="titanscout2022@gmail.com",
description="analysis package developed by Titan Scouting for The Red Alliance",

@@ -1,6 +1,6 @@
2020ilch
balls-blocked,basic_stats,historical_analysis
balls-collected,basic_stats,historical_analysis
balls-lower,basic_stats,historical_analysis
balls-started,basic_stats,historical_analysis
balls-upper,basic_stats,historical_analysis
balls-blocked,basic_stats,historical_analysis,regression_linear,regression_logarithmic,regression_exponential,regression_polynomial,regression_sigmoidal
balls-collected,basic_stats,historical_analysis,regression_linear,regression_logarithmic,regression_exponential,regression_polynomial,regression_sigmoidal
balls-lower,basic_stats,historical_analysis,regression_linear,regression_logarithmic,regression_exponential,regression_polynomial,regression_sigmoidal
balls-started,basic_stats,historical_analysis,regression_linear,regression_logarithmic,regression_exponential,regression_polynomial,regression_sigmoidal
balls-upper,basic_stats,historical_analysis,regression_linear,regression_logarithmic,regression_exponential,regression_polynomial,regression_sigmoidal
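The changed data file uses a simple layout: the first line is the competition code (2020ilch) and every following line names a metric followed by the comma-separated tests to run on it. A hypothetical parser for that layout (parse_config is not a function in this repository) could be:

def parse_config(text):
    # first non-empty line: competition code; remaining lines: "<metric>,<test>,<test>,..."
    lines = [line.strip() for line in text.splitlines() if line.strip()]
    competition = lines[0]
    tests = {}
    for line in lines[1:]:
        metric, *metric_tests = line.split(",")
        tests[metric] = metric_tests
    return competition, tests

comp, tests = parse_config(
    "2020ilch\nballs-blocked,basic_stats,historical_analysis,regression_linear"
)
print(comp)                    # 2020ilch
print(tests["balls-blocked"])  # ['basic_stats', 'historical_analysis', 'regression_linear']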