Mirror of https://github.com/titanscouting/tra-superscript.git (synced 2024-11-09 22:44:44 +00:00)
commit 7be48af85e (parent 3ac4e96d2d)

implemented dynamic module loading/running; fixed minor issue in metric module

Signed-off-by: Arthur Lu <learthurgo@gmail.com>
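At a high level, this commit replaces the hard-coded match/metric/pit pipeline in main() with a registry of module classes that share one lifecycle interface. Below is a minimal sketch of that interface, inferred from the call sites in the diff: the constructor arguments mirror modules[m](config_modules[m], client, tbakey, loop_start, competition), and the method bodies are illustrative placeholders, not the repository's code.

# Sketch of the module interface implied by the dispatch loop in main().
# Constructor arguments mirror the call site in the diff; bodies are stubs.
class ExampleModule:

	def __init__(self, config, client, tbakey, timestamp, competition):
		self.config = config            # this module's section of the config
		self.client = client            # database client handle
		self.tbakey = tbakey            # The Blue Alliance API key
		self.timestamp = timestamp      # start time of the current loop pass
		self.competition = competition  # competition key to process

	def validate_config(self):
		# The real modules appear to return a (bool, str) pair such as
		# (True, ""); the dispatch loop only checks truthiness.
		return True

	def load_data(self):
		self.data = []                  # e.g. pull matches from TBA

	def process_data(self, exec_threads):
		self.results = {}               # run this module's analyses

	def push_results(self):
		pass                            # upload self.results to the database

With this shape, main() only has to call validate_config, load_data, process_data, and push_results in order, without knowing which module it is driving.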
@@ -150,12 +150,12 @@ class Metric:
 		return True, ""
 
 	def load_data(self):
-		self.data = d.pull_new_tba_matches(self.apikey, self.competition, self.timestamp)
+		self.data = d.pull_new_tba_matches(self.tbakey, self.competition, self.timestamp)
 
 	def process_data(self, exec_threads):
 
-		elo_N = self.config["elo"]["N"]
-		elo_K = self.config["elo"]["K"]
+		elo_N = self.config["tests"]["elo"]["N"]
+		elo_K = self.config["tests"]["elo"]["K"]
 
 		matches = self.data
 
@@ -164,8 +164,8 @@ class Metric:
 
 		for match in matches:
 
-			red = d.load_metric(self.apikey, self.competition, match, "red", self.config)
-			blu = d.load_metric(self.apikey, self.competition, match, "blue", self.config)
+			red = d.load_metric(self.apikey, self.competition, match, "red", self.config["tests"])
+			blu = d.load_metric(self.apikey, self.competition, match, "blue", self.config["tests"])
 
 			elo_red_total = 0
 			elo_blu_total = 0
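The two Metric hunks above reflect a config reshuffle: per-test settings now live under a "tests" key inside the module's config rather than at its top level. A hypothetical snippet showing the new shape; only the ["tests"]["elo"]["N"/"K"] key path comes from the diff, the values are invented:

# Hypothetical Metric module config; the numbers are placeholders.
metric_config = {
	"tests": {
		"elo": {
			"N": 400,   # rating scale divisor (placeholder value)
			"K": 24,    # update step size (placeholder value)
		}
	}
}

elo_N = metric_config["tests"]["elo"]["N"]  # previously metric_config["elo"]["N"]
elo_K = metric_config["tests"]["elo"]["K"]  # previously metric_config["elo"]["K"]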
@@ -163,8 +163,9 @@ import warnings
 import zmq
 
 from interface import splash, log, ERR, INF, stdout, stderr
-from data import get_previous_time, pull_new_tba_matches, set_current_time, load_match, push_match, load_pit, push_pit, get_database_config, set_database_config, check_new_database_matches
-from processing import matchloop, metricloop, pitloop
+from data import get_previous_time, set_current_time, get_database_config, set_database_config, check_new_database_matches
+from module import Match, Metric, Pit
+#from processing import matchloop, metricloop, pitloop
 
 config_path = "config.json"
 sample_json = """{
@@ -254,6 +255,8 @@ def main(send, verbose = False, profile = False, debug = False):
 	if verbose:
 		splash(__version__)
 
+	modules = {"match": Match, "metric": Metric, "pit": Pit}
+
 	while True:
 
 		try:
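The modules dict added above acts as a small class registry: keys in the config select which module classes run, and the dispatch loop in the next hunk instantiates one class per configured key. A standalone illustration of the pattern; the classes here are simplified stand-ins for the project's Match, Metric, and Pit (single-argument constructors, unlike the real five-argument ones):

class Match:
	def __init__(self, config):
		self.config = config

class Pit:
	def __init__(self, config):
		self.config = config

# keys come from the config; only recognized ones map to classes
modules = {"match": Match, "pit": Pit}
config_modules = {"match": {}, "unknown": {}}   # hypothetical config contents

for m in config_modules:
	if m in modules:        # unrecognized keys are silently skipped
		instance = modules[m](config_modules[m])
		print(m, "->", type(instance).__name__)

Keeping the registry in code while the selection lives in config means adding a module only requires a class and one dict entry.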
@@ -289,31 +292,23 @@ def main(send, verbose = False, profile = False, debug = False):
 				exit_code = 1
 				close_all()
 				break
-			flag, exec_threads, competition, match_tests, metrics_tests, pit_tests = parse_config_variable(send, config)
+			flag, exec_threads, competition, config_modules = parse_config_variable(send, config)
 			if flag:
 				exit_code = 1
 				close_all()
 				break
 
-			start = time.time()
-			send(stdout, INF, "loading match, metric, pit data (this may take a few seconds)")
-			match_data = load_match(client, competition)
-			metrics_data = pull_new_tba_matches(tbakey, competition, loop_start)
-			pit_data = load_pit(client, competition)
-			send(stdout, INF, "finished loading match, metric, pit data in "+ str(time.time() - start) + " seconds")
-
-			start = time.time()
-			send(stdout, INF, "performing analysis on match, metrics, pit data")
-			match_results = matchloop(client, competition, match_data, match_tests, exec_threads)
-			metrics_results = metricloop(client, competition, metrics_data, metrics_tests)
-			pit_results = pitloop(client, competition, pit_data, pit_tests)
-			send(stdout, INF, "finished analysis in " + str(time.time() - start) + " seconds")
-
-			start = time.time()
-			send(stdout, INF, "uploading match, metrics, pit results to database")
-			push_match(client, competition, match_results)
-			push_pit(client, competition, pit_results)
-			send(stdout, INF, "finished uploading results in " + str(time.time() - start) + " seconds")
+			for m in config_modules:
+				if m in modules:
+					start = time.time()
+					current_module = modules[m](config_modules[m], client, tbakey, loop_start, competition)
+					valid = current_module.validate_config()
+					if not valid:
+						continue
+					current_module.load_data()
+					current_module.process_data(exec_threads)
+					current_module.push_results()
+					print(m + " module finished in " + str(time.time() - start) + " seconds")
 
 			if debug:
 				f = open("matchloop.log", "w+")
@@ -439,37 +434,21 @@ def parse_config_variable(send, config):
 		send(stderr, ERR, "could not find competition field in config", code = 101)
 		exit_flag = True
 	try:
-		match_tests = config["variable"]["statistics"]["match"]
+		modules = config["variable"]["modules"]
 	except:
-		send(stderr, ERR, "could not find match field in config", code = 102)
-		exit_flag = True
-	try:
-		metrics_tests = config["variable"]["statistics"]["metric"]
-	except:
-		send(stderr, ERR, "could not find metrics field in config", code = 103)
-		exit_flag = True
-	try:
-		pit_tests = config["variable"]["statistics"]["pit"]
-	except:
-		send(stderr, ERR, "could not find pit field in config", code = 104)
+		send(stderr, ERR, "could not find modules field in config", code = 102)
 		exit_flag = True
 
 	if competition == None or competition == "":
 		send(stderr, ERR, "competition field in config must not be empty", code = 105)
 		exit_flag = True
-	if match_tests == None:
-		send(stderr, ERR, "matchfield in config must not be empty", code = 106)
-		exit_flag = True
-	if metrics_tests == None:
-		send(stderr, ERR, "metrics field in config must not be empty", code = 107)
-		exit_flag = True
-	if pit_tests == None:
-		send(stderr, ERR, "pit field in config must not be empty", code = 108)
+	if modules == None:
+		send(stderr, ERR, "modules in config must not be empty", code = 106)
 		exit_flag = True
 
 	send(stdout, INF, "found and loaded competition, match, metrics, pit from config")
 
-	return exit_flag, exec_threads, competition, match_tests, metrics_tests, pit_tests
+	return exit_flag, exec_threads, competition, modules
 
 def resolve_config_conflicts(send, client, config, preference, sync):
 
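With parse_config_variable now reading config["variable"]["modules"], the config file needs a modules mapping instead of the three statistics fields. A hypothetical fragment in the style of the repo's sample_json; only the "variable" -> "modules" key path and the module names come from this diff, the per-module contents are illustrative:

# Hypothetical config.json fragment for the new layout; inner contents
# are placeholders, only the key path and module names are from the diff.
sample_config = """{
	"variable": {
		"modules": {
			"match": {},
			"metric": {"tests": {"elo": {"N": 400, "K": 24}}},
			"pit": {}
		}
	}
}"""

Each value under "modules" is what main() passes as config_modules[m] to the matching class constructor.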