superscript v 0.8.5

Former-commit-id: 4de011ef45
Arthur Lu 2021-04-10 06:08:18 +00:00
parent 7abfb2d90a
commit 014570930a
3 changed files with 72 additions and 53 deletions

.gitignore

@@ -7,3 +7,5 @@
 **/*.egg-info/
 **/config.json
 **/tra_analysis/
+**/errorlog.txt

@@ -1 +1 @@
-e5f402045a28f8602e17dbd7e4ca6641c33ccd65
+d7d6e7a11e9acaf8966e4a25edcacad84793282e

@@ -3,11 +3,14 @@
 # Notes:
 # setup:
-__version__ = "0.8.4"
+__version__ = "0.8.5"
 # changelog should be viewed using print(analysis.__changelog__)
 __changelog__ = """changelog:
-	0.84:
+	0.8.5:
+		- added more graceful KeyboardInterrupt exiting
+		- redirected stderr to errorlog.txt
+	0.8.4:
 		- added better error message for missing config.json
 		- added automatic config.json creation
 		- added splash text with version and system info
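The two 0.8.5 changelog entries boil down to one pattern: point sys.stderr at errorlog.txt so tracebacks are captured in a file, and wrap each pass of the main loop in try/except so a Ctrl-C shuts the worker pool down cleanly instead of leaving processes behind. The following is a minimal sketch of that pattern only, not the repository's code; run_iteration() and the pool size of 2 are placeholders.

import sys
import time
from multiprocessing import Pool

def run_iteration():
	# stand-in for one pass of the analysis loop
	time.sleep(1)

def main():
	# send stderr to errorlog.txt so tracebacks are logged rather than printed
	sys.stderr = open("errorlog.txt", "w")
	exec_threads = Pool(processes = 2)
	while True:
		try:
			run_iteration()
		except KeyboardInterrupt:
			# terminate the worker processes before exiting so none are orphaned
			print("\n[OK] caught KeyboardInterrupt, killing processes")
			exec_threads.terminate()
			print("[OK] processes killed, exiting")
			sys.exit(0)

if __name__ == "__main__":
	main()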
@@ -138,6 +141,7 @@ from pathlib import Path
 from multiprocessing import Pool
 import platform
 import matplotlib.pyplot as plt
+import sys
 from concurrent.futures import ThreadPoolExecutor
 import time
 import warnings
@@ -148,71 +152,84 @@ def main():

 	global exec_threads

+	sys.stderr = open("errorlog.txt", "w")
+
 	warnings.filterwarnings("ignore")
 	splash()

 	while (True):

-		current_time = time.time()
-		print("[OK] time: " + str(current_time))
-
-		config = load_config("config.json")
-		competition = config["competition"]
-		match_tests = config["statistics"]["match"]
-		pit_tests = config["statistics"]["pit"]
-		metrics_tests = config["statistics"]["metric"]
-		print("[OK] configs loaded")
-
-		print("[OK] starting threads")
-		cfg_max_threads = config["max-threads"]
-		sys_max_threads = os.cpu_count()
-		if cfg_max_threads > -sys_max_threads and cfg_max_threads < 0 :
-			alloc_processes = sys_max_threads + cfg_max_threads
-		elif cfg_max_threads > 0 and cfg_max_threads < 1:
-			alloc_processes = math.floor(cfg_max_threads * sys_max_threads)
-		elif cfg_max_threads > 1 and cfg_max_threads <= sys_max_threads:
-			alloc_processes = cfg_max_threads
-		elif cfg_max_threads == 0:
-			alloc_processes = sys_max_threads
-		else:
-			print("[ERROR] Invalid number of processes, must be between -" + str(sys_max_threads) + " and " + str(sys_max_threads))
-			exit()
-		exec_threads = Pool(processes = alloc_processes)
-		print("[OK] " + str(alloc_processes) + " threads started")
-
-		apikey = config["key"]["database"]
-		tbakey = config["key"]["tba"]
-		print("[OK] loaded keys")
-
-		previous_time = get_previous_time(apikey)
-		print("[OK] analysis backtimed to: " + str(previous_time))
-
-		print("[OK] loading data")
-		start = time.time()
-		match_data = load_match(apikey, competition)
-		pit_data = load_pit(apikey, competition)
-		print("[OK] loaded data in " + str(time.time() - start) + " seconds")
-
-		print("[OK] running match stats")
-		start = time.time()
-		matchloop(apikey, competition, match_data, match_tests)
-		print("[OK] finished match stats in " + str(time.time() - start) + " seconds")
-
-		print("[OK] running team metrics")
-		start = time.time()
-		metricloop(tbakey, apikey, competition, previous_time, metrics_tests)
-		print("[OK] finished team metrics in " + str(time.time() - start) + " seconds")
-
-		print("[OK] running pit analysis")
-		start = time.time()
-		pitloop(apikey, competition, pit_data, pit_tests)
-		print("[OK] finished pit analysis in " + str(time.time() - start) + " seconds")
-
-		set_current_time(apikey, current_time)
-		print("[OK] finished all tests, looping")
-
-		print_hrule()
+		try:
+
+			current_time = time.time()
+			print("[OK] time: " + str(current_time))
+
+			config = load_config("config.json")
+			competition = config["competition"]
+			match_tests = config["statistics"]["match"]
+			pit_tests = config["statistics"]["pit"]
+			metrics_tests = config["statistics"]["metric"]
+			print("[OK] configs loaded")
+
+			print("[OK] starting threads")
+			cfg_max_threads = config["max-threads"]
+			sys_max_threads = os.cpu_count()
+			if cfg_max_threads > -sys_max_threads and cfg_max_threads < 0 :
+				alloc_processes = sys_max_threads + cfg_max_threads
+			elif cfg_max_threads > 0 and cfg_max_threads < 1:
+				alloc_processes = math.floor(cfg_max_threads * sys_max_threads)
+			elif cfg_max_threads > 1 and cfg_max_threads <= sys_max_threads:
+				alloc_processes = cfg_max_threads
+			elif cfg_max_threads == 0:
+				alloc_processes = sys_max_threads
+			else:
+				print("[ERROR] Invalid number of processes, must be between -" + str(sys_max_threads) + " and " + str(sys_max_threads))
+				exit()
+			exec_threads = Pool(processes = alloc_processes)
+			print("[OK] " + str(alloc_processes) + " threads started")
+
+			apikey = config["key"]["database"]
+			tbakey = config["key"]["tba"]
+			print("[OK] loaded keys")
+
+			previous_time = get_previous_time(apikey)
+			print("[OK] analysis backtimed to: " + str(previous_time))
+
+			print("[OK] loading data")
+			start = time.time()
+			match_data = load_match(apikey, competition)
+			pit_data = load_pit(apikey, competition)
+			print("[OK] loaded data in " + str(time.time() - start) + " seconds")
+
+			print("[OK] running match stats")
+			start = time.time()
+			matchloop(apikey, competition, match_data, match_tests)
+			print("[OK] finished match stats in " + str(time.time() - start) + " seconds")
+
+			print("[OK] running team metrics")
+			start = time.time()
+			metricloop(tbakey, apikey, competition, previous_time, metrics_tests)
+			print("[OK] finished team metrics in " + str(time.time() - start) + " seconds")
+
+			print("[OK] running pit analysis")
+			start = time.time()
+			pitloop(apikey, competition, pit_data, pit_tests)
+			print("[OK] finished pit analysis in " + str(time.time() - start) + " seconds")
+
+			set_current_time(apikey, current_time)
+			print("[OK] finished all tests, looping")
+
+			print_hrule()
+
+		except KeyboardInterrupt:
+			print("\n[OK] caught KeyboardInterrupt, killing processes")
+			exec_threads.terminate()
+			print("[OK] processes killed, exiting")
+			exit()
+
+		else:
+			pass

 	#clear()
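For reference, the max-threads branch in main() encodes a small convention for the config value: a negative number leaves that many CPUs free, a fraction between 0 and 1 uses that share of the CPUs, 0 uses every CPU, and a whole number greater than 1 (up to the CPU count) is taken literally; anything else, including exactly 1, falls through to the error branch. The following is a standalone sketch of that rule under those assumptions; allocate_processes is a hypothetical helper name, not a function in superscript.py.

import math
import os

def allocate_processes(cfg_max_threads, sys_max_threads = None):
	# mirrors the if/elif chain in main(); returns None where main() prints an error and exits
	if sys_max_threads is None:
		sys_max_threads = os.cpu_count()
	if -sys_max_threads < cfg_max_threads < 0:
		return sys_max_threads + cfg_max_threads              # negative: leave |n| cores free
	elif 0 < cfg_max_threads < 1:
		return math.floor(cfg_max_threads * sys_max_threads)  # fraction: use that share of cores
	elif 1 < cfg_max_threads <= sys_max_threads:
		return cfg_max_threads                                # whole number: use exactly that many
	elif cfg_max_threads == 0:
		return sys_max_threads                                # zero: use every core
	return None                                               # out of range (including exactly 1): invalid

# on an 8-core machine: -2 -> 6, 0.5 -> 4, 0 -> 8, 3 -> 3
print(allocate_processes(-2, 8), allocate_processes(0.5, 8), allocate_processes(0, 8), allocate_processes(3, 8))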