From 089ff7ec0171a52c5fb4bc244af4c4eb8c442f60 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Sat, 12 Jun 2021 07:09:26 +0000 Subject: [PATCH 01/29] separated gui and cli dev files began refactoring superscript in cli application Former-commit-id: ee309b9f8b6c5317422f1b2593bde027efb7337b --- build/build-CLI.bat | 2 +- build/build-CLI.sh | 2 +- src/cli/cli_interface.py | 44 +++++++ src/{ => cli}/data.py | 0 src/cli/superscript.py | 214 +++++++++++++++++++++++++++++++++ src/{ => cli}/superscript.spec | 0 src/gui/data.py | 129 ++++++++++++++++++++ src/{ => gui}/design.kv | 0 src/{ => gui}/main.py | 0 src/{ => gui}/superscript.py | 0 10 files changed, 389 insertions(+), 2 deletions(-) create mode 100644 src/cli/cli_interface.py rename src/{ => cli}/data.py (100%) create mode 100644 src/cli/superscript.py rename src/{ => cli}/superscript.spec (100%) create mode 100644 src/gui/data.py rename src/{ => gui}/design.kv (100%) rename src/{ => gui}/main.py (100%) rename src/{ => gui}/superscript.py (100%) diff --git a/build/build-CLI.bat b/build/build-CLI.bat index 522e5fc..22dd93c 100644 --- a/build/build-CLI.bat +++ b/build/build-CLI.bat @@ -1,4 +1,4 @@ -set pathtospec="../src/superscript.spec" +set pathtospec="../src/cli/superscript.spec" set pathtodist="../dist/" set pathtowork="temp/" diff --git a/build/build-CLI.sh b/build/build-CLI.sh index 9837f2a..302265b 100644 --- a/build/build-CLI.sh +++ b/build/build-CLI.sh @@ -1,4 +1,4 @@ -pathtospec="../src/superscript.spec" +pathtospec="../src/cli/superscript.spec" pathtodist="../dist/" pathtowork="temp/" diff --git a/src/cli/cli_interface.py b/src/cli/cli_interface.py new file mode 100644 index 0000000..9fd5765 --- /dev/null +++ b/src/cli/cli_interface.py @@ -0,0 +1,44 @@ +import sys +import time +from os import system, name +import platform + +empty_delim = " " +hard_divided_delim = "|" +soft_divided_delim = ":" +l_brack = "[" +r_brack = "]" + +ERR = "[ERR]" +INF = "[INF]" + +stdout = sys.stdout +stderr = sys.stderr + +def log(target, level, message, code = 0): + + message = time.ctime() + empty_delim + str(level) + l_brack + str(code) + r_brack + empty_delim + soft_divided_delim + empty_delim + message + print(message, file = target) + +def clear(): + if name == "nt": + system("cls") + else: + system("clear") + +def splash(version): + + def hrule(): + print("#"+38*"-"+"#") + def box(s): + temp = "|" + temp += s + temp += (40-len(s)-2)*" " + temp += "|" + print(temp) + + hrule() + box(" superscript version: " + version) + box(" os: " + platform.system()) + box(" python: " + platform.python_version()) + hrule() \ No newline at end of file diff --git a/src/data.py b/src/cli/data.py similarity index 100% rename from src/data.py rename to src/cli/data.py diff --git a/src/cli/superscript.py b/src/cli/superscript.py new file mode 100644 index 0000000..dfd0151 --- /dev/null +++ b/src/cli/superscript.py @@ -0,0 +1,214 @@ +# Titan Robotics Team 2022: Superscript Script +# Written by Arthur Lu, Jacob Levine, and Dev Singh +# Notes: +# setup: + +__version__ = "0.8.6" + +# changelog should be viewed using print(analysis.__changelog__) +__changelog__ = """changelog: + 0.8.6: + - added proper main function + 0.8.5: + - added more gradeful KeyboardInterrupt exiting + - redirected stderr to errorlog.txt + 0.8.4: + - added better error message for missing config.json + - added automatic config.json creation + - added splash text with version and system info + 0.8.3: + - updated matchloop with new regression format (requires tra_analysis 3.x) + 0.8.2: + - readded while 
true to main function + - added more thread config options + 0.8.1: + - optimized matchloop further by bypassing GIL + 0.8.0: + - added multithreading to matchloop + - tweaked user log + 0.7.0: + - finished implementing main function + 0.6.2: + - integrated get_team_rankings.py as get_team_metrics() function + - integrated visualize_pit.py as graph_pit_histogram() function + 0.6.1: + - bug fixes with analysis.Metric() calls + - modified metric functions to use config.json defined default values + 0.6.0: + - removed main function + - changed load_config function + - added save_config function + - added load_match function + - renamed simpleloop to matchloop + - moved simplestats function inside matchloop + - renamed load_metrics to load_metric + - renamed metricsloop to metricloop + - split push to database functions amon push_match, push_metric, push_pit + - moved + 0.5.2: + - made changes due to refactoring of analysis + 0.5.1: + - text fixes + - removed matplotlib requirement + 0.5.0: + - improved user interface + 0.4.2: + - removed unessasary code + 0.4.1: + - fixed bug where X range for regression was determined before sanitization + - better sanitized data + 0.4.0: + - fixed spelling issue in __changelog__ + - addressed nan bug in regression + - fixed errors on line 335 with metrics calling incorrect key "glicko2" + - fixed errors in metrics computing + 0.3.0: + - added analysis to pit data + 0.2.1: + - minor stability patches + - implemented db syncing for timestamps + - fixed bugs + 0.2.0: + - finalized testing and small fixes + 0.1.4: + - finished metrics implement, trueskill is bugged + 0.1.3: + - working + 0.1.2: + - started implement of metrics + 0.1.1: + - cleaned up imports + 0.1.0: + - tested working, can push to database + 0.0.9: + - tested working + - prints out stats for the time being, will push to database later + 0.0.8: + - added data import + - removed tba import + - finished main method + 0.0.7: + - added load_config + - optimized simpleloop for readibility + - added __all__ entries + - added simplestats engine + - pending testing + 0.0.6: + - fixes + 0.0.5: + - imported pickle + - created custom database object + 0.0.4: + - fixed simpleloop to actually return a vector + 0.0.3: + - added metricsloop which is unfinished + 0.0.2: + - added simpleloop which is untested until data is provided + 0.0.1: + - created script + - added analysis, numba, numpy imports +""" + +__author__ = ( + "Arthur Lu ", + "Jacob Levine ", +) + +__all__ = [ + "load_config", + "save_config", + "get_previous_time", + "load_match", + "matchloop", + "load_metric", + "metricloop", + "load_pit", + "pitloop", + "push_match", + "push_metric", + "push_pit", +] + +# imports: + +import json + +from cli_interface import splash, log, ERR, INF, stdout, stderr + +config_path = "config.json" +sample_json = """{ + "max-threads": 0.5, + "team": "", + "competition": "2020ilch", + "key":{ + "database":"", + "tba":"" + }, + "statistics":{ + "match":{ + "balls-blocked":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], + "balls-collected":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], + "balls-lower-teleop":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], + 
"balls-lower-auto":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], + "balls-started":["basic_stats","historical_analyss","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], + "balls-upper-teleop":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], + "balls-upper-auto":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"] + + }, + "metric":{ + "elo":{ + "score":1500, + "N":400, + "K":24 + }, + "gl2":{ + "score":1500, + "rd":250, + "vol":0.06 + }, + "ts":{ + "mu":25, + "sigma":8.33 + } + }, + "pit":{ + "wheel-mechanism":true, + "low-balls":true, + "high-balls":true, + "wheel-success":true, + "strategic-focus":true, + "climb-mechanism":true, + "attitude":true + } + } +}""" + +def main(): + + splash(__version__) + +def load_config(config_vector): + try: + f = open(path, "r") + except: + log(stderr, ERR, "could not find config at <" + path + ">, generating blank config and exiting") + f = open(path, "w") + f.write(sample_json) + f.close() + return 1 + + config_vector = json.load(f) + f.close() + return 0 + +def save_config(path, config_vector): + try: + f = open(path) + json.dump(config_vector) + f.close() + return 0 + except: + return 1 + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/src/superscript.spec b/src/cli/superscript.spec similarity index 100% rename from src/superscript.spec rename to src/cli/superscript.spec diff --git a/src/gui/data.py b/src/gui/data.py new file mode 100644 index 0000000..641aba7 --- /dev/null +++ b/src/gui/data.py @@ -0,0 +1,129 @@ +import requests +import pymongo +import pandas as pd +import time + +def pull_new_tba_matches(apikey, competition, cutoff): + api_key= apikey + x=requests.get("https://www.thebluealliance.com/api/v3/event/"+competition+"/matches/simple", headers={"X-TBA-Auth_Key":api_key}) + out = [] + for i in x.json(): + if i["actual_time"] != None and i["actual_time"]-cutoff >= 0 and i["comp_level"] == "qm": + out.append({"match" : i['match_number'], "blue" : list(map(lambda x: int(x[3:]), i['alliances']['blue']['team_keys'])), "red" : list(map(lambda x: int(x[3:]), i['alliances']['red']['team_keys'])), "winner": i["winning_alliance"]}) + return out + +def get_team_match_data(apikey, competition, team_num): + client = pymongo.MongoClient(apikey) + db = client.data_scouting + mdata = db.matchdata + out = {} + for i in mdata.find({"competition" : competition, "team_scouted": team_num}): + out[i['match']] = i['data'] + return pd.DataFrame(out) + +def get_team_pit_data(apikey, competition, team_num): + client = pymongo.MongoClient(apikey) + db = client.data_scouting + mdata = db.pitdata + out = {} + return mdata.find_one({"competition" : competition, "team_scouted": team_num})["data"] + +def get_team_metrics_data(apikey, competition, team_num): + client = pymongo.MongoClient(apikey) + db = client.data_processing + mdata = db.team_metrics + return mdata.find_one({"competition" : competition, "team": team_num}) + +def get_match_data_formatted(apikey, competition): + client = pymongo.MongoClient(apikey) + db = client.data_scouting + mdata = db.teamlist + x=mdata.find_one({"competition":competition}) + out = {} + for i in x: + try: + out[int(i)] = 
unkeyify_2l(get_team_match_data(apikey, competition, int(i)).transpose().to_dict()) + except: + pass + return out + +def get_metrics_data_formatted(apikey, competition): + client = pymongo.MongoClient(apikey) + db = client.data_scouting + mdata = db.teamlist + x=mdata.find_one({"competition":competition}) + out = {} + for i in x: + try: + out[int(i)] = d.get_team_metrics_data(apikey, competition, int(i)) + except: + pass + return out + +def get_pit_data_formatted(apikey, competition): + client = pymongo.MongoClient(apikey) + db = client.data_scouting + mdata = db.teamlist + x=mdata.find_one({"competition":competition}) + out = {} + for i in x: + try: + out[int(i)] = get_team_pit_data(apikey, competition, int(i)) + except: + pass + return out + +def get_pit_variable_data(apikey, competition): + client = pymongo.MongoClient(apikey) + db = client.data_processing + mdata = db.team_pit + out = {} + return mdata.find() + +def get_pit_variable_formatted(apikey, competition): + temp = get_pit_variable_data(apikey, competition) + out = {} + for i in temp: + out[i["variable"]] = i["data"] + return out + +def push_team_tests_data(apikey, competition, team_num, data, dbname = "data_processing", colname = "team_tests"): + client = pymongo.MongoClient(apikey) + db = client[dbname] + mdata = db[colname] + mdata.replace_one({"competition" : competition, "team": team_num}, {"_id": competition+str(team_num)+"am", "competition" : competition, "team" : team_num, "data" : data}, True) + +def push_team_metrics_data(apikey, competition, team_num, data, dbname = "data_processing", colname = "team_metrics"): + client = pymongo.MongoClient(apikey) + db = client[dbname] + mdata = db[colname] + mdata.replace_one({"competition" : competition, "team": team_num}, {"_id": competition+str(team_num)+"am", "competition" : competition, "team" : team_num, "metrics" : data}, True) + +def push_team_pit_data(apikey, competition, variable, data, dbname = "data_processing", colname = "team_pit"): + client = pymongo.MongoClient(apikey) + db = client[dbname] + mdata = db[colname] + mdata.replace_one({"competition" : competition, "variable": variable}, {"competition" : competition, "variable" : variable, "data" : data}, True) + +def get_analysis_flags(apikey, flag): + client = pymongo.MongoClient(apikey) + db = client.data_processing + mdata = db.flags + return mdata.find_one({flag:{"$exists":True}}) + +def set_analysis_flags(apikey, flag, data): + client = pymongo.MongoClient(apikey) + db = client.data_processing + mdata = db.flags + return mdata.replace_one({flag:{"$exists":True}}, data, True) + +def unkeyify_2l(layered_dict): + out = {} + for i in layered_dict.keys(): + add = [] + sortkey = [] + for j in layered_dict[i].keys(): + add.append([j,layered_dict[i][j]]) + add.sort(key = lambda x: x[0]) + out[i] = list(map(lambda x: x[1], add)) + return out \ No newline at end of file diff --git a/src/design.kv b/src/gui/design.kv similarity index 100% rename from src/design.kv rename to src/gui/design.kv diff --git a/src/main.py b/src/gui/main.py similarity index 100% rename from src/main.py rename to src/gui/main.py diff --git a/src/superscript.py b/src/gui/superscript.py similarity index 100% rename from src/superscript.py rename to src/gui/superscript.py From b3c26ce2cfc4cb04e776b4a58f06d94883d9dea0 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Sat, 12 Jun 2021 07:16:11 +0000 Subject: [PATCH 02/29] renamed cli_interface.py to interface.py Former-commit-id: b94c9125b37bb348a523951f6f6977522da1ab7c --- src/cli/{cli_interface.py => 
interface.py} | 0 src/cli/superscript.py | 8 ++++++-- 2 files changed, 6 insertions(+), 2 deletions(-) rename src/cli/{cli_interface.py => interface.py} (100%) diff --git a/src/cli/cli_interface.py b/src/cli/interface.py similarity index 100% rename from src/cli/cli_interface.py rename to src/cli/interface.py diff --git a/src/cli/superscript.py b/src/cli/superscript.py index dfd0151..09ef513 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -3,10 +3,14 @@ # Notes: # setup: -__version__ = "0.8.6" +__version__ = "0.9.0" # changelog should be viewed using print(analysis.__changelog__) __changelog__ = """changelog: + 0.9.0: + - moved printing and logging related functions to interface.py (changelog will stay in this file) + - changed function return files for load_config and save_config to standard C values (0 for success, 1 for error) + - added local variables for config location 0.8.6: - added proper main function 0.8.5: @@ -187,7 +191,7 @@ def main(): splash(__version__) -def load_config(config_vector): +def load_config(path, config_vector): try: f = open(path, "r") except: From 4c65e88903323afe2ef75e2eede861d7b57ad098 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Sat, 12 Jun 2021 23:57:16 +0000 Subject: [PATCH 03/29] finished refactor Former-commit-id: b2f2dfe2a4989da5c62c2895eae73e9000760901 --- src/cli/dataset.py | 74 +++++++++++++++ src/cli/interface.py | 4 +- src/cli/processing.py | 191 ++++++++++++++++++++++++++++++++++++++ src/cli/superscript.py | 204 +++++++++++++++++++++++++++++++++++++---- 4 files changed, 455 insertions(+), 18 deletions(-) create mode 100644 src/cli/dataset.py create mode 100644 src/cli/processing.py diff --git a/src/cli/dataset.py b/src/cli/dataset.py new file mode 100644 index 0000000..5a69cc9 --- /dev/null +++ b/src/cli/dataset.py @@ -0,0 +1,74 @@ +import data as d + +def get_previous_time(apikey): + + previous_time = d.get_analysis_flags(apikey, "latest_update") + + if previous_time == None: + + d.set_analysis_flags(apikey, "latest_update", 0) + previous_time = 0 + + else: + + previous_time = previous_time["latest_update"] + + return previous_time + +def set_current_time(apikey, current_time): + + d.set_analysis_flags(apikey, "latest_update", {"latest_update":current_time}) + +def load_match(apikey, competition): + + return d.get_match_data_formatted(apikey, competition) + +def load_metric(apikey, competition, match, group_name, metrics): + + group = {} + + for team in match[group_name]: + + db_data = d.get_team_metrics_data(apikey, competition, team) + + if d.get_team_metrics_data(apikey, competition, team) == None: + + elo = {"score": metrics["elo"]["score"]} + gl2 = {"score": metrics["gl2"]["score"], "rd": metrics["gl2"]["rd"], "vol": metrics["gl2"]["vol"]} + ts = {"mu": metrics["ts"]["mu"], "sigm+a": metrics["ts"]["sigma"]} + + group[team] = {"elo": elo, "gl2": gl2, "ts": ts} + + else: + + metrics = db_data["metrics"] + + elo = metrics["elo"] + gl2 = metrics["gl2"] + ts = metrics["ts"] + + group[team] = {"elo": elo, "gl2": gl2, "ts": ts} + + return group + +def load_pit(apikey, competition): + + return d.get_pit_data_formatted(apikey, competition) + +def push_match(apikey, competition, results): + + for team in results: + + d.push_team_tests_data(apikey, competition, team, results[team]) + +def push_metric(apikey, competition, metric): + + for team in metric: + + d.push_team_metrics_data(apikey, competition, team, metric[team]) + +def push_pit(apikey, competition, pit): + + for variable in pit: + + d.push_team_pit_data(apikey, 
competition, variable, pit[variable]) \ No newline at end of file diff --git a/src/cli/interface.py b/src/cli/interface.py index 9fd5765..9ee821a 100644 --- a/src/cli/interface.py +++ b/src/cli/interface.py @@ -5,7 +5,7 @@ import platform empty_delim = " " hard_divided_delim = "|" -soft_divided_delim = ":" +soft_divided_delim = "|" l_brack = "[" r_brack = "]" @@ -17,7 +17,7 @@ stderr = sys.stderr def log(target, level, message, code = 0): - message = time.ctime() + empty_delim + str(level) + l_brack + str(code) + r_brack + empty_delim + soft_divided_delim + empty_delim + message + message = time.ctime() + empty_delim + str(level) + l_brack + f"{code:04}" + r_brack + empty_delim + soft_divided_delim + empty_delim + message print(message, file = target) def clear(): diff --git a/src/cli/processing.py b/src/cli/processing.py new file mode 100644 index 0000000..1f7e322 --- /dev/null +++ b/src/cli/processing.py @@ -0,0 +1,191 @@ +import numpy as np + +from tra_analysis import Analysis as an +from dataset import push_metric +from data import pull_new_tba_matches + +def simplestats(data_test): + + data = np.array(data_test[0]) + data = data[np.isfinite(data)] + ranges = list(range(len(data))) + + test = data_test[1] + + if test == "basic_stats": + return an.basic_stats(data) + + if test == "historical_analysis": + return an.histo_analysis([ranges, data]) + + if test == "regression_linear": + return an.regression(ranges, data, ['lin']) + + if test == "regression_logarithmic": + return an.regression(ranges, data, ['log']) + + if test == "regression_exponential": + return an.regression(ranges, data, ['exp']) + + if test == "regression_polynomial": + return an.regression(ranges, data, ['ply']) + + if test == "regression_sigmoidal": + return an.regression(ranges, data, ['sig']) + +def matchloop(apikey, competition, data, tests, exec_threads): + + short_mapping = {"regression_linear": "lin", "regression_logarithmic": "log", "regression_exponential": "exp", "regression_polynomial": "ply", "regression_sigmoidal": "sig"} + + class AutoVivification(dict): + def __getitem__(self, item): + try: + return dict.__getitem__(self, item) + except KeyError: + value = self[item] = type(self)() + return value + + return_vector = {} + + team_filtered = [] + variable_filtered = [] + variable_data = [] + test_filtered = [] + result_filtered = [] + return_vector = AutoVivification() + + for team in data: + + for variable in data[team]: + + if variable in tests: + + for test in tests[variable]: + + team_filtered.append(team) + variable_filtered.append(variable) + variable_data.append((data[team][variable], test)) + test_filtered.append(test) + + result_filtered = exec_threads.map(simplestats, variable_data) + i = 0 + + result_filtered = list(result_filtered) + + for result in result_filtered: + + filtered = test_filtered[i] + + try: + short = short_mapping[filtered] + return_vector[team_filtered[i]][variable_filtered[i]][test_filtered[i]] = result[short] + except KeyError: # not in mapping + return_vector[team_filtered[i]][variable_filtered[i]][test_filtered[i]] = result + i += 1 + + return return_vector + +def metricloop(tbakey, apikey, competition, timestamp, metrics): # listener based metrics update + + elo_N = metrics["elo"]["N"] + elo_K = metrics["elo"]["K"] + + matches = pull_new_tba_matches(tbakey, competition, timestamp) + + red = {} + blu = {} + + for match in matches: + + red = load_metric(apikey, competition, match, "red", metrics) + blu = load_metric(apikey, competition, match, "blue", metrics) + + 
elo_red_total = 0 + elo_blu_total = 0 + + gl2_red_score_total = 0 + gl2_blu_score_total = 0 + + gl2_red_rd_total = 0 + gl2_blu_rd_total = 0 + + gl2_red_vol_total = 0 + gl2_blu_vol_total = 0 + + for team in red: + + elo_red_total += red[team]["elo"]["score"] + + gl2_red_score_total += red[team]["gl2"]["score"] + gl2_red_rd_total += red[team]["gl2"]["rd"] + gl2_red_vol_total += red[team]["gl2"]["vol"] + + for team in blu: + + elo_blu_total += blu[team]["elo"]["score"] + + gl2_blu_score_total += blu[team]["gl2"]["score"] + gl2_blu_rd_total += blu[team]["gl2"]["rd"] + gl2_blu_vol_total += blu[team]["gl2"]["vol"] + + red_elo = {"score": elo_red_total / len(red)} + blu_elo = {"score": elo_blu_total / len(blu)} + + red_gl2 = {"score": gl2_red_score_total / len(red), "rd": gl2_red_rd_total / len(red), "vol": gl2_red_vol_total / len(red)} + blu_gl2 = {"score": gl2_blu_score_total / len(blu), "rd": gl2_blu_rd_total / len(blu), "vol": gl2_blu_vol_total / len(blu)} + + + if match["winner"] == "red": + + observations = {"red": 1, "blu": 0} + + elif match["winner"] == "blue": + + observations = {"red": 0, "blu": 1} + + else: + + observations = {"red": 0.5, "blu": 0.5} + + red_elo_delta = an.Metric().elo(red_elo["score"], blu_elo["score"], observations["red"], elo_N, elo_K) - red_elo["score"] + blu_elo_delta = an.Metric().elo(blu_elo["score"], red_elo["score"], observations["blu"], elo_N, elo_K) - blu_elo["score"] + + new_red_gl2_score, new_red_gl2_rd, new_red_gl2_vol = an.Metric().glicko2(red_gl2["score"], red_gl2["rd"], red_gl2["vol"], [blu_gl2["score"]], [blu_gl2["rd"]], [observations["red"], observations["blu"]]) + new_blu_gl2_score, new_blu_gl2_rd, new_blu_gl2_vol = an.Metric().glicko2(blu_gl2["score"], blu_gl2["rd"], blu_gl2["vol"], [red_gl2["score"]], [red_gl2["rd"]], [observations["blu"], observations["red"]]) + + red_gl2_delta = {"score": new_red_gl2_score - red_gl2["score"], "rd": new_red_gl2_rd - red_gl2["rd"], "vol": new_red_gl2_vol - red_gl2["vol"]} + blu_gl2_delta = {"score": new_blu_gl2_score - blu_gl2["score"], "rd": new_blu_gl2_rd - blu_gl2["rd"], "vol": new_blu_gl2_vol - blu_gl2["vol"]} + + for team in red: + + red[team]["elo"]["score"] = red[team]["elo"]["score"] + red_elo_delta + + red[team]["gl2"]["score"] = red[team]["gl2"]["score"] + red_gl2_delta["score"] + red[team]["gl2"]["rd"] = red[team]["gl2"]["rd"] + red_gl2_delta["rd"] + red[team]["gl2"]["vol"] = red[team]["gl2"]["vol"] + red_gl2_delta["vol"] + + for team in blu: + + blu[team]["elo"]["score"] = blu[team]["elo"]["score"] + blu_elo_delta + + blu[team]["gl2"]["score"] = blu[team]["gl2"]["score"] + blu_gl2_delta["score"] + blu[team]["gl2"]["rd"] = blu[team]["gl2"]["rd"] + blu_gl2_delta["rd"] + blu[team]["gl2"]["vol"] = blu[team]["gl2"]["vol"] + blu_gl2_delta["vol"] + + temp_vector = {} + temp_vector.update(red) + temp_vector.update(blu) + + push_metric(apikey, competition, temp_vector) + +def pitloop(apikey, competition, pit, tests): + + return_vector = {} + for team in pit: + for variable in pit[team]: + if variable in tests: + if not variable in return_vector: + return_vector[variable] = [] + return_vector[variable].append(pit[team][variable]) + + return return_vector \ No newline at end of file diff --git a/src/cli/superscript.py b/src/cli/superscript.py index 09ef513..0cbe4a0 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -11,6 +11,8 @@ __changelog__ = """changelog: - moved printing and logging related functions to interface.py (changelog will stay in this file) - changed function return files for 
load_config and save_config to standard C values (0 for success, 1 for error) - added local variables for config location + - moved dataset getting and setting functions to dataset.py (changelog will stay in this file) + - moved matchloop, metricloop, pitloop and helper functions (simplestats) to processing.py 0.8.6: - added proper main function 0.8.5: @@ -121,23 +123,22 @@ __author__ = ( __all__ = [ "load_config", "save_config", - "get_previous_time", - "load_match", - "matchloop", - "load_metric", - "metricloop", - "load_pit", - "pitloop", - "push_match", - "push_metric", - "push_pit", ] # imports: import json +import multiprocessing +import os +import math +from multiprocessing import Pool +import time +import warnings +import sys -from cli_interface import splash, log, ERR, INF, stdout, stderr +from interface import splash, log, ERR, INF, stdout, stderr +from dataset import get_previous_time, set_current_time, load_match, push_match, load_metric, push_metric, load_pit, push_pit +from processing import matchloop, metricloop, pitloop config_path = "config.json" sample_json = """{ @@ -189,21 +190,190 @@ sample_json = """{ def main(): + warnings.filterwarnings("ignore") + sys.stderr = open("errorlog.txt", "w") + splash(__version__) + loop_exit_code = 0 + loop_stored_exception = None + + while True: + + try: + + loop_start = time.time() + + current_time = time.time() + log(stdout, INF, "current time: " + str(current_time)) + + config = {} + if load_config(config_path, config) == 1: + exit(1) + + error_flag = False + + try: + competition = config["competition"] + except: + log(stderr, ERR, "could not find competition field in config", code = 101) + error_flag = True + try: + match_tests = config["statistics"]["match"] + except: + log(stderr, ERR, "could not find match_tests field in config", code = 102) + error_flag = True + try: + metrics_tests = config["statistics"]["metric"] + except: + log(stderr, ERR, "could not find metrics_tests field in config", code = 103) + error_flag = True + try: + pit_tests = config["statistics"]["pit"] + except: + log(stderr, ERR, "could not find pit_tests field in config", code = 104) + error_flag = True + + if error_flag: + exit(1) + error_flag = False + + if competition == None or competition == "": + log(stderr, ERR, "competition field in config must not be empty", code = 105) + error_flag = True + if match_tests == None: + log(stderr, ERR, "match_tests field in config must not be empty", code = 106) + error_flag = True + if metrics_tests == None: + log(stderr, ERR, "metrics_tests field in config must not be empty", code = 107) + error_flag = True + if pit_tests == None: + log(stderr, ERR, "pit_tests field in config must not be empty", code = 108) + error_flag = True + + if error_flag: + exit(1) + + log(stdout, INF, "found and loaded competition, match_tests, metrics_tests, pit_tests from config") + + sys_max_threads = os.cpu_count() + try: + cfg_max_threads = config["max-threads"] + except: + log(stderr, ERR, "max-threads field in config must not be empty, refer to documentation for configuration options", code = 109) + exit(1) + + if cfg_max_threads > -sys_max_threads and cfg_max_threads < 0 : + alloc_processes = sys_max_threads + cfg_max_threads + elif cfg_max_threads > 0 and cfg_max_threads < 1: + alloc_processes = math.floor(cfg_max_threads * sys_max_threads) + elif cfg_max_threads > 1 and cfg_max_threads <= sys_max_threads: + alloc_processes = cfg_max_threads + elif cfg_max_threads == 0: + alloc_processes = sys_max_threads + else: + log(stderr, ERR, 
"max-threads must be between -" + str(sys_max_threads) + " and " + str(sys_max_threads) + ", but got " + cfg_max_threads, code = 110) + exit(1) + + log(stdout, INF, "found and loaded max-threads from config") + log(stdout, INF, "attempting to start " + str(alloc_processes) + " threads") + try: + exec_threads = Pool(processes = alloc_processes) + except Exception as e: + log(stderr, ERR, "unable to start threads", code = 200) + log(stderr, INF, e) + exit(1) + log(stdout, INF, "successfully initialized " + str(alloc_processes) + " threads") + + exit_flag = False + + try: + apikey = config["key"]["database"] + except: + log(stderr, ERR, "database key field in config must not be empty, please populate the database key", code = 111) + exit_flag = True + try: + tbakey = config["key"]["tba"] + except: + log(stderr, ERR, "tba key field in config must not be empty, please populate the tba key", code = 112) + exit_flag = True + + if exit_flag: + exit(1) + + log(stdout, INF, "found and loaded database and tba keys") + + previous_time = get_previous_time(apikey) + log(stdout, INF, "analysis backtimed to: " + str(previous_time)) + + start = time.time() + log(stdout, INF, "loading match data") + match_data = load_match(apikey, competition) + log(stdout, INF, "finished loading match data in " + str(time.time() - start) + " seconds") + + start = time.time() + log(stdout, INF, "performing analysis on match data") + results = matchloop(apikey, competition, match_data, match_tests, exec_threads) + log(stdout, INF, "finished match analysis in " + str(time.time() - start) + " seconds") + + start = time.time() + log(stdout, INF, "uploading match results to database") + push_match(apikey, competition, results) + log(stdout, INF, "finished uploading match results in " + str(time.time() - start) + " seconds") + + start = time.time() + log(stdout, INF, "performing analysis on team metrics") + results = metricloop(tbakey, apikey, competition, current_time, metrics_tests) + log(stdout, INF, "finished metric analysis and pushed to database in " + str(time.time() - start) + " seconds") + + start = time.time() + log(stdout, INF, "loading pit data") + pit_data = load_pit(apikey, competition) + log(stdout, INF, "finished loading pit data in " + str(time.time() - start) + " seconds") + + start = time.time() + log(stdout, INF, "performing analysis on pit data") + results = pitloop(apikey, competition, pit_data, pit_tests) + log(stdout, INF, "finished pit analysis in " + str(time.time() - start) + " seconds") + + start = time.time() + log(stdout, INF, "uploading pit results to database") + push_pit(apikey, competition, results) + log(stdout, INF, "finished uploading pit results in " + str(time.time() - start) + " seconds") + + set_current_time(apikey, current_time) + log(stdout, INF, "finished all tests in " + str(time.time() - loop_start) + " seconds, looping") + + except KeyboardInterrupt: + log(stdout, INF, "detected KeyboardInterrupt, killing threads") + if "exec_threads" in locals(): + exec_threads.terminate() + exec_threads.close() + log(stdout, INF, "terminated threads, exiting") + loop_stored_exception = sys.exc_info() + loop_exit_code = 0 + break + except Exception as e: + log(stderr, ERR, "encountered an exception while running") + print(e, file = stderr) + loop_exit_code = 1 + break + + sys.exit(loop_exit_code) + def load_config(path, config_vector): try: f = open(path, "r") + config_vector.update(json.load(f)) + f.close() + log(stdout, INF, "found and opened config at <" + path + ">") + return 0 except: - 
log(stderr, ERR, "could not find config at <" + path + ">, generating blank config and exiting") + log(stderr, ERR, "could not find config at <" + path + ">, generating blank config and exiting", code = 100) f = open(path, "w") f.write(sample_json) f.close() return 1 - - config_vector = json.load(f) - f.close() - return 0 def save_config(path, config_vector): try: @@ -215,4 +385,6 @@ def save_config(path, config_vector): return 1 if __name__ == "__main__": + if sys.platform.startswith("win"): + multiprocessing.freeze_support() main() \ No newline at end of file From 3c7262498c049cc6666add57ced1d7f534059f46 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Sun, 13 Jun 2021 00:49:04 +0000 Subject: [PATCH 04/29] superscript v 0.9.1 Former-commit-id: 246efd524bd0536150858703180ac8a77f1b0587 --- src/cli/superscript.py | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/src/cli/superscript.py b/src/cli/superscript.py index 0cbe4a0..1ad9058 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -3,10 +3,12 @@ # Notes: # setup: -__version__ = "0.9.0" +__version__ = "0.9.1" # changelog should be viewed using print(analysis.__changelog__) __changelog__ = """changelog: + 0.9.1: + - fixed bugs in configuration item loading exception handling 0.9.0: - moved printing and logging related functions to interface.py (changelog will stay in this file) - changed function return files for load_config and save_config to standard C values (0 for success, 1 for error) @@ -209,7 +211,7 @@ def main(): config = {} if load_config(config_path, config) == 1: - exit(1) + sys.exit(1) error_flag = False @@ -235,7 +237,7 @@ def main(): error_flag = True if error_flag: - exit(1) + sys.exit(1) error_flag = False if competition == None or competition == "": @@ -252,7 +254,7 @@ def main(): error_flag = True if error_flag: - exit(1) + sys.exit(1) log(stdout, INF, "found and loaded competition, match_tests, metrics_tests, pit_tests from config") @@ -261,7 +263,7 @@ def main(): cfg_max_threads = config["max-threads"] except: log(stderr, ERR, "max-threads field in config must not be empty, refer to documentation for configuration options", code = 109) - exit(1) + sys.exit(1) if cfg_max_threads > -sys_max_threads and cfg_max_threads < 0 : alloc_processes = sys_max_threads + cfg_max_threads @@ -273,7 +275,7 @@ def main(): alloc_processes = sys_max_threads else: log(stderr, ERR, "max-threads must be between -" + str(sys_max_threads) + " and " + str(sys_max_threads) + ", but got " + cfg_max_threads, code = 110) - exit(1) + sys.exit(1) log(stdout, INF, "found and loaded max-threads from config") log(stdout, INF, "attempting to start " + str(alloc_processes) + " threads") @@ -282,7 +284,7 @@ def main(): except Exception as e: log(stderr, ERR, "unable to start threads", code = 200) log(stderr, INF, e) - exit(1) + sys.exit(1) log(stdout, INF, "successfully initialized " + str(alloc_processes) + " threads") exit_flag = False @@ -290,16 +292,23 @@ def main(): try: apikey = config["key"]["database"] except: - log(stderr, ERR, "database key field in config must not be empty, please populate the database key", code = 111) + log(stderr, ERR, "database key field in config must be present", code = 111) exit_flag = True try: tbakey = config["key"]["tba"] except: - log(stderr, ERR, "tba key field in config must not be empty, please populate the tba key", code = 112) + log(stderr, ERR, "tba key field in config must be present", code = 112) + exit_flag = True + + if apikey == None or apikey == 
"": + log(stderr, ERR, "database key field in config must not be empty, please populate the database key") + exit_flag = True + if tbakey == None or tbakey == "": + log(stderr, ERR, "tba key field in config must not be empty, please populate the tba key") exit_flag = True if exit_flag: - exit(1) + sys.exit(1) log(stdout, INF, "found and loaded database and tba keys") From 7e800c9004ddb94748cace28c100ea1603963494 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Fri, 16 Jul 2021 06:12:26 +0000 Subject: [PATCH 05/29] added Pool.join to allow threads to exit safely added keyboard interrupt signal ignore in threads Former-commit-id: 836e9ca6be56adffffce9be0ac4cceaed08e6918 --- src/cli/processing.py | 4 ++++ src/cli/superscript.py | 1 + 2 files changed, 5 insertions(+) diff --git a/src/cli/processing.py b/src/cli/processing.py index 1f7e322..3778926 100644 --- a/src/cli/processing.py +++ b/src/cli/processing.py @@ -4,8 +4,12 @@ from tra_analysis import Analysis as an from dataset import push_metric from data import pull_new_tba_matches +import signal + def simplestats(data_test): + signal.signal(signal.SIGINT, signal.SIG_IGN) + data = np.array(data_test[0]) data = data[np.isfinite(data)] ranges = list(range(len(data))) diff --git a/src/cli/superscript.py b/src/cli/superscript.py index 1ad9058..2fe295a 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -357,6 +357,7 @@ def main(): log(stdout, INF, "detected KeyboardInterrupt, killing threads") if "exec_threads" in locals(): exec_threads.terminate() + exec_threads.join() exec_threads.close() log(stdout, INF, "terminated threads, exiting") loop_stored_exception = sys.exc_info() From 5d95913467e0a79c232f6c1fd5617971148eee49 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Wed, 11 Aug 2021 00:33:06 +0000 Subject: [PATCH 06/29] removed duplicate requirements Former-commit-id: f258a768bee1fb81a6baccfd7b2576a66514d946 --- src/requirements.txt | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/requirements.txt b/src/requirements.txt index a3d30e7..f99f4cd 100644 --- a/src/requirements.txt +++ b/src/requirements.txt @@ -5,8 +5,6 @@ tra-analysis dnspython pyinstaller -requests -pymongo numpy scipy From fb2ea60fea879df0ae185567b042f5c24273ad31 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Wed, 11 Aug 2021 18:10:04 +0000 Subject: [PATCH 07/29] started socketing superscript statuses Former-commit-id: a703fe486757989822960471a7181c8b72afe9fc --- src/cli/socket-test.html | 20 +++ src/cli/superscript-socket.py | 275 ++++++++++++++++++++++++++++++++++ src/cli/time-test.py | 20 +++ 3 files changed, 315 insertions(+) create mode 100644 src/cli/socket-test.html create mode 100644 src/cli/superscript-socket.py create mode 100644 src/cli/time-test.py diff --git a/src/cli/socket-test.html b/src/cli/socket-test.html new file mode 100644 index 0000000..98f28a1 --- /dev/null +++ b/src/cli/socket-test.html @@ -0,0 +1,20 @@ + + + + WebSocket demo + + + + + \ No newline at end of file diff --git a/src/cli/superscript-socket.py b/src/cli/superscript-socket.py new file mode 100644 index 0000000..b87f636 --- /dev/null +++ b/src/cli/superscript-socket.py @@ -0,0 +1,275 @@ +# testing purposes only, not to be used or run + +import json +import multiprocessing +import os +import math +from multiprocessing import Pool +import time +import warnings +import sys +import asyncio +import websockets + +from interface import splash, log, ERR, INF, stdout, stderr +from dataset import get_previous_time, set_current_time, load_match, push_match, load_metric, push_metric, 
load_pit, push_pit +from processing import matchloop, metricloop, pitloop + +config_path = "config.json" +sample_json = """{ + "max-threads": 0.5, + "team": "", + "competition": "2020ilch", + "key":{ + "database":"", + "tba":"" + }, + "statistics":{ + "match":{ + "balls-blocked":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], + "balls-collected":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], + "balls-lower-teleop":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], + "balls-lower-auto":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], + "balls-started":["basic_stats","historical_analyss","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], + "balls-upper-teleop":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], + "balls-upper-auto":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"] + + }, + "metric":{ + "elo":{ + "score":1500, + "N":400, + "K":24 + }, + "gl2":{ + "score":1500, + "rd":250, + "vol":0.06 + }, + "ts":{ + "mu":25, + "sigma":8.33 + } + }, + "pit":{ + "wheel-mechanism":true, + "low-balls":true, + "high-balls":true, + "wheel-success":true, + "strategic-focus":true, + "climb-mechanism":true, + "attitude":true + } + } +}""" + +async def main(socket, path): + + #warnings.filterwarnings("ignore") + #sys.stderr = open("errorlog.txt", "w") + + #splash(__version__) + + #loop_exit_code = 0 + #loop_stored_exception = None + + while True: + + try: + + loop_start = time.time() + + current_time = time.time() + await socket.send("current time: " + str(current_time)) + + config = {} + if load_config(config_path, config) == 1: + sys.exit(1) + + error_flag = False + + try: + competition = config["competition"] + except: + await socket.send("could not find competition field in config") + error_flag = True + try: + match_tests = config["statistics"]["match"] + except: + await socket.send("could not find match_tests field in config") + error_flag = True + try: + metrics_tests = config["statistics"]["metric"] + except: + await socket.send("could not find metrics_tests field in config") + error_flag = True + try: + pit_tests = config["statistics"]["pit"] + except: + await socket.send("could not find pit_tests field in config") + error_flag = True + + if error_flag: + sys.exit(1) + error_flag = False + + if competition == None or competition == "": + await socket.send("competition field in config must not be empty") + error_flag = True + if match_tests == None: + await socket.send("match_tests field in config must not be empty") + error_flag = True + if metrics_tests == None: + await socket.send("metrics_tests field in config must not be empty") + error_flag = True + if pit_tests == None: + await socket.send("pit_tests field in config must not be empty") + error_flag = True + + if error_flag: + sys.exit(1) + + await socket.send("found and loaded competition, match_tests, metrics_tests, pit_tests from config") + + 
sys_max_threads = os.cpu_count() + try: + cfg_max_threads = config["max-threads"] + except: + await socket.send("max-threads field in config must not be empty, refer to documentation for configuration options", code = 109) + sys.exit(1) + + if cfg_max_threads > -sys_max_threads and cfg_max_threads < 0 : + alloc_processes = sys_max_threads + cfg_max_threads + elif cfg_max_threads > 0 and cfg_max_threads < 1: + alloc_processes = math.floor(cfg_max_threads * sys_max_threads) + elif cfg_max_threads > 1 and cfg_max_threads <= sys_max_threads: + alloc_processes = cfg_max_threads + elif cfg_max_threads == 0: + alloc_processes = sys_max_threads + else: + await socket.send("max-threads must be between -" + str(sys_max_threads) + " and " + str(sys_max_threads) + ", but got " + cfg_max_threads) + sys.exit(1) + + await socket.send("found and loaded max-threads from config") + await socket.send("attempting to start " + str(alloc_processes) + " threads") + try: + exec_threads = Pool(processes = alloc_processes) + except Exception as e: + await socket.send("unable to start threads") + #log(stderr, INF, e) + sys.exit(1) + await socket.send("successfully initialized " + str(alloc_processes) + " threads") + + exit_flag = False + + try: + apikey = config["key"]["database"] + except: + await socket.send("database key field in config must be present") + exit_flag = True + try: + tbakey = config["key"]["tba"] + except: + await socket.send("tba key field in config must be present") + exit_flag = True + + if apikey == None or apikey == "": + await socket.send("database key field in config must not be empty, please populate the database key") + exit_flag = True + if tbakey == None or tbakey == "": + await socket.send("tba key field in config must not be empty, please populate the tba key") + exit_flag = True + + if exit_flag: + sys.exit(1) + + await socket.send("found and loaded database and tba keys") + + previous_time = get_previous_time(apikey) + await socket.send("analysis backtimed to: " + str(previous_time)) + + start = time.time() + await socket.send("loading match data") + match_data = load_match(apikey, competition) + await socket.send("finished loading match data in " + str(time.time() - start) + " seconds") + + start = time.time() + await socket.send("performing analysis on match data") + results = matchloop(apikey, competition, match_data, match_tests, exec_threads) + await socket.send("finished match analysis in " + str(time.time() - start) + " seconds") + + start = time.time() + await socket.send("uploading match results to database") + push_match(apikey, competition, results) + await socket.send("finished uploading match results in " + str(time.time() - start) + " seconds") + + start = time.time() + await socket.send("performing analysis on team metrics") + results = metricloop(tbakey, apikey, competition, current_time, metrics_tests) + await socket.send("finished metric analysis and pushed to database in " + str(time.time() - start) + " seconds") + + start = time.time() + await socket.send("loading pit data") + pit_data = load_pit(apikey, competition) + await socket.send("finished loading pit data in " + str(time.time() - start) + " seconds") + + start = time.time() + await socket.send("performing analysis on pit data") + results = pitloop(apikey, competition, pit_data, pit_tests) + await socket.send("finished pit analysis in " + str(time.time() - start) + " seconds") + + start = time.time() + await socket.send("uploading pit results to database") + push_pit(apikey, competition, results) + await 
socket.send("finished uploading pit results in " + str(time.time() - start) + " seconds") + + set_current_time(apikey, current_time) + await socket.send("finished all tests in " + str(time.time() - loop_start) + " seconds, looping") + + except KeyboardInterrupt: + await socket.send("detected KeyboardInterrupt, killing threads") + if "exec_threads" in locals(): + exec_threads.terminate() + exec_threads.join() + exec_threads.close() + await socket.send("terminated threads, exiting") + loop_stored_exception = sys.exc_info() + loop_exit_code = 0 + break + except Exception as e: + await socket.send("encountered an exception while running") + print(e, file = stderr) + loop_exit_code = 1 + break + + sys.exit(loop_exit_code) + +def load_config(path, config_vector): + try: + f = open(path, "r") + config_vector.update(json.load(f)) + f.close() + #socket.send("found and opened config at <" + path + ">") + return 0 + except: + #log(stderr, ERR, "could not find config at <" + path + ">, generating blank config and exiting", code = 100) + f = open(path, "w") + f.write(sample_json) + f.close() + return 1 + +def save_config(path, config_vector): + try: + f = open(path) + json.dump(config_vector) + f.close() + return 0 + except: + return 1 + +if __name__ == "__main__": + if sys.platform.startswith("win"): + multiprocessing.freeze_support() + start_server = websockets.serve(main, "127.0.0.1", 5678) + asyncio.get_event_loop().run_until_complete(start_server) + asyncio.get_event_loop().run_forever() \ No newline at end of file diff --git a/src/cli/time-test.py b/src/cli/time-test.py new file mode 100644 index 0000000..9d2c587 --- /dev/null +++ b/src/cli/time-test.py @@ -0,0 +1,20 @@ +import asyncio +import datetime +import random +import websockets + +async def time(websocket, path): + print(path) + i = 0 + while True: + #now = datetime.datetime.utcnow().isoformat() + "Z" + #await websocket.send(now) + #await asyncio.sleep(random.random() * 3) + i += 1 + await websocket.send(str(i)) + await asyncio.sleep(1) + +start_server = websockets.serve(time, "127.0.0.1", 5678) + +asyncio.get_event_loop().run_until_complete(start_server) +asyncio.get_event_loop().run_forever() \ No newline at end of file From 19bca6967cc5ff118ae1543beb6ec2516e04a779 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Wed, 11 Aug 2021 18:21:34 +0000 Subject: [PATCH 08/29] added websockets to requirements.txt Former-commit-id: d9ba7bcb2485c5342ac626d5b5259204568a12d0 --- src/requirements.txt | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/requirements.txt b/src/requirements.txt index f99f4cd..9833941 100644 --- a/src/requirements.txt +++ b/src/requirements.txt @@ -13,4 +13,6 @@ six pyparsing pandas -kivy==2.0.0rc2 \ No newline at end of file +kivy==2.0.0rc2 + +websockets \ No newline at end of file From 91f34a8d740e754101a924246e997be9f8f11d8a Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Wed, 11 Aug 2021 22:28:24 +0000 Subject: [PATCH 09/29] daemonized superscript socket example added python-daemon to requirements.txt Former-commit-id: 922095ebe0187477e5468d2e9bfa96bd98016d4a --- src/cli/superscript-socket.py | 18 +++++++++++++----- src/requirements.txt | 3 ++- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/src/cli/superscript-socket.py b/src/cli/superscript-socket.py index b87f636..24395a2 100644 --- a/src/cli/superscript-socket.py +++ b/src/cli/superscript-socket.py @@ -10,6 +10,7 @@ import warnings import sys import asyncio import websockets +import lockfile from interface import splash, log, ERR, 
INF, stdout, stderr from dataset import get_previous_time, set_current_time, load_match, push_match, load_metric, push_metric, load_pit, push_pit @@ -267,9 +268,16 @@ def save_config(path, config_vector): except: return 1 +import daemon +from daemon import pidfile + if __name__ == "__main__": - if sys.platform.startswith("win"): - multiprocessing.freeze_support() - start_server = websockets.serve(main, "127.0.0.1", 5678) - asyncio.get_event_loop().run_until_complete(start_server) - asyncio.get_event_loop().run_forever() \ No newline at end of file + with daemon.DaemonContext( + working_directory=os.getcwd(), + pidfile=pidfile.TimeoutPIDLockFile("/var/run/tra-daemon.pid"), + ): + if sys.platform.startswith("win"): + multiprocessing.freeze_support() + start_server = websockets.serve(main, "127.0.0.1", 5678) + asyncio.get_event_loop().run_until_complete(start_server) + asyncio.get_event_loop().run_forever() \ No newline at end of file diff --git a/src/requirements.txt b/src/requirements.txt index 9833941..644ff65 100644 --- a/src/requirements.txt +++ b/src/requirements.txt @@ -15,4 +15,5 @@ pandas kivy==2.0.0rc2 -websockets \ No newline at end of file +websockets +python-daemon \ No newline at end of file From 962061007b49274950335a16dc3bf41ae9b447be Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Wed, 11 Aug 2021 22:54:24 +0000 Subject: [PATCH 10/29] removed time-test Former-commit-id: c09f4d0897d110bb7207d5aaeafeb6c3cd2d3f9f --- src/cli/time-test.py | 20 -------------------- 1 file changed, 20 deletions(-) delete mode 100644 src/cli/time-test.py diff --git a/src/cli/time-test.py b/src/cli/time-test.py deleted file mode 100644 index 9d2c587..0000000 --- a/src/cli/time-test.py +++ /dev/null @@ -1,20 +0,0 @@ -import asyncio -import datetime -import random -import websockets - -async def time(websocket, path): - print(path) - i = 0 - while True: - #now = datetime.datetime.utcnow().isoformat() + "Z" - #await websocket.send(now) - #await asyncio.sleep(random.random() * 3) - i += 1 - await websocket.send(str(i)) - await asyncio.sleep(1) - -start_server = websockets.serve(time, "127.0.0.1", 5678) - -asyncio.get_event_loop().run_until_complete(start_server) -asyncio.get_event_loop().run_forever() \ No newline at end of file From 3a068654eda9fc116cd3fd9d8b77246f62028a21 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Wed, 11 Aug 2021 23:26:52 +0000 Subject: [PATCH 11/29] added start/stop/restart argument functionality slightly buggy Former-commit-id: ba793140af2116a485bd4dfa5df45f407baec753 --- src/cli/superscript-socket.py | 53 +++++++++++++++++++++++++++++++---- 1 file changed, 48 insertions(+), 5 deletions(-) diff --git a/src/cli/superscript-socket.py b/src/cli/superscript-socket.py index 24395a2..7a56d98 100644 --- a/src/cli/superscript-socket.py +++ b/src/cli/superscript-socket.py @@ -270,14 +270,57 @@ def save_config(path, config_vector): import daemon from daemon import pidfile +from signal import SIGTERM -if __name__ == "__main__": +def start(pid_path): + #print("starting") with daemon.DaemonContext( working_directory=os.getcwd(), - pidfile=pidfile.TimeoutPIDLockFile("/var/run/tra-daemon.pid"), + pidfile=pidfile.TimeoutPIDLockFile(pid_path), ): - if sys.platform.startswith("win"): - multiprocessing.freeze_support() start_server = websockets.serve(main, "127.0.0.1", 5678) asyncio.get_event_loop().run_until_complete(start_server) - asyncio.get_event_loop().run_forever() \ No newline at end of file + asyncio.get_event_loop().run_forever() + +def stop(pid_path): + #print("stopping") + try: + pf = 
open(pid_path, 'r') + pid = int(pf.read().strip()) + pf.close() + except IOError: + sys.stderr.write("pidfile at <" + pid_path + "> does not exist. Daemon not running?\n") + return + + try: + os.kill(pid, SIGTERM) + return + except OSError as err: + if err.find("No such process") > 0: + if os.path.exists(pid_path): + os.remove(pid_path) + else: + print(str(err)) + sys.exit(1) + +def restart(pid_path): + #print("restarting") + stop(pid_path) + start(pid_path) + +if __name__ == "__main__": + pid_path = "/var/run/tra-daemon.pid" + if len(sys.argv) == 2: + if 'start' == sys.argv[1]: + start(pid_path) + elif 'stop' == sys.argv[1]: + stop(pid_path) + elif 'restart' == sys.argv[1]: + restart(pid_path) + else: + print("usage: %s start|stop|restart" % sys.argv[0]) + sys.exit(2) + sys.exit(0) + else: + print("usage: %s start|stop|restart" % sys.argv[0]) + sys.exit(2) \ No newline at end of file From 30641e43d8bedf1ef77087e7df4c27fcd8e75d1a Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Wed, 11 Aug 2021 23:37:57 +0000 Subject: [PATCH 12/29] fixed delay in daemon stopping Former-commit-id: 1d9fa990582c214f3f40d1a10e97accfc38e3468 --- src/cli/superscript-socket.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/cli/superscript-socket.py b/src/cli/superscript-socket.py index 7a56d98..14a96a3 100644 --- a/src/cli/superscript-socket.py +++ b/src/cli/superscript-socket.py @@ -291,11 +291,13 @@ def stop(pid_path): except IOError: sys.stderr.write("pidfile at <" + pid_path + "> does not exist. Daemon not running?\n") return - + try: - os.kill(pid, SIGTERM) - return + while True: + os.kill(pid, SIGTERM) + time.sleep(0.01) except OSError as err: + err = str(err) if err.find("No such process") > 0: if os.path.exists(pid_path): os.remove(pid_path) From b3ab9156dbecd8283597aecd98191effc1c10a2f Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Thu, 12 Aug 2021 21:53:03 +0000 Subject: [PATCH 13/29] consolidated datraset.py into datas.py removed unessasary time import in data.py added missing import to processing.py Former-commit-id: f74cfb3ae32172ffff7f472301a46a8cc3578918 --- src/cli/data.py | 78 ++++++++++++++++++++++++++++++++++++++-- src/cli/dataset.py | 74 -------------------------------------- src/cli/processing.py | 2 +- src/cli/superscript.spec | 1 - 4 files changed, 76 insertions(+), 79 deletions(-) delete mode 100644 src/cli/dataset.py diff --git a/src/cli/data.py b/src/cli/data.py index 641aba7..fb5dc27 100644 --- a/src/cli/data.py +++ b/src/cli/data.py @@ -1,7 +1,6 @@ import requests import pymongo import pandas as pd -import time def pull_new_tba_matches(apikey, competition, cutoff): api_key= apikey @@ -55,7 +54,7 @@ def get_metrics_data_formatted(apikey, competition): out = {} for i in x: try: - out[int(i)] = d.get_team_metrics_data(apikey, competition, int(i)) + out[int(i)] = get_team_metrics_data(apikey, competition, int(i)) except: pass return out @@ -126,4 +125,77 @@ def unkeyify_2l(layered_dict): add.append([j,layered_dict[i][j]]) add.sort(key = lambda x: x[0]) out[i] = list(map(lambda x: x[1], add)) - return out \ No newline at end of file + return out + +def get_previous_time(apikey): + + previous_time = get_analysis_flags(apikey, "latest_update") + + if previous_time == None: + + set_analysis_flags(apikey, "latest_update", 0) + previous_time = 0 + + else: + + previous_time = previous_time["latest_update"] + + return previous_time + +def set_current_time(apikey, current_time): + + set_analysis_flags(apikey, "latest_update", {"latest_update":current_time}) + +def 
load_match(apikey, competition): + + return get_match_data_formatted(apikey, competition) + +def load_metric(apikey, competition, match, group_name, metrics): + + group = {} + + for team in match[group_name]: + + db_data = get_team_metrics_data(apikey, competition, team) + + if get_team_metrics_data(apikey, competition, team) == None: + + elo = {"score": metrics["elo"]["score"]} + gl2 = {"score": metrics["gl2"]["score"], "rd": metrics["gl2"]["rd"], "vol": metrics["gl2"]["vol"]} + ts = {"mu": metrics["ts"]["mu"], "sigm+a": metrics["ts"]["sigma"]} + + group[team] = {"elo": elo, "gl2": gl2, "ts": ts} + + else: + + metrics = db_data["metrics"] + + elo = metrics["elo"] + gl2 = metrics["gl2"] + ts = metrics["ts"] + + group[team] = {"elo": elo, "gl2": gl2, "ts": ts} + + return group + +def load_pit(apikey, competition): + + return get_pit_data_formatted(apikey, competition) + +def push_match(apikey, competition, results): + + for team in results: + + push_team_tests_data(apikey, competition, team, results[team]) + +def push_metric(apikey, competition, metric): + + for team in metric: + + push_team_metrics_data(apikey, competition, team, metric[team]) + +def push_pit(apikey, competition, pit): + + for variable in pit: + + push_team_pit_data(apikey, competition, variable, pit[variable]) \ No newline at end of file diff --git a/src/cli/dataset.py b/src/cli/dataset.py deleted file mode 100644 index 5a69cc9..0000000 --- a/src/cli/dataset.py +++ /dev/null @@ -1,74 +0,0 @@ -import data as d - -def get_previous_time(apikey): - - previous_time = d.get_analysis_flags(apikey, "latest_update") - - if previous_time == None: - - d.set_analysis_flags(apikey, "latest_update", 0) - previous_time = 0 - - else: - - previous_time = previous_time["latest_update"] - - return previous_time - -def set_current_time(apikey, current_time): - - d.set_analysis_flags(apikey, "latest_update", {"latest_update":current_time}) - -def load_match(apikey, competition): - - return d.get_match_data_formatted(apikey, competition) - -def load_metric(apikey, competition, match, group_name, metrics): - - group = {} - - for team in match[group_name]: - - db_data = d.get_team_metrics_data(apikey, competition, team) - - if d.get_team_metrics_data(apikey, competition, team) == None: - - elo = {"score": metrics["elo"]["score"]} - gl2 = {"score": metrics["gl2"]["score"], "rd": metrics["gl2"]["rd"], "vol": metrics["gl2"]["vol"]} - ts = {"mu": metrics["ts"]["mu"], "sigm+a": metrics["ts"]["sigma"]} - - group[team] = {"elo": elo, "gl2": gl2, "ts": ts} - - else: - - metrics = db_data["metrics"] - - elo = metrics["elo"] - gl2 = metrics["gl2"] - ts = metrics["ts"] - - group[team] = {"elo": elo, "gl2": gl2, "ts": ts} - - return group - -def load_pit(apikey, competition): - - return d.get_pit_data_formatted(apikey, competition) - -def push_match(apikey, competition, results): - - for team in results: - - d.push_team_tests_data(apikey, competition, team, results[team]) - -def push_metric(apikey, competition, metric): - - for team in metric: - - d.push_team_metrics_data(apikey, competition, team, metric[team]) - -def push_pit(apikey, competition, pit): - - for variable in pit: - - d.push_team_pit_data(apikey, competition, variable, pit[variable]) \ No newline at end of file diff --git a/src/cli/processing.py b/src/cli/processing.py index 3778926..dd5ee2f 100644 --- a/src/cli/processing.py +++ b/src/cli/processing.py @@ -1,7 +1,7 @@ import numpy as np from tra_analysis import Analysis as an -from dataset import push_metric +from dataset import push_metric, 
load_metric from data import pull_new_tba_matches import signal diff --git a/src/cli/superscript.spec b/src/cli/superscript.spec index 5ffc4bf..04cafa7 100644 --- a/src/cli/superscript.spec +++ b/src/cli/superscript.spec @@ -2,7 +2,6 @@ block_cipher = None - a = Analysis(['superscript.py'], pathex=['/workspaces/tra-data-analysis/src'], binaries=[], From 6819aaf1431c44f5624fefe2040f28da1f31fc72 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Thu, 12 Aug 2021 21:58:41 +0000 Subject: [PATCH 14/29] consolidated dataset/data imports Former-commit-id: 6364746d7ab445c2a1164850a5fe3795d3e9688a --- src/cli/processing.py | 3 +-- src/cli/superscript-socket.py | 4 +--- src/cli/superscript.py | 7 +++---- 3 files changed, 5 insertions(+), 9 deletions(-) diff --git a/src/cli/processing.py b/src/cli/processing.py index dd5ee2f..901522e 100644 --- a/src/cli/processing.py +++ b/src/cli/processing.py @@ -1,8 +1,7 @@ import numpy as np from tra_analysis import Analysis as an -from dataset import push_metric, load_metric -from data import pull_new_tba_matches +from data import pull_new_tba_matches, push_metric, load_metric import signal diff --git a/src/cli/superscript-socket.py b/src/cli/superscript-socket.py index 14a96a3..0df6922 100644 --- a/src/cli/superscript-socket.py +++ b/src/cli/superscript-socket.py @@ -1,7 +1,6 @@ # testing purposes only, not to be used or run import json -import multiprocessing import os import math from multiprocessing import Pool @@ -10,10 +9,9 @@ import warnings import sys import asyncio import websockets -import lockfile from interface import splash, log, ERR, INF, stdout, stderr -from dataset import get_previous_time, set_current_time, load_match, push_match, load_metric, push_metric, load_pit, push_pit +from data import get_previous_time, set_current_time, load_match, push_match, load_metric, push_metric, load_pit, push_pit from processing import matchloop, metricloop, pitloop config_path = "config.json" diff --git a/src/cli/superscript.py b/src/cli/superscript.py index 2fe295a..01d2397 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -130,16 +130,15 @@ __all__ = [ # imports: import json -import multiprocessing import os import math -from multiprocessing import Pool +from multiprocessing import Pool, freeze_support import time import warnings import sys from interface import splash, log, ERR, INF, stdout, stderr -from dataset import get_previous_time, set_current_time, load_match, push_match, load_metric, push_metric, load_pit, push_pit +from data import get_previous_time, set_current_time, load_match, push_match, load_metric, push_metric, load_pit, push_pit from processing import matchloop, metricloop, pitloop config_path = "config.json" @@ -396,5 +395,5 @@ def save_config(path, config_vector): if __name__ == "__main__": if sys.platform.startswith("win"): - multiprocessing.freeze_support() + freeze_support() main() \ No newline at end of file From 2ebd2cba8a4f6fe0a8a5a312c5985660fa960ac4 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Thu, 12 Aug 2021 22:26:37 +0000 Subject: [PATCH 15/29] removed socket/daemon test scripts Former-commit-id: d1a9e567038fe705ced9ca8890cc79976326d4f8 --- src/cli/socket-test.html | 20 --- src/cli/superscript-socket.py | 326 ---------------------------------- 2 files changed, 346 deletions(-) delete mode 100644 src/cli/socket-test.html delete mode 100644 src/cli/superscript-socket.py diff --git a/src/cli/socket-test.html b/src/cli/socket-test.html deleted file mode 100644 index 98f28a1..0000000 --- a/src/cli/socket-test.html +++ 
/dev/null @@ -1,20 +0,0 @@ - - - - WebSocket demo - - - - - \ No newline at end of file diff --git a/src/cli/superscript-socket.py b/src/cli/superscript-socket.py deleted file mode 100644 index 0df6922..0000000 --- a/src/cli/superscript-socket.py +++ /dev/null @@ -1,326 +0,0 @@ -# testing purposes only, not to be used or run - -import json -import os -import math -from multiprocessing import Pool -import time -import warnings -import sys -import asyncio -import websockets - -from interface import splash, log, ERR, INF, stdout, stderr -from data import get_previous_time, set_current_time, load_match, push_match, load_metric, push_metric, load_pit, push_pit -from processing import matchloop, metricloop, pitloop - -config_path = "config.json" -sample_json = """{ - "max-threads": 0.5, - "team": "", - "competition": "2020ilch", - "key":{ - "database":"", - "tba":"" - }, - "statistics":{ - "match":{ - "balls-blocked":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], - "balls-collected":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], - "balls-lower-teleop":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], - "balls-lower-auto":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], - "balls-started":["basic_stats","historical_analyss","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], - "balls-upper-teleop":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], - "balls-upper-auto":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"] - - }, - "metric":{ - "elo":{ - "score":1500, - "N":400, - "K":24 - }, - "gl2":{ - "score":1500, - "rd":250, - "vol":0.06 - }, - "ts":{ - "mu":25, - "sigma":8.33 - } - }, - "pit":{ - "wheel-mechanism":true, - "low-balls":true, - "high-balls":true, - "wheel-success":true, - "strategic-focus":true, - "climb-mechanism":true, - "attitude":true - } - } -}""" - -async def main(socket, path): - - #warnings.filterwarnings("ignore") - #sys.stderr = open("errorlog.txt", "w") - - #splash(__version__) - - #loop_exit_code = 0 - #loop_stored_exception = None - - while True: - - try: - - loop_start = time.time() - - current_time = time.time() - await socket.send("current time: " + str(current_time)) - - config = {} - if load_config(config_path, config) == 1: - sys.exit(1) - - error_flag = False - - try: - competition = config["competition"] - except: - await socket.send("could not find competition field in config") - error_flag = True - try: - match_tests = config["statistics"]["match"] - except: - await socket.send("could not find match_tests field in config") - error_flag = True - try: - metrics_tests = config["statistics"]["metric"] - except: - await socket.send("could not find metrics_tests field in config") - error_flag = True - try: - pit_tests = config["statistics"]["pit"] - except: - await socket.send("could not find pit_tests field in config") - error_flag = True - - if error_flag: - sys.exit(1) 
- error_flag = False - - if competition == None or competition == "": - await socket.send("competition field in config must not be empty") - error_flag = True - if match_tests == None: - await socket.send("match_tests field in config must not be empty") - error_flag = True - if metrics_tests == None: - await socket.send("metrics_tests field in config must not be empty") - error_flag = True - if pit_tests == None: - await socket.send("pit_tests field in config must not be empty") - error_flag = True - - if error_flag: - sys.exit(1) - - await socket.send("found and loaded competition, match_tests, metrics_tests, pit_tests from config") - - sys_max_threads = os.cpu_count() - try: - cfg_max_threads = config["max-threads"] - except: - await socket.send("max-threads field in config must not be empty, refer to documentation for configuration options", code = 109) - sys.exit(1) - - if cfg_max_threads > -sys_max_threads and cfg_max_threads < 0 : - alloc_processes = sys_max_threads + cfg_max_threads - elif cfg_max_threads > 0 and cfg_max_threads < 1: - alloc_processes = math.floor(cfg_max_threads * sys_max_threads) - elif cfg_max_threads > 1 and cfg_max_threads <= sys_max_threads: - alloc_processes = cfg_max_threads - elif cfg_max_threads == 0: - alloc_processes = sys_max_threads - else: - await socket.send("max-threads must be between -" + str(sys_max_threads) + " and " + str(sys_max_threads) + ", but got " + cfg_max_threads) - sys.exit(1) - - await socket.send("found and loaded max-threads from config") - await socket.send("attempting to start " + str(alloc_processes) + " threads") - try: - exec_threads = Pool(processes = alloc_processes) - except Exception as e: - await socket.send("unable to start threads") - #log(stderr, INF, e) - sys.exit(1) - await socket.send("successfully initialized " + str(alloc_processes) + " threads") - - exit_flag = False - - try: - apikey = config["key"]["database"] - except: - await socket.send("database key field in config must be present") - exit_flag = True - try: - tbakey = config["key"]["tba"] - except: - await socket.send("tba key field in config must be present") - exit_flag = True - - if apikey == None or apikey == "": - await socket.send("database key field in config must not be empty, please populate the database key") - exit_flag = True - if tbakey == None or tbakey == "": - await socket.send("tba key field in config must not be empty, please populate the tba key") - exit_flag = True - - if exit_flag: - sys.exit(1) - - await socket.send("found and loaded database and tba keys") - - previous_time = get_previous_time(apikey) - await socket.send("analysis backtimed to: " + str(previous_time)) - - start = time.time() - await socket.send("loading match data") - match_data = load_match(apikey, competition) - await socket.send("finished loading match data in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("performing analysis on match data") - results = matchloop(apikey, competition, match_data, match_tests, exec_threads) - await socket.send("finished match analysis in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("uploading match results to database") - push_match(apikey, competition, results) - await socket.send("finished uploading match results in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("performing analysis on team metrics") - results = metricloop(tbakey, apikey, competition, current_time, metrics_tests) - await socket.send("finished 
metric analysis and pushed to database in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("loading pit data") - pit_data = load_pit(apikey, competition) - await socket.send("finished loading pit data in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("performing analysis on pit data") - results = pitloop(apikey, competition, pit_data, pit_tests) - await socket.send("finished pit analysis in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("uploading pit results to database") - push_pit(apikey, competition, results) - await socket.send("finished uploading pit results in " + str(time.time() - start) + " seconds") - - set_current_time(apikey, current_time) - await socket.send("finished all tests in " + str(time.time() - loop_start) + " seconds, looping") - - except KeyboardInterrupt: - await socket.send("detected KeyboardInterrupt, killing threads") - if "exec_threads" in locals(): - exec_threads.terminate() - exec_threads.join() - exec_threads.close() - await socket.send("terminated threads, exiting") - loop_stored_exception = sys.exc_info() - loop_exit_code = 0 - break - except Exception as e: - await socket.send("encountered an exception while running") - print(e, file = stderr) - loop_exit_code = 1 - break - - sys.exit(loop_exit_code) - -def load_config(path, config_vector): - try: - f = open(path, "r") - config_vector.update(json.load(f)) - f.close() - #socket.send("found and opened config at <" + path + ">") - return 0 - except: - #log(stderr, ERR, "could not find config at <" + path + ">, generating blank config and exiting", code = 100) - f = open(path, "w") - f.write(sample_json) - f.close() - return 1 - -def save_config(path, config_vector): - try: - f = open(path) - json.dump(config_vector) - f.close() - return 0 - except: - return 1 - -import daemon -from daemon import pidfile -from signal import SIGTERM - -def start(pid_path): - #print("starting") - with daemon.DaemonContext( - working_directory=os.getcwd(), - pidfile=pidfile.TimeoutPIDLockFile(pid_path), - ): - start_server = websockets.serve(main, "127.0.0.1", 5678) - asyncio.get_event_loop().run_until_complete(start_server) - asyncio.get_event_loop().run_forever() - -def stop(pid_path): - #print("stopping") - try: - pf = open(pid_path, 'r') - pid = int(pf.read().strip()) - pf.close() - except IOError: - sys.stderr.write("pidfile at <" + pid_path + "> does not exist. 
Daemon not running?\n") - return - - try: - while True: - os.kill(pid, SIGTERM) - time.sleep(0.01) - except OSError as err: - err = str(err) - if err.find("No such process") > 0: - if os.path.exists(pid_path): - os.remove(pid_path) - else: - print(str(err)) - sys.exit(1) - -def restart(pid_path): - #print("restarting") - stop(pid_path) - start(pid_path) - -if __name__ == "__main__": - pid_path = "/var/run/tra-daemon.pid" - if len(sys.argv) == 2: - if 'start' == sys.argv[1]: - start(pid_path) - elif 'stop' == sys.argv[1]: - stop(pid_path) - elif 'restart' == sys.argv[1]: - restart(pid_path) - else: - print("usage: %s start|stop|restart" % sys.argv[0]) - sys.exit(2) - sys.exit(0) - else: - print("usage: %s start|stop|restart" % sys.argv[0]) - sys.exit(2) \ No newline at end of file From c2f35f4cb2033c126c8277222f9428c1d5dd4433 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Fri, 13 Aug 2021 21:57:03 +0000 Subject: [PATCH 16/29] superscript v 0.9.2 Former-commit-id: e559ced751ae22c8c91c024940f50150e7da28ea --- src/cli/interface.py | 2 +- src/cli/superscript-socket.py | 2 +- src/cli/superscript.py | 7 +++++-- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/cli/interface.py b/src/cli/interface.py index 9ee821a..3754e77 100644 --- a/src/cli/interface.py +++ b/src/cli/interface.py @@ -17,7 +17,7 @@ stderr = sys.stderr def log(target, level, message, code = 0): - message = time.ctime() + empty_delim + str(level) + l_brack + f"{code:04}" + r_brack + empty_delim + soft_divided_delim + empty_delim + message + message = time.ctime() + empty_delim + str(level) + l_brack + f"{code:+05}" + r_brack + empty_delim + soft_divided_delim + empty_delim + message print(message, file = target) def clear(): diff --git a/src/cli/superscript-socket.py b/src/cli/superscript-socket.py index 0df6922..f6a46ba 100644 --- a/src/cli/superscript-socket.py +++ b/src/cli/superscript-socket.py @@ -309,7 +309,7 @@ def restart(pid_path): start(pid_path) if __name__ == "__main__": - pid_path = "/var/run/tra-daemon.pid" + pid_path = "tra-daemon.pid" if len(sys.argv) == 2: if 'start' == sys.argv[1]: start(pid_path) diff --git a/src/cli/superscript.py b/src/cli/superscript.py index 01d2397..5c146aa 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -3,10 +3,13 @@ # Notes: # setup: -__version__ = "0.9.1" +__version__ = "0.9.2" # changelog should be viewed using print(analysis.__changelog__) __changelog__ = """changelog: + 0.9.2: + - removed unessasary imports from data + - minor changes to interface 0.9.1: - fixed bugs in configuration item loading exception handling 0.9.0: @@ -138,7 +141,7 @@ import warnings import sys from interface import splash, log, ERR, INF, stdout, stderr -from data import get_previous_time, set_current_time, load_match, push_match, load_metric, push_metric, load_pit, push_pit +from data import get_previous_time, set_current_time, load_match, push_match, load_pit, push_pit from processing import matchloop, metricloop, pitloop config_path = "config.json" From b4c7365bf04e450343e453b213427b223f3cae02 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Fri, 13 Aug 2021 23:32:59 +0000 Subject: [PATCH 17/29] added *.pid files to git ignore Former-commit-id: f8f8d6bf2776ef26b79c23aac300756ad10b2781 --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 0e3db09..1158381 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,8 @@ **/tra_analysis/ **/temp/* +**/*.pid + **/errorlog.txt /dist/superscript.* /dist/superscript \ No newline at end of file 
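[Editor's note on the interface.py hunk in PATCH 16 above: the log prefix switches its error-code field from a four-digit zero-padded format spec ({code:04}) to a five-character, zero-padded spec with an explicit sign ({code:+05}). The following is a minimal, standalone Python sketch of that difference, for illustration only; it is not part of any patch in this series and assumes nothing beyond a standard Python interpreter.]

    # old format spec: width 4, zero-padded
    print(f"{109:04}")   # prints: 0109
    # new format spec: width 5, zero-padded, sign always shown
    print(f"{109:+05}")  # prints: +0109
    print(f"{0:+05}")    # prints: +0000

[With the new spec, a typical log prefix built by log() would read something like "[INF][+0000]" instead of "[INF][0000]".]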
From bcbb653696dadcd803e46f0d94090b6a20917479 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Tue, 17 Aug 2021 21:02:08 +0000 Subject: [PATCH 18/29] daemonized, socketed superscript improved runtime removed superscript-socket.py Former-commit-id: 6fa29f767dcc649c7d049baddaaa1d865c5c3517 --- .gitignore | 2 + src/cli/data.py | 48 ++--- src/cli/processing.py | 12 +- src/cli/superscript-socket.py | 326 ---------------------------------- src/cli/superscript.py | 275 ++++++++++++++++++++++++++-- 5 files changed, 286 insertions(+), 377 deletions(-) delete mode 100644 src/cli/superscript-socket.py diff --git a/.gitignore b/.gitignore index 1158381..97705c5 100644 --- a/.gitignore +++ b/.gitignore @@ -11,6 +11,8 @@ **/*.pid +**/profile + **/errorlog.txt /dist/superscript.* /dist/superscript \ No newline at end of file diff --git a/src/cli/data.py b/src/cli/data.py index fb5dc27..5c35d43 100644 --- a/src/cli/data.py +++ b/src/cli/data.py @@ -11,8 +11,7 @@ def pull_new_tba_matches(apikey, competition, cutoff): out.append({"match" : i['match_number'], "blue" : list(map(lambda x: int(x[3:]), i['alliances']['blue']['team_keys'])), "red" : list(map(lambda x: int(x[3:]), i['alliances']['red']['team_keys'])), "winner": i["winning_alliance"]}) return out -def get_team_match_data(apikey, competition, team_num): - client = pymongo.MongoClient(apikey) +def get_team_match_data(client, competition, team_num): db = client.data_scouting mdata = db.matchdata out = {} @@ -20,98 +19,87 @@ def get_team_match_data(apikey, competition, team_num): out[i['match']] = i['data'] return pd.DataFrame(out) -def get_team_pit_data(apikey, competition, team_num): - client = pymongo.MongoClient(apikey) +def get_team_pit_data(client, competition, team_num): db = client.data_scouting mdata = db.pitdata out = {} return mdata.find_one({"competition" : competition, "team_scouted": team_num})["data"] -def get_team_metrics_data(apikey, competition, team_num): - client = pymongo.MongoClient(apikey) +def get_team_metrics_data(client, competition, team_num): db = client.data_processing mdata = db.team_metrics return mdata.find_one({"competition" : competition, "team": team_num}) -def get_match_data_formatted(apikey, competition): - client = pymongo.MongoClient(apikey) +def get_match_data_formatted(client, competition): db = client.data_scouting mdata = db.teamlist x=mdata.find_one({"competition":competition}) out = {} for i in x: try: - out[int(i)] = unkeyify_2l(get_team_match_data(apikey, competition, int(i)).transpose().to_dict()) + out[int(i)] = unkeyify_2l(get_team_match_data(client, competition, int(i)).transpose().to_dict()) except: pass return out -def get_metrics_data_formatted(apikey, competition): - client = pymongo.MongoClient(apikey) +def get_metrics_data_formatted(client, competition): db = client.data_scouting mdata = db.teamlist x=mdata.find_one({"competition":competition}) out = {} for i in x: try: - out[int(i)] = get_team_metrics_data(apikey, competition, int(i)) + out[int(i)] = get_team_metrics_data(client, competition, int(i)) except: pass return out -def get_pit_data_formatted(apikey, competition): - client = pymongo.MongoClient(apikey) +def get_pit_data_formatted(client, competition): db = client.data_scouting mdata = db.teamlist x=mdata.find_one({"competition":competition}) out = {} for i in x: try: - out[int(i)] = get_team_pit_data(apikey, competition, int(i)) + out[int(i)] = get_team_pit_data(client, competition, int(i)) except: pass return out -def get_pit_variable_data(apikey, competition): - client = 
pymongo.MongoClient(apikey) +def get_pit_variable_data(client, competition): db = client.data_processing mdata = db.team_pit out = {} return mdata.find() -def get_pit_variable_formatted(apikey, competition): - temp = get_pit_variable_data(apikey, competition) +def get_pit_variable_formatted(client, competition): + temp = get_pit_variable_data(client, competition) out = {} for i in temp: out[i["variable"]] = i["data"] return out -def push_team_tests_data(apikey, competition, team_num, data, dbname = "data_processing", colname = "team_tests"): - client = pymongo.MongoClient(apikey) +def push_team_tests_data(client, competition, team_num, data, dbname = "data_processing", colname = "team_tests"): db = client[dbname] mdata = db[colname] mdata.replace_one({"competition" : competition, "team": team_num}, {"_id": competition+str(team_num)+"am", "competition" : competition, "team" : team_num, "data" : data}, True) -def push_team_metrics_data(apikey, competition, team_num, data, dbname = "data_processing", colname = "team_metrics"): - client = pymongo.MongoClient(apikey) +def push_team_metrics_data(client, competition, team_num, data, dbname = "data_processing", colname = "team_metrics"): db = client[dbname] mdata = db[colname] mdata.replace_one({"competition" : competition, "team": team_num}, {"_id": competition+str(team_num)+"am", "competition" : competition, "team" : team_num, "metrics" : data}, True) -def push_team_pit_data(apikey, competition, variable, data, dbname = "data_processing", colname = "team_pit"): - client = pymongo.MongoClient(apikey) +def push_team_pit_data(client, competition, variable, data, dbname = "data_processing", colname = "team_pit"): db = client[dbname] mdata = db[colname] mdata.replace_one({"competition" : competition, "variable": variable}, {"competition" : competition, "variable" : variable, "data" : data}, True) -def get_analysis_flags(apikey, flag): - client = pymongo.MongoClient(apikey) +def get_analysis_flags(client, flag): db = client.data_processing mdata = db.flags return mdata.find_one({flag:{"$exists":True}}) -def set_analysis_flags(apikey, flag, data): - client = pymongo.MongoClient(apikey) +def set_analysis_flags(client, flag, data): db = client.data_processing mdata = db.flags return mdata.replace_one({flag:{"$exists":True}}, data, True) @@ -158,7 +146,7 @@ def load_metric(apikey, competition, match, group_name, metrics): db_data = get_team_metrics_data(apikey, competition, team) - if get_team_metrics_data(apikey, competition, team) == None: + if db_data == None: elo = {"score": metrics["elo"]["score"]} gl2 = {"score": metrics["gl2"]["score"], "rd": metrics["gl2"]["rd"], "vol": metrics["gl2"]["vol"]} diff --git a/src/cli/processing.py b/src/cli/processing.py index 901522e..fe028e5 100644 --- a/src/cli/processing.py +++ b/src/cli/processing.py @@ -36,7 +36,7 @@ def simplestats(data_test): if test == "regression_sigmoidal": return an.regression(ranges, data, ['sig']) -def matchloop(apikey, competition, data, tests, exec_threads): +def matchloop(client, competition, data, tests, exec_threads): short_mapping = {"regression_linear": "lin", "regression_logarithmic": "log", "regression_exponential": "exp", "regression_polynomial": "ply", "regression_sigmoidal": "sig"} @@ -88,7 +88,7 @@ def matchloop(apikey, competition, data, tests, exec_threads): return return_vector -def metricloop(tbakey, apikey, competition, timestamp, metrics): # listener based metrics update +def metricloop(tbakey, client, competition, timestamp, metrics): # listener based metrics update 
elo_N = metrics["elo"]["N"] elo_K = metrics["elo"]["K"] @@ -100,8 +100,8 @@ def metricloop(tbakey, apikey, competition, timestamp, metrics): # listener base for match in matches: - red = load_metric(apikey, competition, match, "red", metrics) - blu = load_metric(apikey, competition, match, "blue", metrics) + red = load_metric(client, competition, match, "red", metrics) + blu = load_metric(client, competition, match, "blue", metrics) elo_red_total = 0 elo_blu_total = 0 @@ -179,9 +179,9 @@ def metricloop(tbakey, apikey, competition, timestamp, metrics): # listener base temp_vector.update(red) temp_vector.update(blu) - push_metric(apikey, competition, temp_vector) + push_metric(client, competition, temp_vector) -def pitloop(apikey, competition, pit, tests): +def pitloop(client, competition, pit, tests): return_vector = {} for team in pit: diff --git a/src/cli/superscript-socket.py b/src/cli/superscript-socket.py deleted file mode 100644 index f6a46ba..0000000 --- a/src/cli/superscript-socket.py +++ /dev/null @@ -1,326 +0,0 @@ -# testing purposes only, not to be used or run - -import json -import os -import math -from multiprocessing import Pool -import time -import warnings -import sys -import asyncio -import websockets - -from interface import splash, log, ERR, INF, stdout, stderr -from data import get_previous_time, set_current_time, load_match, push_match, load_metric, push_metric, load_pit, push_pit -from processing import matchloop, metricloop, pitloop - -config_path = "config.json" -sample_json = """{ - "max-threads": 0.5, - "team": "", - "competition": "2020ilch", - "key":{ - "database":"", - "tba":"" - }, - "statistics":{ - "match":{ - "balls-blocked":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], - "balls-collected":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], - "balls-lower-teleop":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], - "balls-lower-auto":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], - "balls-started":["basic_stats","historical_analyss","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], - "balls-upper-teleop":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"], - "balls-upper-auto":["basic_stats","historical_analysis","regression_linear","regression_logarithmic","regression_exponential","regression_polynomial","regression_sigmoidal"] - - }, - "metric":{ - "elo":{ - "score":1500, - "N":400, - "K":24 - }, - "gl2":{ - "score":1500, - "rd":250, - "vol":0.06 - }, - "ts":{ - "mu":25, - "sigma":8.33 - } - }, - "pit":{ - "wheel-mechanism":true, - "low-balls":true, - "high-balls":true, - "wheel-success":true, - "strategic-focus":true, - "climb-mechanism":true, - "attitude":true - } - } -}""" - -async def main(socket, path): - - #warnings.filterwarnings("ignore") - #sys.stderr = open("errorlog.txt", "w") - - #splash(__version__) - - #loop_exit_code = 0 - #loop_stored_exception = None - - while True: - - try: - - loop_start = time.time() - - current_time = time.time() - await 
socket.send("current time: " + str(current_time)) - - config = {} - if load_config(config_path, config) == 1: - sys.exit(1) - - error_flag = False - - try: - competition = config["competition"] - except: - await socket.send("could not find competition field in config") - error_flag = True - try: - match_tests = config["statistics"]["match"] - except: - await socket.send("could not find match_tests field in config") - error_flag = True - try: - metrics_tests = config["statistics"]["metric"] - except: - await socket.send("could not find metrics_tests field in config") - error_flag = True - try: - pit_tests = config["statistics"]["pit"] - except: - await socket.send("could not find pit_tests field in config") - error_flag = True - - if error_flag: - sys.exit(1) - error_flag = False - - if competition == None or competition == "": - await socket.send("competition field in config must not be empty") - error_flag = True - if match_tests == None: - await socket.send("match_tests field in config must not be empty") - error_flag = True - if metrics_tests == None: - await socket.send("metrics_tests field in config must not be empty") - error_flag = True - if pit_tests == None: - await socket.send("pit_tests field in config must not be empty") - error_flag = True - - if error_flag: - sys.exit(1) - - await socket.send("found and loaded competition, match_tests, metrics_tests, pit_tests from config") - - sys_max_threads = os.cpu_count() - try: - cfg_max_threads = config["max-threads"] - except: - await socket.send("max-threads field in config must not be empty, refer to documentation for configuration options", code = 109) - sys.exit(1) - - if cfg_max_threads > -sys_max_threads and cfg_max_threads < 0 : - alloc_processes = sys_max_threads + cfg_max_threads - elif cfg_max_threads > 0 and cfg_max_threads < 1: - alloc_processes = math.floor(cfg_max_threads * sys_max_threads) - elif cfg_max_threads > 1 and cfg_max_threads <= sys_max_threads: - alloc_processes = cfg_max_threads - elif cfg_max_threads == 0: - alloc_processes = sys_max_threads - else: - await socket.send("max-threads must be between -" + str(sys_max_threads) + " and " + str(sys_max_threads) + ", but got " + cfg_max_threads) - sys.exit(1) - - await socket.send("found and loaded max-threads from config") - await socket.send("attempting to start " + str(alloc_processes) + " threads") - try: - exec_threads = Pool(processes = alloc_processes) - except Exception as e: - await socket.send("unable to start threads") - #log(stderr, INF, e) - sys.exit(1) - await socket.send("successfully initialized " + str(alloc_processes) + " threads") - - exit_flag = False - - try: - apikey = config["key"]["database"] - except: - await socket.send("database key field in config must be present") - exit_flag = True - try: - tbakey = config["key"]["tba"] - except: - await socket.send("tba key field in config must be present") - exit_flag = True - - if apikey == None or apikey == "": - await socket.send("database key field in config must not be empty, please populate the database key") - exit_flag = True - if tbakey == None or tbakey == "": - await socket.send("tba key field in config must not be empty, please populate the tba key") - exit_flag = True - - if exit_flag: - sys.exit(1) - - await socket.send("found and loaded database and tba keys") - - previous_time = get_previous_time(apikey) - await socket.send("analysis backtimed to: " + str(previous_time)) - - start = time.time() - await socket.send("loading match data") - match_data = load_match(apikey, competition) - 
await socket.send("finished loading match data in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("performing analysis on match data") - results = matchloop(apikey, competition, match_data, match_tests, exec_threads) - await socket.send("finished match analysis in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("uploading match results to database") - push_match(apikey, competition, results) - await socket.send("finished uploading match results in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("performing analysis on team metrics") - results = metricloop(tbakey, apikey, competition, current_time, metrics_tests) - await socket.send("finished metric analysis and pushed to database in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("loading pit data") - pit_data = load_pit(apikey, competition) - await socket.send("finished loading pit data in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("performing analysis on pit data") - results = pitloop(apikey, competition, pit_data, pit_tests) - await socket.send("finished pit analysis in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("uploading pit results to database") - push_pit(apikey, competition, results) - await socket.send("finished uploading pit results in " + str(time.time() - start) + " seconds") - - set_current_time(apikey, current_time) - await socket.send("finished all tests in " + str(time.time() - loop_start) + " seconds, looping") - - except KeyboardInterrupt: - await socket.send("detected KeyboardInterrupt, killing threads") - if "exec_threads" in locals(): - exec_threads.terminate() - exec_threads.join() - exec_threads.close() - await socket.send("terminated threads, exiting") - loop_stored_exception = sys.exc_info() - loop_exit_code = 0 - break - except Exception as e: - await socket.send("encountered an exception while running") - print(e, file = stderr) - loop_exit_code = 1 - break - - sys.exit(loop_exit_code) - -def load_config(path, config_vector): - try: - f = open(path, "r") - config_vector.update(json.load(f)) - f.close() - #socket.send("found and opened config at <" + path + ">") - return 0 - except: - #log(stderr, ERR, "could not find config at <" + path + ">, generating blank config and exiting", code = 100) - f = open(path, "w") - f.write(sample_json) - f.close() - return 1 - -def save_config(path, config_vector): - try: - f = open(path) - json.dump(config_vector) - f.close() - return 0 - except: - return 1 - -import daemon -from daemon import pidfile -from signal import SIGTERM - -def start(pid_path): - #print("starting") - with daemon.DaemonContext( - working_directory=os.getcwd(), - pidfile=pidfile.TimeoutPIDLockFile(pid_path), - ): - start_server = websockets.serve(main, "127.0.0.1", 5678) - asyncio.get_event_loop().run_until_complete(start_server) - asyncio.get_event_loop().run_forever() - -def stop(pid_path): - #print("stopping") - try: - pf = open(pid_path, 'r') - pid = int(pf.read().strip()) - pf.close() - except IOError: - sys.stderr.write("pidfile at <" + pid_path + "> does not exist. 
Daemon not running?\n") - return - - try: - while True: - os.kill(pid, SIGTERM) - time.sleep(0.01) - except OSError as err: - err = str(err) - if err.find("No such process") > 0: - if os.path.exists(pid_path): - os.remove(pid_path) - else: - print(str(err)) - sys.exit(1) - -def restart(pid_path): - #print("restarting") - stop(pid_path) - start(pid_path) - -if __name__ == "__main__": - pid_path = "tra-daemon.pid" - if len(sys.argv) == 2: - if 'start' == sys.argv[1]: - start(pid_path) - elif 'stop' == sys.argv[1]: - stop(pid_path) - elif 'restart' == sys.argv[1]: - restart(pid_path) - else: - print("usage: %s start|stop|restart" % sys.argv[0]) - sys.exit(2) - sys.exit(0) - else: - print("usage: %s start|stop|restart" % sys.argv[0]) - sys.exit(2) \ No newline at end of file diff --git a/src/cli/superscript.py b/src/cli/superscript.py index 5c146aa..009c579 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -3,10 +3,17 @@ # Notes: # setup: -__version__ = "0.9.2" +__version__ = "1.0.0" # changelog should be viewed using print(analysis.__changelog__) __changelog__ = """changelog: + 1.0.0: + - superscript now runs in PEP 3143 compliant well behaved daemon on Linux systems + - linux superscript daemon has integrated websocket output to monitor progress/status remotely + - linux daemon now sends stderr to errorlog.txt + 0.9.3: + - improved data loading performance by removing redundant PyMongo client creation (120s to 14s) + - passed singular instance of PyMongo client as standin for apikey parameter in all data.py functions 0.9.2: - removed unessasary imports from data - minor changes to interface @@ -139,9 +146,12 @@ from multiprocessing import Pool, freeze_support import time import warnings import sys +import asyncio +import websockets +import pymongo from interface import splash, log, ERR, INF, stdout, stderr -from data import get_previous_time, set_current_time, load_match, push_match, load_pit, push_pit +from data import get_previous_time, set_current_time, load_match, push_match, load_metric, push_metric, load_pit, push_pit from processing import matchloop, metricloop, pitloop config_path = "config.json" @@ -192,7 +202,181 @@ sample_json = """{ } }""" -def main(): +async def main_lin(socket, path): + + while True: + + try: + + loop_start = time.time() + + current_time = time.time() + await socket.send("current time: " + str(current_time)) + + config = {} + if load_config(config_path, config) == 1: + sys.exit(1) + + error_flag = False + + try: + competition = config["competition"] + except: + await socket.send("could not find competition field in config") + error_flag = True + try: + match_tests = config["statistics"]["match"] + except: + await socket.send("could not find match_tests field in config") + error_flag = True + try: + metrics_tests = config["statistics"]["metric"] + except: + await socket.send("could not find metrics_tests field in config") + error_flag = True + try: + pit_tests = config["statistics"]["pit"] + except: + await socket.send("could not find pit_tests field in config") + error_flag = True + + if error_flag: + sys.exit(1) + error_flag = False + + if competition == None or competition == "": + await socket.send("competition field in config must not be empty") + error_flag = True + if match_tests == None: + await socket.send("match_tests field in config must not be empty") + error_flag = True + if metrics_tests == None: + await socket.send("metrics_tests field in config must not be empty") + error_flag = True + if pit_tests == None: + await 
socket.send("pit_tests field in config must not be empty") + error_flag = True + + if error_flag: + sys.exit(1) + + await socket.send("found and loaded competition, match_tests, metrics_tests, pit_tests from config") + + sys_max_threads = os.cpu_count() + try: + cfg_max_threads = config["max-threads"] + except: + await socket.send("max-threads field in config must not be empty, refer to documentation for configuration options", code = 109) + sys.exit(1) + + if cfg_max_threads > -sys_max_threads and cfg_max_threads < 0 : + alloc_processes = sys_max_threads + cfg_max_threads + elif cfg_max_threads > 0 and cfg_max_threads < 1: + alloc_processes = math.floor(cfg_max_threads * sys_max_threads) + elif cfg_max_threads > 1 and cfg_max_threads <= sys_max_threads: + alloc_processes = cfg_max_threads + elif cfg_max_threads == 0: + alloc_processes = sys_max_threads + else: + await socket.send("max-threads must be between -" + str(sys_max_threads) + " and " + str(sys_max_threads) + ", but got " + cfg_max_threads) + sys.exit(1) + + await socket.send("found and loaded max-threads from config") + await socket.send("attempting to start " + str(alloc_processes) + " threads") + try: + exec_threads = Pool(processes = alloc_processes) + except Exception as e: + await socket.send("unable to start threads") + sys.exit(1) + await socket.send("successfully initialized " + str(alloc_processes) + " threads") + + exit_flag = False + + try: + apikey = config["key"]["database"] + except: + await socket.send("database key field in config must be present") + exit_flag = True + try: + tbakey = config["key"]["tba"] + except: + await socket.send("tba key field in config must be present") + exit_flag = True + + if apikey == None or apikey == "": + await socket.send("database key field in config must not be empty, please populate the database key") + exit_flag = True + if tbakey == None or tbakey == "": + await socket.send("tba key field in config must not be empty, please populate the tba key") + exit_flag = True + + if exit_flag: + sys.exit(1) + + await socket.send("found and loaded database and tba keys") + + client = pymongo.MongoClient(apikey) + + previous_time = get_previous_time(client) + await socket.send("analysis backtimed to: " + str(previous_time)) + + start = time.time() + await socket.send("loading match data") + match_data = load_match(client, competition) + await socket.send("finished loading match data in " + str(time.time() - start) + " seconds") + + start = time.time() + await socket.send("performing analysis on match data") + results = matchloop(client, competition, match_data, match_tests, exec_threads) + await socket.send("finished match analysis in " + str(time.time() - start) + " seconds") + + start = time.time() + await socket.send("uploading match results to database") + push_match(client, competition, results) + await socket.send("finished uploading match results in " + str(time.time() - start) + " seconds") + + start = time.time() + await socket.send("performing analysis on team metrics") + results = metricloop(tbakey, client, competition, current_time, metrics_tests) + await socket.send("finished metric analysis and pushed to database in " + str(time.time() - start) + " seconds") + + start = time.time() + await socket.send("loading pit data") + pit_data = load_pit(client, competition) + await socket.send("finished loading pit data in " + str(time.time() - start) + " seconds") + + start = time.time() + await socket.send("performing analysis on pit data") + results = pitloop(client, competition, 
pit_data, pit_tests) + await socket.send("finished pit analysis in " + str(time.time() - start) + " seconds") + + start = time.time() + await socket.send("uploading pit results to database") + push_pit(client, competition, results) + await socket.send("finished uploading pit results in " + str(time.time() - start) + " seconds") + + set_current_time(client, current_time) + await socket.send("finished all tests in " + str(time.time() - loop_start) + " seconds, looping") + + except KeyboardInterrupt: + await socket.send("detected KeyboardInterrupt, killing threads") + if "exec_threads" in locals(): + exec_threads.terminate() + exec_threads.join() + exec_threads.close() + await socket.send("terminated threads, exiting") + loop_stored_exception = sys.exc_info() + loop_exit_code = 0 + break + except Exception as e: + await socket.send("encountered an exception while running") + print(e) + loop_exit_code = 1 + break + + sys.exit(loop_exit_code) + +def main_win(): # windows main function warnings.filterwarnings("ignore") sys.stderr = open("errorlog.txt", "w") @@ -314,45 +498,47 @@ def main(): log(stdout, INF, "found and loaded database and tba keys") - previous_time = get_previous_time(apikey) + client = pymongo.MongoClient(apikey) + + previous_time = get_previous_time(client) log(stdout, INF, "analysis backtimed to: " + str(previous_time)) start = time.time() log(stdout, INF, "loading match data") - match_data = load_match(apikey, competition) + match_data = load_match(client, competition) log(stdout, INF, "finished loading match data in " + str(time.time() - start) + " seconds") start = time.time() log(stdout, INF, "performing analysis on match data") - results = matchloop(apikey, competition, match_data, match_tests, exec_threads) + results = matchloop(client, competition, match_data, match_tests, exec_threads) log(stdout, INF, "finished match analysis in " + str(time.time() - start) + " seconds") start = time.time() log(stdout, INF, "uploading match results to database") - push_match(apikey, competition, results) + push_match(client, competition, results) log(stdout, INF, "finished uploading match results in " + str(time.time() - start) + " seconds") start = time.time() log(stdout, INF, "performing analysis on team metrics") - results = metricloop(tbakey, apikey, competition, current_time, metrics_tests) + results = metricloop(tbakey, client, competition, current_time, metrics_tests) log(stdout, INF, "finished metric analysis and pushed to database in " + str(time.time() - start) + " seconds") start = time.time() log(stdout, INF, "loading pit data") - pit_data = load_pit(apikey, competition) + pit_data = load_pit(client, competition) log(stdout, INF, "finished loading pit data in " + str(time.time() - start) + " seconds") start = time.time() log(stdout, INF, "performing analysis on pit data") - results = pitloop(apikey, competition, pit_data, pit_tests) + results = pitloop(client, competition, pit_data, pit_tests) log(stdout, INF, "finished pit analysis in " + str(time.time() - start) + " seconds") start = time.time() log(stdout, INF, "uploading pit results to database") - push_pit(apikey, competition, results) + push_pit(client, competition, results) log(stdout, INF, "finished uploading pit results in " + str(time.time() - start) + " seconds") - set_current_time(apikey, current_time) + set_current_time(client, current_time) log(stdout, INF, "finished all tests in " + str(time.time() - loop_start) + " seconds, looping") except KeyboardInterrupt: @@ -378,10 +564,10 @@ def load_config(path, 
config_vector): f = open(path, "r") config_vector.update(json.load(f)) f.close() - log(stdout, INF, "found and opened config at <" + path + ">") + #socket.send("found and opened config at <" + path + ">") return 0 except: - log(stderr, ERR, "could not find config at <" + path + ">, generating blank config and exiting", code = 100) + #log(stderr, ERR, "could not find config at <" + path + ">, generating blank config and exiting", code = 100) f = open(path, "w") f.write(sample_json) f.close() @@ -396,7 +582,66 @@ def save_config(path, config_vector): except: return 1 +import daemon +from daemon import pidfile +from signal import SIGTERM + +def start(pid_path, profile = False): + f = open('errorlog.txt', 'w+') + with daemon.DaemonContext( + working_directory=os.getcwd(), + pidfile=pidfile.TimeoutPIDLockFile(pid_path), + stderr=f + ): + start_server = websockets.serve(main_lin, "127.0.0.1", 5678) + asyncio.get_event_loop().run_until_complete(start_server) + asyncio.get_event_loop().run_forever() + +def stop(pid_path): + try: + pf = open(pid_path, 'r') + pid = int(pf.read().strip()) + pf.close() + except IOError: + sys.stderr.write("pidfile at <" + pid_path + "> does not exist. Daemon not running?\n") + return + + try: + while True: + os.kill(pid, SIGTERM) + time.sleep(0.01) + except OSError as err: + err = str(err) + if err.find("No such process") > 0: + if os.path.exists(pid_path): + os.remove(pid_path) + else: + print(str(err)) + sys.exit(1) + +def restart(pid_path): + stop(pid_path) + start(pid_path) + if __name__ == "__main__": + if sys.platform.startswith("win"): freeze_support() - main() \ No newline at end of file + main_win() + + else: + pid_path = "tra-daemon.pid" + if len(sys.argv) == 2: + if 'start' == sys.argv[1]: + start(pid_path) + elif 'stop' == sys.argv[1]: + stop(pid_path) + elif 'restart' == sys.argv[1]: + restart(pid_path) + else: + print("usage: %s start|stop|restart|profile" % sys.argv[0]) + sys.exit(2) + sys.exit(0) + else: + print("usage: %s start|stop|restart|profile" % sys.argv[0]) + sys.exit(2) \ No newline at end of file From 871b313d95cc0e49a62beffddf344d5c744d4f22 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Tue, 17 Aug 2021 14:17:40 -0700 Subject: [PATCH 19/29] fixed linux only import in windows exec Former-commit-id: 61ae377e97a63385ffaaaa13413b207f54524902 --- src/cli/superscript.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/src/cli/superscript.py b/src/cli/superscript.py index 009c579..c16b7a7 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -376,7 +376,7 @@ async def main_lin(socket, path): sys.exit(loop_exit_code) -def main_win(): # windows main function +def main_win(): warnings.filterwarnings("ignore") sys.stderr = open("errorlog.txt", "w") @@ -582,10 +582,6 @@ def save_config(path, config_vector): except: return 1 -import daemon -from daemon import pidfile -from signal import SIGTERM - def start(pid_path, profile = False): f = open('errorlog.txt', 'w+') with daemon.DaemonContext( @@ -630,6 +626,9 @@ if __name__ == "__main__": main_win() else: + import daemon + from daemon import pidfile + from signal import SIGTERM pid_path = "tra-daemon.pid" if len(sys.argv) == 2: if 'start' == sys.argv[1]: From cafb773d8b25e0ec44ff80eedd3c5704e60501a8 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Wed, 18 Aug 2021 00:43:00 +0000 Subject: [PATCH 20/29] replaced profile to verbose option Former-commit-id: 055c296c00aa23bbc43e3c5e264a4e09c435b1cb --- src/cli/superscript.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 
deletions(-) diff --git a/src/cli/superscript.py b/src/cli/superscript.py index c16b7a7..de7f637 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -11,6 +11,7 @@ __changelog__ = """changelog: - superscript now runs in PEP 3143 compliant well behaved daemon on Linux systems - linux superscript daemon has integrated websocket output to monitor progress/status remotely - linux daemon now sends stderr to errorlog.txt + - added verbose option to linux superscript to allow for interactive output 0.9.3: - improved data loading performance by removing redundant PyMongo client creation (120s to 14s) - passed singular instance of PyMongo client as standin for apikey parameter in all data.py functions @@ -637,10 +638,12 @@ if __name__ == "__main__": stop(pid_path) elif 'restart' == sys.argv[1]: restart(pid_path) + elif 'verbose' == sys.argv[1]: + main_win() else: - print("usage: %s start|stop|restart|profile" % sys.argv[0]) + print("usage: %s start|stop|restart|verbose" % sys.argv[0]) sys.exit(2) sys.exit(0) else: - print("usage: %s start|stop|restart|profile" % sys.argv[0]) + print("usage: %s start|stop|restart|verbose" % sys.argv[0]) sys.exit(2) \ No newline at end of file From b6a1dfedb9c05ddf74cb9f906b017d79b6b2ac45 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Wed, 18 Aug 2021 03:04:01 +0000 Subject: [PATCH 21/29] not working Former-commit-id: e82177d17be396d02c30f55dc70b1a014e3388dd --- src/cli/superscript.py | 2 +- src/cli/superscript.spec | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/cli/superscript.py b/src/cli/superscript.py index de7f637..800ddfc 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -371,7 +371,7 @@ async def main_lin(socket, path): break except Exception as e: await socket.send("encountered an exception while running") - print(e) + await socket.send(str(e)) loop_exit_code = 1 break diff --git a/src/cli/superscript.spec b/src/cli/superscript.spec index 04cafa7..7664ae0 100644 --- a/src/cli/superscript.spec +++ b/src/cli/superscript.spec @@ -10,6 +10,8 @@ a = Analysis(['superscript.py'], "dnspython", "sklearn.utils._weight_vector", "requests", + "websockets.legacy", + "websockets.legacy.server", ], hookspath=[], runtime_hooks=[], From 0287b5c0e209ea4389713c65d96553e5bcaf6a2b Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Wed, 18 Aug 2021 03:26:47 +0000 Subject: [PATCH 22/29] temporary workaround for missing ssl cert Former-commit-id: 739a2f36f8bfd0cce2b272c82f9a961554386f60 --- src/cli/data.py | 3 +-- src/cli/superscript.py | 1 + 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/cli/data.py b/src/cli/data.py index 5c35d43..e5bfb5b 100644 --- a/src/cli/data.py +++ b/src/cli/data.py @@ -1,10 +1,9 @@ import requests -import pymongo import pandas as pd def pull_new_tba_matches(apikey, competition, cutoff): api_key= apikey - x=requests.get("https://www.thebluealliance.com/api/v3/event/"+competition+"/matches/simple", headers={"X-TBA-Auth_Key":api_key}) + x=requests.get("https://www.thebluealliance.com/api/v3/event/"+competition+"/matches/simple", headers={"X-TBA-Auth_Key":api_key}, verify=False) out = [] for i in x.json(): if i["actual_time"] != None and i["actual_time"]-cutoff >= 0 and i["comp_level"] == "qm": diff --git a/src/cli/superscript.py b/src/cli/superscript.py index 800ddfc..34d82ee 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -12,6 +12,7 @@ __changelog__ = """changelog: - linux superscript daemon has integrated websocket output to monitor progress/status remotely - linux 
daemon now sends stderr to errorlog.txt - added verbose option to linux superscript to allow for interactive output + - moved pymongo import to superscript.py 0.9.3: - improved data loading performance by removing redundant PyMongo client creation (120s to 14s) - passed singular instance of PyMongo client as standin for apikey parameter in all data.py functions From ffead9e240c96b680663593725a4d3eb7c11bd2c Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Wed, 18 Aug 2021 05:13:25 +0000 Subject: [PATCH 23/29] fixed daemon no start issue Former-commit-id: efc353dafb9026d2d1fab1fa1be000e8997326d2 --- src/cli/superscript.py | 369 ++++++++++++++++++++++------------------- 1 file changed, 198 insertions(+), 171 deletions(-) diff --git a/src/cli/superscript.py b/src/cli/superscript.py index 34d82ee..d06dbea 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -151,6 +151,7 @@ import sys import asyncio import websockets import pymongo +import threading from interface import splash, log, ERR, INF, stdout, stderr from data import get_previous_time, set_current_time, load_match, push_match, load_metric, push_metric, load_pit, push_pit @@ -204,179 +205,213 @@ sample_json = """{ } }""" -async def main_lin(socket, path): +def main_lin(pid_path): + f = open('errorlog.txt', 'w+') + with daemon.DaemonContext( + working_directory=os.getcwd(), + pidfile=pidfile.TimeoutPIDLockFile(pid_path), + stderr=f + ): - while True: + async def handler(client, path): + clients.append(client) + while True: + try: + pong_waiter = await client.ping() + await pong_waiter + time.sleep(3) + except Exception as e: + clients.remove(client) + break - try: + clients = [] + start_server = websockets.serve(handler, "127.0.0.1", 5678) - loop_start = time.time() + asyncio.get_event_loop().run_until_complete(start_server) + threading.Thread(target = asyncio.get_event_loop().run_forever).start() - current_time = time.time() - await socket.send("current time: " + str(current_time)) + while True: - config = {} - if load_config(config_path, config) == 1: - sys.exit(1) - - error_flag = False - - try: - competition = config["competition"] - except: - await socket.send("could not find competition field in config") - error_flag = True - try: - match_tests = config["statistics"]["match"] - except: - await socket.send("could not find match_tests field in config") - error_flag = True - try: - metrics_tests = config["statistics"]["metric"] - except: - await socket.send("could not find metrics_tests field in config") - error_flag = True - try: - pit_tests = config["statistics"]["pit"] - except: - await socket.send("could not find pit_tests field in config") - error_flag = True + async def send_one(client, data): + await client.send(data) - if error_flag: - sys.exit(1) - error_flag = False + def send(data): + message_clients = clients.copy() + for client in message_clients: + try: + asyncio.run(send_one(client, data)) + except: + pass - if competition == None or competition == "": - await socket.send("competition field in config must not be empty") - error_flag = True - if match_tests == None: - await socket.send("match_tests field in config must not be empty") - error_flag = True - if metrics_tests == None: - await socket.send("metrics_tests field in config must not be empty") - error_flag = True - if pit_tests == None: - await socket.send("pit_tests field in config must not be empty") - error_flag = True - - if error_flag: - sys.exit(1) - - await socket.send("found and loaded competition, match_tests, metrics_tests, pit_tests from 
config") - - sys_max_threads = os.cpu_count() try: - cfg_max_threads = config["max-threads"] - except: - await socket.send("max-threads field in config must not be empty, refer to documentation for configuration options", code = 109) - sys.exit(1) - if cfg_max_threads > -sys_max_threads and cfg_max_threads < 0 : - alloc_processes = sys_max_threads + cfg_max_threads - elif cfg_max_threads > 0 and cfg_max_threads < 1: - alloc_processes = math.floor(cfg_max_threads * sys_max_threads) - elif cfg_max_threads > 1 and cfg_max_threads <= sys_max_threads: - alloc_processes = cfg_max_threads - elif cfg_max_threads == 0: - alloc_processes = sys_max_threads - else: - await socket.send("max-threads must be between -" + str(sys_max_threads) + " and " + str(sys_max_threads) + ", but got " + cfg_max_threads) - sys.exit(1) + loop_start = time.time() - await socket.send("found and loaded max-threads from config") - await socket.send("attempting to start " + str(alloc_processes) + " threads") - try: - exec_threads = Pool(processes = alloc_processes) + current_time = time.time() + send("current time: " + str(current_time)) + + config = {} + if load_config(config_path, config) == 1: + sys.exit(1) + + error_flag = False + + try: + competition = config["competition"] + except: + send("could not find competition field in config") + error_flag = True + try: + match_tests = config["statistics"]["match"] + except: + send("could not find match_tests field in config") + error_flag = True + try: + metrics_tests = config["statistics"]["metric"] + except: + send("could not find metrics_tests field in config") + error_flag = True + try: + pit_tests = config["statistics"]["pit"] + except: + send("could not find pit_tests field in config") + error_flag = True + + if error_flag: + sys.exit(1) + error_flag = False + + if competition == None or competition == "": + send("competition field in config must not be empty") + error_flag = True + if match_tests == None: + send("match_tests field in config must not be empty") + error_flag = True + if metrics_tests == None: + send("metrics_tests field in config must not be empty") + error_flag = True + if pit_tests == None: + send("pit_tests field in config must not be empty") + error_flag = True + + if error_flag: + sys.exit(1) + + send("found and loaded competition, match_tests, metrics_tests, pit_tests from config") + + sys_max_threads = os.cpu_count() + try: + cfg_max_threads = config["max-threads"] + except: + send("max-threads field in config must not be empty, refer to documentation for configuration options", code = 109) + sys.exit(1) + + if cfg_max_threads > -sys_max_threads and cfg_max_threads < 0 : + alloc_processes = sys_max_threads + cfg_max_threads + elif cfg_max_threads > 0 and cfg_max_threads < 1: + alloc_processes = math.floor(cfg_max_threads * sys_max_threads) + elif cfg_max_threads > 1 and cfg_max_threads <= sys_max_threads: + alloc_processes = cfg_max_threads + elif cfg_max_threads == 0: + alloc_processes = sys_max_threads + else: + send("max-threads must be between -" + str(sys_max_threads) + " and " + str(sys_max_threads) + ", but got " + cfg_max_threads) + sys.exit(1) + + send("found and loaded max-threads from config") + send("attempting to start " + str(alloc_processes) + " threads") + try: + exec_threads = Pool(processes = alloc_processes) + except Exception as e: + send("unable to start threads") + sys.exit(1) + send("successfully initialized " + str(alloc_processes) + " threads") + + exit_flag = False + + try: + apikey = config["key"]["database"] + except: + 
send("database key field in config must be present") + exit_flag = True + try: + tbakey = config["key"]["tba"] + except: + send("tba key field in config must be present") + exit_flag = True + + if apikey == None or apikey == "": + send("database key field in config must not be empty, please populate the database key") + exit_flag = True + if tbakey == None or tbakey == "": + send("tba key field in config must not be empty, please populate the tba key") + exit_flag = True + + if exit_flag: + sys.exit(1) + + send("found and loaded database and tba keys") + + client = pymongo.MongoClient(apikey) + + previous_time = get_previous_time(client) + send("analysis backtimed to: " + str(previous_time)) + + start = time.time() + send("loading match data") + match_data = load_match(client, competition) + send("finished loading match data in " + str(time.time() - start) + " seconds") + + start = time.time() + send("performing analysis on match data") + results = matchloop(client, competition, match_data, match_tests, exec_threads) + send("finished match analysis in " + str(time.time() - start) + " seconds") + + start = time.time() + send("uploading match results to database") + push_match(client, competition, results) + send("finished uploading match results in " + str(time.time() - start) + " seconds") + + start = time.time() + send("performing analysis on team metrics") + results = metricloop(tbakey, client, competition, current_time, metrics_tests) + send("finished metric analysis and pushed to database in " + str(time.time() - start) + " seconds") + + start = time.time() + send("loading pit data") + pit_data = load_pit(client, competition) + send("finished loading pit data in " + str(time.time() - start) + " seconds") + + start = time.time() + send("performing analysis on pit data") + results = pitloop(client, competition, pit_data, pit_tests) + send("finished pit analysis in " + str(time.time() - start) + " seconds") + + start = time.time() + send("uploading pit results to database") + push_pit(client, competition, results) + send("finished uploading pit results in " + str(time.time() - start) + " seconds") + + set_current_time(client, current_time) + send("finished all tests in " + str(time.time() - loop_start) + " seconds, looping") + + except KeyboardInterrupt: + send("detected KeyboardInterrupt, killing threads") + if "exec_threads" in locals(): + exec_threads.terminate() + exec_threads.join() + exec_threads.close() + send("terminated threads, exiting") + loop_stored_exception = sys.exc_info() + loop_exit_code = 0 + break except Exception as e: - await socket.send("unable to start threads") - sys.exit(1) - await socket.send("successfully initialized " + str(alloc_processes) + " threads") + send("encountered an exception while running") + send(str(e)) + loop_exit_code = 1 + break - exit_flag = False - - try: - apikey = config["key"]["database"] - except: - await socket.send("database key field in config must be present") - exit_flag = True - try: - tbakey = config["key"]["tba"] - except: - await socket.send("tba key field in config must be present") - exit_flag = True - - if apikey == None or apikey == "": - await socket.send("database key field in config must not be empty, please populate the database key") - exit_flag = True - if tbakey == None or tbakey == "": - await socket.send("tba key field in config must not be empty, please populate the tba key") - exit_flag = True - - if exit_flag: - sys.exit(1) - - await socket.send("found and loaded database and tba keys") - - client = 
pymongo.MongoClient(apikey) - - previous_time = get_previous_time(client) - await socket.send("analysis backtimed to: " + str(previous_time)) - - start = time.time() - await socket.send("loading match data") - match_data = load_match(client, competition) - await socket.send("finished loading match data in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("performing analysis on match data") - results = matchloop(client, competition, match_data, match_tests, exec_threads) - await socket.send("finished match analysis in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("uploading match results to database") - push_match(client, competition, results) - await socket.send("finished uploading match results in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("performing analysis on team metrics") - results = metricloop(tbakey, client, competition, current_time, metrics_tests) - await socket.send("finished metric analysis and pushed to database in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("loading pit data") - pit_data = load_pit(client, competition) - await socket.send("finished loading pit data in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("performing analysis on pit data") - results = pitloop(client, competition, pit_data, pit_tests) - await socket.send("finished pit analysis in " + str(time.time() - start) + " seconds") - - start = time.time() - await socket.send("uploading pit results to database") - push_pit(client, competition, results) - await socket.send("finished uploading pit results in " + str(time.time() - start) + " seconds") - - set_current_time(client, current_time) - await socket.send("finished all tests in " + str(time.time() - loop_start) + " seconds, looping") - - except KeyboardInterrupt: - await socket.send("detected KeyboardInterrupt, killing threads") - if "exec_threads" in locals(): - exec_threads.terminate() - exec_threads.join() - exec_threads.close() - await socket.send("terminated threads, exiting") - loop_stored_exception = sys.exc_info() - loop_exit_code = 0 - break - except Exception as e: - await socket.send("encountered an exception while running") - await socket.send(str(e)) - loop_exit_code = 1 - break - - sys.exit(loop_exit_code) + sys.exit(loop_exit_code) def main_win(): @@ -584,16 +619,8 @@ def save_config(path, config_vector): except: return 1 -def start(pid_path, profile = False): - f = open('errorlog.txt', 'w+') - with daemon.DaemonContext( - working_directory=os.getcwd(), - pidfile=pidfile.TimeoutPIDLockFile(pid_path), - stderr=f - ): - start_server = websockets.serve(main_lin, "127.0.0.1", 5678) - asyncio.get_event_loop().run_until_complete(start_server) - asyncio.get_event_loop().run_forever() +def start(pid_path): + main_lin(pid_path) def stop(pid_path): try: From 20f2040a1a0a5053b8faada17d9c87d0f590c687 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Wed, 18 Aug 2021 05:58:21 +0000 Subject: [PATCH 24/29] changed websocket bind address to 0.0.0.0 Former-commit-id: e9258c831d535e44096a0a7e36ea869de7030f81 --- src/cli/superscript.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cli/superscript.py b/src/cli/superscript.py index d06dbea..244b625 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -225,7 +225,7 @@ def main_lin(pid_path): break clients = [] - start_server = websockets.serve(handler, "127.0.0.1", 5678) + 
start_server = websockets.serve(handler, "0.0.0.0", 5678) asyncio.get_event_loop().run_until_complete(start_server) threading.Thread(target = asyncio.get_event_loop().run_forever).start() From 76f78047b311402f9437d3f5350d9e26e2d23456 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Wed, 18 Aug 2021 22:35:42 +0000 Subject: [PATCH 25/29] unified main_lin, main_win functions Former-commit-id: 342dae302291871603328224ae0a23ebeada47ce --- src/cli/superscript.py | 361 ++++++++++++----------------------------- 1 file changed, 101 insertions(+), 260 deletions(-) diff --git a/src/cli/superscript.py b/src/cli/superscript.py index 244b625..b375d2c 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -205,223 +205,17 @@ sample_json = """{ } }""" -def main_lin(pid_path): - f = open('errorlog.txt', 'w+') - with daemon.DaemonContext( - working_directory=os.getcwd(), - pidfile=pidfile.TimeoutPIDLockFile(pid_path), - stderr=f - ): +def main(send, verbose = False): - async def handler(client, path): - clients.append(client) - while True: - try: - pong_waiter = await client.ping() - await pong_waiter - time.sleep(3) - except Exception as e: - clients.remove(client) - break + if verbose : - clients = [] - start_server = websockets.serve(handler, "0.0.0.0", 5678) + warnings.filterwarnings("ignore") + sys.stderr = open("errorlog.txt", "w") - asyncio.get_event_loop().run_until_complete(start_server) - threading.Thread(target = asyncio.get_event_loop().run_forever).start() + splash(__version__) - while True: - - async def send_one(client, data): - await client.send(data) - - def send(data): - message_clients = clients.copy() - for client in message_clients: - try: - asyncio.run(send_one(client, data)) - except: - pass - - try: - - loop_start = time.time() - - current_time = time.time() - send("current time: " + str(current_time)) - - config = {} - if load_config(config_path, config) == 1: - sys.exit(1) - - error_flag = False - - try: - competition = config["competition"] - except: - send("could not find competition field in config") - error_flag = True - try: - match_tests = config["statistics"]["match"] - except: - send("could not find match_tests field in config") - error_flag = True - try: - metrics_tests = config["statistics"]["metric"] - except: - send("could not find metrics_tests field in config") - error_flag = True - try: - pit_tests = config["statistics"]["pit"] - except: - send("could not find pit_tests field in config") - error_flag = True - - if error_flag: - sys.exit(1) - error_flag = False - - if competition == None or competition == "": - send("competition field in config must not be empty") - error_flag = True - if match_tests == None: - send("match_tests field in config must not be empty") - error_flag = True - if metrics_tests == None: - send("metrics_tests field in config must not be empty") - error_flag = True - if pit_tests == None: - send("pit_tests field in config must not be empty") - error_flag = True - - if error_flag: - sys.exit(1) - - send("found and loaded competition, match_tests, metrics_tests, pit_tests from config") - - sys_max_threads = os.cpu_count() - try: - cfg_max_threads = config["max-threads"] - except: - send("max-threads field in config must not be empty, refer to documentation for configuration options", code = 109) - sys.exit(1) - - if cfg_max_threads > -sys_max_threads and cfg_max_threads < 0 : - alloc_processes = sys_max_threads + cfg_max_threads - elif cfg_max_threads > 0 and cfg_max_threads < 1: - alloc_processes = math.floor(cfg_max_threads * 
sys_max_threads) - elif cfg_max_threads > 1 and cfg_max_threads <= sys_max_threads: - alloc_processes = cfg_max_threads - elif cfg_max_threads == 0: - alloc_processes = sys_max_threads - else: - send("max-threads must be between -" + str(sys_max_threads) + " and " + str(sys_max_threads) + ", but got " + cfg_max_threads) - sys.exit(1) - - send("found and loaded max-threads from config") - send("attempting to start " + str(alloc_processes) + " threads") - try: - exec_threads = Pool(processes = alloc_processes) - except Exception as e: - send("unable to start threads") - sys.exit(1) - send("successfully initialized " + str(alloc_processes) + " threads") - - exit_flag = False - - try: - apikey = config["key"]["database"] - except: - send("database key field in config must be present") - exit_flag = True - try: - tbakey = config["key"]["tba"] - except: - send("tba key field in config must be present") - exit_flag = True - - if apikey == None or apikey == "": - send("database key field in config must not be empty, please populate the database key") - exit_flag = True - if tbakey == None or tbakey == "": - send("tba key field in config must not be empty, please populate the tba key") - exit_flag = True - - if exit_flag: - sys.exit(1) - - send("found and loaded database and tba keys") - - client = pymongo.MongoClient(apikey) - - previous_time = get_previous_time(client) - send("analysis backtimed to: " + str(previous_time)) - - start = time.time() - send("loading match data") - match_data = load_match(client, competition) - send("finished loading match data in " + str(time.time() - start) + " seconds") - - start = time.time() - send("performing analysis on match data") - results = matchloop(client, competition, match_data, match_tests, exec_threads) - send("finished match analysis in " + str(time.time() - start) + " seconds") - - start = time.time() - send("uploading match results to database") - push_match(client, competition, results) - send("finished uploading match results in " + str(time.time() - start) + " seconds") - - start = time.time() - send("performing analysis on team metrics") - results = metricloop(tbakey, client, competition, current_time, metrics_tests) - send("finished metric analysis and pushed to database in " + str(time.time() - start) + " seconds") - - start = time.time() - send("loading pit data") - pit_data = load_pit(client, competition) - send("finished loading pit data in " + str(time.time() - start) + " seconds") - - start = time.time() - send("performing analysis on pit data") - results = pitloop(client, competition, pit_data, pit_tests) - send("finished pit analysis in " + str(time.time() - start) + " seconds") - - start = time.time() - send("uploading pit results to database") - push_pit(client, competition, results) - send("finished uploading pit results in " + str(time.time() - start) + " seconds") - - set_current_time(client, current_time) - send("finished all tests in " + str(time.time() - loop_start) + " seconds, looping") - - except KeyboardInterrupt: - send("detected KeyboardInterrupt, killing threads") - if "exec_threads" in locals(): - exec_threads.terminate() - exec_threads.join() - exec_threads.close() - send("terminated threads, exiting") - loop_stored_exception = sys.exc_info() - loop_exit_code = 0 - break - except Exception as e: - send("encountered an exception while running") - send(str(e)) - loop_exit_code = 1 - break - - sys.exit(loop_exit_code) - -def main_win(): - - warnings.filterwarnings("ignore") - sys.stderr = open("errorlog.txt", "w") - - 
splash(__version__) - - loop_exit_code = 0 - loop_stored_exception = None + loop_exit_code = 0 + loop_stored_exception = None while True: @@ -430,33 +224,38 @@ def main_win(): loop_start = time.time() current_time = time.time() - log(stdout, INF, "current time: " + str(current_time)) + send(stdout, INF, "current time: " + str(current_time)) + + send(stdout, INF, "loading config at <" + config_path + ">", code = 0) config = {} if load_config(config_path, config) == 1: + send(stderr, ERR, "could not find config at <" + config_path + ">, generating blank config and exiting", code = 100) sys.exit(1) + send(stdout, INF, "found and opened config at <" + config_path + ">", code = 0) + error_flag = False try: competition = config["competition"] except: - log(stderr, ERR, "could not find competition field in config", code = 101) + send(stderr, ERR, "could not find competition field in config", code = 101) error_flag = True try: match_tests = config["statistics"]["match"] except: - log(stderr, ERR, "could not find match_tests field in config", code = 102) + send(stderr, ERR, "could not find match_tests field in config", code = 102) error_flag = True try: metrics_tests = config["statistics"]["metric"] except: - log(stderr, ERR, "could not find metrics_tests field in config", code = 103) + send(stderr, ERR, "could not find metrics_tests field in config", code = 103) error_flag = True try: pit_tests = config["statistics"]["pit"] except: - log(stderr, ERR, "could not find pit_tests field in config", code = 104) + send(stderr, ERR, "could not find pit_tests field in config", code = 104) error_flag = True if error_flag: @@ -464,28 +263,28 @@ def main_win(): error_flag = False if competition == None or competition == "": - log(stderr, ERR, "competition field in config must not be empty", code = 105) + send(stderr, ERR, "competition field in config must not be empty", code = 105) error_flag = True if match_tests == None: - log(stderr, ERR, "match_tests field in config must not be empty", code = 106) + send(stderr, ERR, "match_tests field in config must not be empty", code = 106) error_flag = True if metrics_tests == None: - log(stderr, ERR, "metrics_tests field in config must not be empty", code = 107) + send(stderr, ERR, "metrics_tests field in config must not be empty", code = 107) error_flag = True if pit_tests == None: - log(stderr, ERR, "pit_tests field in config must not be empty", code = 108) + send(stderr, ERR, "pit_tests field in config must not be empty", code = 108) error_flag = True if error_flag: sys.exit(1) - log(stdout, INF, "found and loaded competition, match_tests, metrics_tests, pit_tests from config") + send(stdout, INF, "found and loaded competition, match_tests, metrics_tests, pit_tests from config") sys_max_threads = os.cpu_count() try: cfg_max_threads = config["max-threads"] except: - log(stderr, ERR, "max-threads field in config must not be empty, refer to documentation for configuration options", code = 109) + send(stderr, ERR, "max-threads field in config must not be empty, refer to documentation for configuration options", code = 109) sys.exit(1) if cfg_max_threads > -sys_max_threads and cfg_max_threads < 0 : @@ -497,99 +296,101 @@ def main_win(): elif cfg_max_threads == 0: alloc_processes = sys_max_threads else: - log(stderr, ERR, "max-threads must be between -" + str(sys_max_threads) + " and " + str(sys_max_threads) + ", but got " + cfg_max_threads, code = 110) + send(stderr, ERR, "max-threads must be between -" + str(sys_max_threads) + " and " + str(sys_max_threads) + ", but 
got " + cfg_max_threads, code = 110) sys.exit(1) - log(stdout, INF, "found and loaded max-threads from config") - log(stdout, INF, "attempting to start " + str(alloc_processes) + " threads") + send(stdout, INF, "found and loaded max-threads from config") + send(stdout, INF, "attempting to start " + str(alloc_processes) + " threads") try: exec_threads = Pool(processes = alloc_processes) except Exception as e: - log(stderr, ERR, "unable to start threads", code = 200) - log(stderr, INF, e) + send(stderr, ERR, "unable to start threads", code = 200) + send(stderr, INF, e) sys.exit(1) - log(stdout, INF, "successfully initialized " + str(alloc_processes) + " threads") + send(stdout, INF, "successfully initialized " + str(alloc_processes) + " threads") exit_flag = False try: apikey = config["key"]["database"] except: - log(stderr, ERR, "database key field in config must be present", code = 111) + send(stderr, ERR, "database key field in config must be present", code = 111) exit_flag = True try: tbakey = config["key"]["tba"] except: - log(stderr, ERR, "tba key field in config must be present", code = 112) + send(stderr, ERR, "tba key field in config must be present", code = 112) exit_flag = True if apikey == None or apikey == "": - log(stderr, ERR, "database key field in config must not be empty, please populate the database key") + send(stderr, ERR, "database key field in config must not be empty, please populate the database key") exit_flag = True if tbakey == None or tbakey == "": - log(stderr, ERR, "tba key field in config must not be empty, please populate the tba key") + send(stderr, ERR, "tba key field in config must not be empty, please populate the tba key") exit_flag = True if exit_flag: sys.exit(1) - log(stdout, INF, "found and loaded database and tba keys") + send(stdout, INF, "found and loaded database and tba keys") client = pymongo.MongoClient(apikey) previous_time = get_previous_time(client) - log(stdout, INF, "analysis backtimed to: " + str(previous_time)) + send(stdout, INF, "analysis backtimed to: " + str(previous_time)) start = time.time() - log(stdout, INF, "loading match data") + send(stdout, INF, "loading match data") match_data = load_match(client, competition) - log(stdout, INF, "finished loading match data in " + str(time.time() - start) + " seconds") + send(stdout, INF, "finished loading match data in " + str(time.time() - start) + " seconds") start = time.time() - log(stdout, INF, "performing analysis on match data") + send(stdout, INF, "performing analysis on match data") results = matchloop(client, competition, match_data, match_tests, exec_threads) - log(stdout, INF, "finished match analysis in " + str(time.time() - start) + " seconds") + send(stdout, INF, "finished match analysis in " + str(time.time() - start) + " seconds") start = time.time() - log(stdout, INF, "uploading match results to database") + send(stdout, INF, "uploading match results to database") push_match(client, competition, results) - log(stdout, INF, "finished uploading match results in " + str(time.time() - start) + " seconds") + send(stdout, INF, "finished uploading match results in " + str(time.time() - start) + " seconds") start = time.time() - log(stdout, INF, "performing analysis on team metrics") + send(stdout, INF, "performing analysis on team metrics") results = metricloop(tbakey, client, competition, current_time, metrics_tests) - log(stdout, INF, "finished metric analysis and pushed to database in " + str(time.time() - start) + " seconds") + send(stdout, INF, "finished metric analysis and 
pushed to database in " + str(time.time() - start) + " seconds") start = time.time() - log(stdout, INF, "loading pit data") + send(stdout, INF, "loading pit data") pit_data = load_pit(client, competition) - log(stdout, INF, "finished loading pit data in " + str(time.time() - start) + " seconds") + send(stdout, INF, "finished loading pit data in " + str(time.time() - start) + " seconds") start = time.time() - log(stdout, INF, "performing analysis on pit data") + send(stdout, INF, "performing analysis on pit data") results = pitloop(client, competition, pit_data, pit_tests) - log(stdout, INF, "finished pit analysis in " + str(time.time() - start) + " seconds") + send(stdout, INF, "finished pit analysis in " + str(time.time() - start) + " seconds") start = time.time() - log(stdout, INF, "uploading pit results to database") + send(stdout, INF, "uploading pit results to database") push_pit(client, competition, results) - log(stdout, INF, "finished uploading pit results in " + str(time.time() - start) + " seconds") + send(stdout, INF, "finished uploading pit results in " + str(time.time() - start) + " seconds") + + client.close() set_current_time(client, current_time) - log(stdout, INF, "finished all tests in " + str(time.time() - loop_start) + " seconds, looping") + send(stdout, INF, "finished all tests in " + str(time.time() - loop_start) + " seconds, looping") except KeyboardInterrupt: - log(stdout, INF, "detected KeyboardInterrupt, killing threads") + send(stdout, INF, "detected KeyboardInterrupt, killing threads") if "exec_threads" in locals(): exec_threads.terminate() exec_threads.join() exec_threads.close() - log(stdout, INF, "terminated threads, exiting") + send(stdout, INF, "terminated threads, exiting") loop_stored_exception = sys.exc_info() loop_exit_code = 0 break except Exception as e: - log(stderr, ERR, "encountered an exception while running") + send(stderr, ERR, "encountered an exception while running") print(e, file = stderr) loop_exit_code = 1 break @@ -601,10 +402,8 @@ def load_config(path, config_vector): f = open(path, "r") config_vector.update(json.load(f)) f.close() - #socket.send("found and opened config at <" + path + ">") return 0 except: - #log(stderr, ERR, "could not find config at <" + path + ">, generating blank config and exiting", code = 100) f = open(path, "w") f.write(sample_json) f.close() @@ -619,8 +418,50 @@ def save_config(path, config_vector): except: return 1 -def start(pid_path): - main_lin(pid_path) +def start(pid_path, verbose = False): + + if not verbose: + + f = open('errorlog.txt', 'w+') + with daemon.DaemonContext( + working_directory=os.getcwd(), + pidfile=pidfile.TimeoutPIDLockFile(pid_path), + stderr=f + ): + + async def handler(client, path): + clients.append(client) + while True: + try: + pong_waiter = await client.ping() + await pong_waiter + time.sleep(3) + except Exception as e: + clients.remove(client) + break + + async def send_one(client, data): + await client.send(data) + + def send(target, level, message, code = 0): + message_clients = clients.copy() + for client in message_clients: + try: + asyncio.run(send_one(client, message)) + except: + pass + + clients = [] + start_server = websockets.serve(handler, "0.0.0.0", 5678) + + asyncio.get_event_loop().run_until_complete(start_server) + threading.Thread(target = asyncio.get_event_loop().run_forever).start() + + main(send) + + else: + + main(log, verbose=verbose) def stop(pid_path): try: @@ -652,7 +493,7 @@ if __name__ == "__main__": if sys.platform.startswith("win"): freeze_support() - 
main_win() + start(None, verbose = True) else: import daemon @@ -667,7 +508,7 @@ if __name__ == "__main__": elif 'restart' == sys.argv[1]: restart(pid_path) elif 'verbose' == sys.argv[1]: - main_win() + start(None, verbose = True) else: print("usage: %s start|stop|restart|verbose" % sys.argv[0]) sys.exit(2) From 11d3db4b44b873e28b7de5862c9c175c342b7868 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Thu, 19 Aug 2021 00:34:42 +0000 Subject: [PATCH 26/29] added profile option to superscript Former-commit-id: ea07d7c7094cd1469712e0d4e916ecd25dad5c23 --- .gitignore | 2 +- src/cli/superscript.py | 42 +++++++++++++++++++++++++++++++----------- 2 files changed, 32 insertions(+), 12 deletions(-) diff --git a/.gitignore b/.gitignore index 97705c5..1573f33 100644 --- a/.gitignore +++ b/.gitignore @@ -11,7 +11,7 @@ **/*.pid -**/profile +**/profile.* **/errorlog.txt /dist/superscript.* diff --git a/src/cli/superscript.py b/src/cli/superscript.py index b375d2c..d3e77b8 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -13,6 +13,7 @@ __changelog__ = """changelog: - linux daemon now sends stderr to errorlog.txt - added verbose option to linux superscript to allow for interactive output - moved pymongo import to superscript.py + - added profile option to linux superscript to profile runtime of script 0.9.3: - improved data loading performance by removing redundant PyMongo client creation (120s to 14s) - passed singular instance of PyMongo client as standin for apikey parameter in all data.py functions @@ -205,9 +206,9 @@ sample_json = """{ } }""" -def main(send, verbose = False): +def main(send, verbose = False, profile = False): - if verbose : + if verbose or profile: warnings.filterwarnings("ignore") sys.stderr = open("errorlog.txt", "w") @@ -395,6 +396,9 @@ def main(send, verbose = False): loop_exit_code = 1 break + if profile: + return + sys.exit(loop_exit_code) def load_config(path, config_vector): @@ -418,9 +422,27 @@ def save_config(path, config_vector): except: return 1 -def start(pid_path, verbose = False): +def start(pid_path, verbose = False, profile = False): - if not verbose: + if profile: + + def send(target, level, message, code = 0): + pass + + import cProfile, pstats, io + profile = cProfile.Profile() + profile.enable() + main(send, profile = True) + profile.disable() + f = open("profile.txt", 'w+') + ps = pstats.Stats(profile, stream = f).sort_stats('cumtime') + ps.print_stats() + + elif verbose: + + main(log, verbose = verbose) + + else: f = open('errorlog.txt', 'w+') with daemon.DaemonContext( @@ -457,11 +479,7 @@ def start(pid_path, verbose = False): asyncio.get_event_loop().run_until_complete(start_server) threading.Thread(target = asyncio.get_event_loop().run_forever).start() - main(send) - - else: - - main(log, verbose=verbose) + main(send) def stop(pid_path): try: @@ -509,10 +527,12 @@ if __name__ == "__main__": restart(pid_path) elif 'verbose' == sys.argv[1]: start(None, verbose = True) + elif 'profile' == sys.argv[1]: + start(None, profile=True) else: - print("usage: %s start|stop|restart|verbose" % sys.argv[0]) + print("usage: %s start|stop|restart|verbose|profile" % sys.argv[0]) sys.exit(2) sys.exit(0) else: - print("usage: %s start|stop|restart|verbose" % sys.argv[0]) + print("usage: %s start|stop|restart|verbose|profile" % sys.argv[0]) sys.exit(2) \ No newline at end of file From 052788afb9a0592b0ce0d1599c9f476fe4056e43 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Thu, 19 Aug 2021 20:58:35 +0000 Subject: [PATCH 27/29] fixed verbose/profile output options 
Former-commit-id: ca399cf3500e066d5f3d448506dca7c5cebb27df --- src/cli/superscript.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/src/cli/superscript.py b/src/cli/superscript.py index d3e77b8..58a5b07 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -208,16 +208,14 @@ sample_json = """{ def main(send, verbose = False, profile = False): - if verbose or profile: - - warnings.filterwarnings("ignore") - sys.stderr = open("errorlog.txt", "w") + warnings.filterwarnings("ignore") + sys.stderr = open("errorlog.txt", "w") + loop_exit_code = 0 + loop_stored_exception = None + if verbose: splash(__version__) - loop_exit_code = 0 - loop_stored_exception = None - while True: try: From 097fd2836b62552c5a6787bf597f794e22d67dd0 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Sun, 22 Aug 2021 05:35:04 +0000 Subject: [PATCH 28/29] reorganized imports in superscript Former-commit-id: 18c26a00b622ad8f946ce589f556b085e678a413 --- src/cli/superscript.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/cli/superscript.py b/src/cli/superscript.py index 58a5b07..91d11b5 100644 --- a/src/cli/superscript.py +++ b/src/cli/superscript.py @@ -142,20 +142,20 @@ __all__ = [ # imports: +import asyncio import json -import os import math from multiprocessing import Pool, freeze_support +import os +import pymongo +import sys +import threading import time import warnings -import sys -import asyncio import websockets -import pymongo -import threading from interface import splash, log, ERR, INF, stdout, stderr -from data import get_previous_time, set_current_time, load_match, push_match, load_metric, push_metric, load_pit, push_pit +from data import get_previous_time, set_current_time, load_match, push_match, load_pit, push_pit from processing import matchloop, metricloop, pitloop config_path = "config.json" From 9be9008ae15942bba9da30397058cad5c8fbc912 Mon Sep 17 00:00:00 2001 From: Arthur Lu Date: Thu, 26 Aug 2021 22:26:29 +0000 Subject: [PATCH 29/29] fixed and properly named build action Former-commit-id: 694733700a0d6933399e1040665ba4aa653f67b8 --- .github/workflows/build-cli.yml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-cli.yml b/.github/workflows/build-cli.yml index 681ab31..04e4e66 100644 --- a/.github/workflows/build-cli.yml +++ b/.github/workflows/build-cli.yml @@ -1,11 +1,11 @@ # This workflow will install Python dependencies, run tests and lint with a variety of Python versions # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions -name: Superscript Unit Tests +name: Build Superscript Linux on: release: - types: [published, edited] + types: [published, created, edited] jobs: generate: @@ -14,4 +14,6 @@ jobs: steps: - name: Checkout master - uses: actions/checkout@master + uses: actions/checkout@master + - name: Echo test + run: echo "test" \ No newline at end of file
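
Note on the profile option introduced in PATCH 26/29: it follows the standard cProfile/pstats pattern, running one pass of the analysis loop under a profiler and then writing cumulative timings to profile.txt sorted by "cumtime". The following is a minimal, self-contained sketch of that pattern only; run_once is a hypothetical stand-in for a single pass of superscript's main loop and is not part of the repository.

    import cProfile
    import pstats
    import time

    def run_once():
        # hypothetical stand-in for one pass of the main analysis loop
        time.sleep(0.1)

    profiler = cProfile.Profile()
    profiler.enable()
    run_once()
    profiler.disable()

    # write stats sorted by cumulative time, the same sort key the profile option uses
    with open("profile.txt", "w+") as f:
        stats = pstats.Stats(profiler, stream=f).sort_stats("cumtime")
        stats.print_stats()

Running it produces a profile.txt listing call counts and cumulative times per function, which is the output format the profile subcommand writes for the real main loop.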