#Titan Robotics Team 2022: Super Script
#Written by Arthur Lu & Jacob Levine
#Notes:
#setup:

__version__ = "1.0.6.000"

__changelog__ = """changelog:
1.0.6.000:
    - added pulldata function
    - service now pulls in, computes data, and outputs data as planned
1.0.5.003:
    - hotfix: actually pushes data correctly now
1.0.5.002:
    - more information given
    - performance improvements
1.0.5.001:
    - grammar
1.0.5.000:
    - service now iterates forever
    - ready for production other than pulling json data
1.0.4.001:
    - grammar fixes
1.0.4.000:
    - actually pushes to firebase
1.0.3.001:
    - processes data more efficiently
1.0.3.000:
    - actually processes data
1.0.2.000:
    - added data reading from folder
    - nearly crashed computer reading from 20 GiB of data
1.0.1.000:
    - added data reading from file
    - added superstructure to code
1.0.0.000:
    - added import statements (revolutionary)
"""
__author__ = (
    "Arthur Lu <arthurlu@ttic.edu>, "
    "Jacob Levine <jlevine@ttic.edu>,"
)
import firebase_admin
from firebase_admin import credentials
from firebase_admin import firestore
import analysis
#import titanlearn
import visualization
import os
import sys
import warnings
import glob
import numpy as np
import time
import tbarequest as tba
import csv
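
#assumption: analysis, visualization, and tbarequest are project-local modules that sit
#next to this script rather than PyPI packages; firebase_admin and numpy are third-party.
#visualization is imported but not referenced anywhere in this script.
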
def titanservice():

    print("[OK] loading data")

    start = time.time()

    source_dir = 'data'
    file_list = glob.glob(source_dir + '/*.csv') #supposedly sorts by alphabetical order, skips reading teams.csv because of redundancy (currently unused; the filtered 'files' list below is what actually gets read)
    data = []
    files = [fn for fn in glob.glob('data/*.csv')
             if not (os.path.basename(fn).startswith('teams')
                     or os.path.basename(fn).startswith('match')
                     or os.path.basename(fn).startswith('notes')
                     or os.path.basename(fn).startswith('observationType')
                     or os.path.basename(fn).startswith('teamDBRef'))] #scores will be handled separately
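
    #note (assumption): glob.glob() does not actually guarantee alphabetical order;
    #results come back in arbitrary, OS-dependent order. If the per-file documents
    #written later need a deterministic mapping to file names, a minimal sketch of a
    #fix would be:
    #   files = sorted(files)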

    for i in files:
        data.append(analysis.load_csv(i))

    stats = []
    measure_stats = []
    teams = analysis.load_csv("data/teams.csv")
    scores = analysis.load_csv("data/scores.csv")

    end = time.time()
    print("[OK] loaded data in " + str(end - start) + " seconds")

    #assumes that team number is in the first column, and that the order of teams is the same across all files
    #unhelpful comment
    for measure in data: #unpacks 3d array into 2ds

        measure_stats = []

        for i in range(len(measure)): #unpacks into specific teams

            #ofbest_curve = [None]
            #r2best_curve = [None]

            line = measure[i]

            #print(line)

            #x = list(range(len(line)))
            #eqs, rmss, r2s, overfit = analysis.optimize_regression(x, line, 10, 1)

            #beqs, brmss, br2s, boverfit = analysis.select_best_regression(eqs, rmss, r2s, overfit, "min_overfit")

            #print(eqs, rmss, r2s, overfit)

            #ofbest_curve.append(beqs)
            #ofbest_curve.append(brmss)
            #ofbest_curve.append(br2s)
            #ofbest_curve.append(boverfit)
            #ofbest_curve.pop(0)

            #print(ofbest_curve)

            #beqs, brmss, br2s, boverfit = analysis.select_best_regression(eqs, rmss, r2s, overfit, "max_r2s")

            #r2best_curve.append(beqs)
            #r2best_curve.append(brmss)
            #r2best_curve.append(br2s)
            #r2best_curve.append(boverfit)
            #r2best_curve.pop(0)

            #print(r2best_curve)

            measure_stats.append(teams[i] + list(analysis.basic_stats(line, 0, 0)) + list(analysis.histo_analysis(line, 1, -3, 3)))

        stats.append(list(measure_stats))
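
    #at this point stats[j][i] holds the row for team i computed from files[j]: the
    #teams.csv row followed by the outputs of analysis.basic_stats() and
    #analysis.histo_analysis() (their exact contents depend on the analysis module)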

    nishant = []

    for i in range(len(scores)):

        ofbest_curve = [None]
        r2best_curve = [None]
        line = scores[i] #score history for team i
        #print(line)
        x = list(range(len(line)))
        eqs, rmss, r2s, overfit = analysis.optimize_regression(x, line, 10, 1)
        beqs, brmss, br2s, boverfit = analysis.select_best_regression(eqs, rmss, r2s, overfit, "min_overfit")
        #print(eqs, rmss, r2s, overfit)
        ofbest_curve.append(beqs)
        ofbest_curve.append(brmss)
        ofbest_curve.append(br2s)
        ofbest_curve.append(boverfit)
        ofbest_curve.pop(0)
        #print(ofbest_curve)
        beqs, brmss, br2s, boverfit = analysis.select_best_regression(eqs, rmss, r2s, overfit, "max_r2s")
        r2best_curve.append(beqs)
        r2best_curve.append(brmss)
        r2best_curve.append(br2s)
        r2best_curve.append(boverfit)
        r2best_curve.pop(0)
        #print(r2best_curve)
        z = len(scores[0]) + 1 #unused
        nis_num = []
        nis_num.append(eval(str(ofbest_curve[0])))
        nis_num.append(eval(str(r2best_curve[0])))
        nis_num.append((eval(ofbest_curve[0]) + eval(r2best_curve[0])) / 2)
        nishant.append(teams[i] + nis_num)
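
    #per the loop above, the "nishant number" pushed to Firestore below is a triple:
    #the minimum-overfit fit and the maximum-r2 fit (each produced by running eval()
    #on the equation analysis.select_best_regression() returns), plus their mean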

    json_out = {}
    score_out = {}
    #print(stats)
    #print(stats[0])

    #for i in range(len(teams)):
    #    json_out[str(teams[i][0])] = (stats[0][i])

    for i in range(len(teams)):
        score_out[str(teams[i][0])] = (nishant[i])

    #print(json_out.get('5'))
    location = db.collection(u'stats').document(u'stats-noNN')
    for i in range(len(teams)):
        general_general_stats = location.collection(teams[i][0])

        for j in range(len(files)):
            json_out[str(teams[i][0])] = (stats[j][i])
            name = os.path.basename(files[j])
            general_general_stats.document(name).set({'stats': json_out.get(teams[i][0])})

    for i in range(len(teams)):
        nnum = location.collection(teams[i][0]).document(u'nishant_number').set({'nishant': score_out.get(teams[i][0])})

    #general_general_stats.collection().document('stats').set()
    #db.collection(u'stats').document(u'stats-noNN').set(score_out)
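
#Firestore layout inferred from the calls in titanservice():
#   stats (collection) -> stats-noNN (document)
#       -> <team number> (subcollection)
#           -> <csv basename> (document)     {'stats': stats[j][i]}
#           -> nishant_number (document)     {'nishant': nishant[i]}
#a minimal read-back sketch under the same assumptions (the team number '2022' is a
#placeholder, not a value guaranteed to exist in data/teams.csv):
#   snap = db.collection(u'stats').document(u'stats-noNN') \
#            .collection('2022').document(u'nishant_number').get()
#   print(snap.to_dict())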

def pulldata():

    teams = analysis.load_csv('data/teams.csv')
    scores = []
    for i in range(len(teams)):
        team_scores = []

        request_data_object = tba.req_team_matches(teams[i][0], 2019, "UDvKmPjPRfwwUdDX1JxbmkyecYBJhCtXeyVk9vmO2i7K0Zn4wqQPMfzuEINXJ7e5")
        json_data = request_data_object.json()

        #print(json_data)
        #drop matches with no winning alliance recorded, iterating backwards so removals
        #do not skip entries; m is used so the outer team index i is preserved
        for m in range(len(json_data) - 1, -1, -1):
            if json_data[m].get('winning_alliance') in (None, [], ""):
                print(json_data[m])
                json_data.remove(json_data[m])
        #print(json_data)

        json_data = sorted(json_data, key=lambda k: k.get('actual_time', 0), reverse=False)
        for j in range(len(json_data)):
            if "frc" + teams[i][0] in json_data[j].get('alliances').get('blue').get('team_keys'):
                team_scores.append(json_data[j].get('alliances').get('blue').get('score'))
            elif "frc" + teams[i][0] in json_data[j].get('alliances').get('red').get('team_keys'):
                team_scores.append(json_data[j].get('alliances').get('red').get('score'))
        scores.append(team_scores)

    print(scores)

    with open("data/scores.csv", "w+", newline='') as file:
        writer = csv.writer(file, delimiter=',')

        writer.writerows(scores)
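
#what pulldata() leaves in data/scores.csv, given the TBA match fields it reads
#('actual_time', 'winning_alliance', 'alliances'/<color>/'team_keys' and 'score'):
#one CSV row per team, in teams.csv order, holding that team's alliance scores in
#match-time order, e.g. (hypothetical numbers):
#   65,42,71,58
#   23,77,40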

def service():

    while True:

        pulldata()

        start = time.time()

        #print("[OK]" + "time is: " + time.time())

        print("[OK] starting calculations")

        fucked = False

        for i in range(0, 5):
            try:
                titanservice()
                break
            except Exception:
                if (i != 4):
                    print("[WARNING] failed, trying " + str(5 - i - 1) + " more times")
                else:
                    print("[ERROR] failed to compute data, skipping")
                    fucked = True

        end = time.time()
        if (fucked == True):
            break
        else:
            print("[OK] finished calculations")

        wait = max(0, 300 - (end - start)) #executes once every 5 minutes; clamped so time.sleep() never gets a negative duration
        print("[OK] waiting: " + str(wait) + " seconds" + "\n")
        time.sleep(wait)

warnings.simplefilter("ignore")

#Use a service account
try:
    cred = credentials.Certificate('keys/firebasekey.json')
except Exception:
    cred = credentials.Certificate('keys/keytemp.json')

firebase_admin.initialize_app(cred)
db = firestore.client()

service() #finally we write something that isn't a function definition

#titanservice()