mirror of
https://github.com/titanscouting/tra-analysis.git
synced 2024-12-26 17:49:09 +00:00
added TBA requests module
This commit is contained in:
parent
ad8e3cc4e5
commit
cf897c7a8f
94
data analysis/tbarequest.py
Normal file
94
data analysis/tbarequest.py
Normal file
@ -0,0 +1,94 @@
|
||||
#Titan Robotics Team 2022: TBA Requests Module
|
||||
#Written by Arthur Lu & Jacob Levine
|
||||
#Notes:
|
||||
# this should be imported as a python module using 'import tbarequest'
|
||||
# this should be included in the local directory or environment variable
|
||||
# this module has not been optimized for multithreaded computing
|
||||
#Number of easter eggs: none yet
|
||||
#setup:
|
||||
|
||||
__version__ = "1.0.0.001"
|
||||
|
||||
#changelog should be viewed using print(analysis.__changelog__)
|
||||
__changelog__ = """changelog:
|
||||
1.0.0.xxx:
|
||||
-added common requests and JSON processing"""
|
||||
__author__ = (
|
||||
"Arthur Lu <arthurlu@ttic.edu>, "
|
||||
"Jacob Levine <jlevine@ttic.edu>,"
|
||||
)
|
||||
# Public API of this module. Every entry is comma-terminated: the original
# list was missing the comma after 'req_event_elim_alli', so Python's
# implicit string-literal concatenation fused it with the next entry into
# 'req_event_elim_allireq_team_events' and dropped 'req_team_events'
# from `from tbarequest import *`.
__all__ = [
    'process_json_ret',
    'req_all_events',
    'req_event_matches',
    'req_event_insights',
    'req_event_elim_alli',
    'req_team_events',
    'req_team_matches',
]
|
||||
#imports
|
||||
import requests
|
||||
|
||||
#as this code is public, i'm not putting 2022's API key in here. just add it as a var in your script and go
|
||||
#requests a list of events that a team went to
|
||||
def req_team_events(team,year,apikey):
    """Fetch the list of events a team attended in a given year.

    team   -- FRC team number (prefixed with 'frc' in the URL)
    year   -- competition season year
    apikey -- TBA API key, sent via the X-TBA-Auth-Key header
    Returns the raw requests.Response object.
    """
    url = 'https://www.thebluealliance.com/api/v3/team/frc{}/events/{}'.format(team, year)
    return requests.get(url, headers={'X-TBA-Auth-Key': apikey})
|
||||
|
||||
#gets every match that a team played in
|
||||
def req_team_matches(team,year,apikey):
    """Fetch every match a team played in a given year.

    team   -- FRC team number (prefixed with 'frc' in the URL)
    year   -- competition season year
    apikey -- TBA API key, sent via the X-TBA-Auth-Key header
    Returns the raw requests.Response object.
    """
    url = 'https://www.thebluealliance.com/api/v3/team/frc{}/matches/{}'.format(team, year)
    return requests.get(url, headers={'X-TBA-Auth-Key': apikey})
|
||||
|
||||
#gets all events in a certain year
|
||||
def req_all_events(year, apikey):
    """Fetch all events in a given year.

    year   -- competition season year
    apikey -- TBA API key, sent via the X-TBA-Auth-Key header
    Returns the raw requests.Response object.
    """
    url = 'https://www.thebluealliance.com/api/v3/events/{}'.format(year)
    return requests.get(url, headers={'X-TBA-Auth-Key': apikey})
|
||||
|
||||
#gets all matches for an event
|
||||
def req_event_matches(event_key,apikey):
    """Fetch all matches for an event.

    event_key -- TBA event key (e.g. '2022casd')
    apikey    -- TBA API key, sent via the X-TBA-Auth-Key header
    Returns the raw requests.Response object.
    """
    url = 'https://www.thebluealliance.com/api/v3/event/{}/matches'.format(event_key)
    return requests.get(url, headers={'X-TBA-Auth-Key': apikey})
|
||||
|
||||
#gets elimination alliances from a event
|
||||
def req_event_elim_alli(event_key, apikey):
    """Fetch the elimination alliances for an event.

    event_key -- TBA event key (e.g. '2022casd')
    apikey    -- TBA API key, sent via the X-TBA-Auth-Key header
    Returns the raw requests.Response object.
    """
    url = 'https://www.thebluealliance.com/api/v3/event/{}/alliances'.format(event_key)
    return requests.get(url, headers={'X-TBA-Auth-Key': apikey})
|
||||
|
||||
#gets TBA's insights from an event
|
||||
def req_event_insights(event_key, apikey):
    """Fetch TBA's computed insights for an event.

    event_key -- TBA event key (e.g. '2022casd')
    apikey    -- TBA API key, sent via the X-TBA-Auth-Key header
    Returns the raw requests.Response object.
    """
    url = 'https://www.thebluealliance.com/api/v3/event/{}/insights'.format(event_key)
    return requests.get(url, headers={'X-TBA-Auth-Key': apikey})
|
||||
|
||||
#processes the json return. right now, it's slow and not great. will throw an exception if it doesn't get a good status code
|
||||
def process_json_ret(req):
    """Flatten a JSON response (a list of objects) into a table.

    req -- a requests.Response whose body is a JSON array of objects.

    Returns a list of rows: the first row is the union of all keys seen,
    in first-appearance order; each subsequent row holds one object's
    values in that key order, with "" substituted for keys the object
    lacks (so every row has the same width).

    Raises ValueError when the response status code is not 200.
    """
    if req.status_code != 200:
        # str() the code: the original concatenated the int status code
        # into the message, which raised TypeError instead of the
        # intended ValueError.
        raise ValueError('Status code is: ' + str(req.status_code) + ', not 200')
    # Parse the body once instead of once per loop.
    data = req.json()
    # Union of keys across all objects, preserving first-appearance order
    # (a membership test replaces the original O(n^2) inner scan).
    keys = []
    for item in data:
        for key in item.keys():
            if key not in keys:
                keys.append(key)
    out = [keys]
    for item in data:
        # dict.get with a default replaces the original bare except,
        # which could have hidden unrelated errors.
        out.append([item.get(key, "") for key in keys])
    return out
|
@ -34,7 +34,6 @@ import matplotlib.pyplot as plt
|
||||
from sklearn.decomposition import PCA, KernelPCA, IncrementalPCA
|
||||
from sklearn.preprocessing import StandardScaler
|
||||
from sklearn.cluster import AffinityPropagation, DBSCAN, KMeans, SpectralClustering
|
||||
import statistics
|
||||
|
||||
#bar of x,y
|
||||
def bar_graph(x,y):
|
||||
@ -125,7 +124,6 @@ def kmeans(data, num_clusters):
|
||||
#Spectral Clustering- Seems to work really well
|
||||
def spectral(data, num_clusters):
    """Cluster *data* with spectral clustering and return integer labels."""
    # Standardize features to zero mean / unit variance before clustering.
    td_norm=StandardScaler().fit_transform(data)
    # NOTE(review): 'td' is not defined in this scope (only 'td_norm' is);
    # this call will raise NameError at runtime -- presumably td_norm was
    # intended. Confirm against the repository's final version of this file.
    db = SpectralClustering(n_clusters=num_clusters, eigen_solver='arpack',
        affinity="nearest_neighbors").fit(td)
    # NOTE(review): this second fit overwrites 'db' from the line above,
    # making the first SpectralClustering call dead work -- this view is a
    # diff hunk, so one of the two lines is likely the removed one; confirm
    # which call survives.
    db = SpectralClustering(n_clusters=num_clusters).fit(td)
    # NOTE(review): np.int is deprecated (removed in NumPy >= 1.24); plain
    # int or np.int64 would be the modern spelling.
    y=db.labels_.astype(np.int)
    return y
|
||||
|
Loading…
Reference in New Issue
Block a user