index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
34,027,391
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/localtesting/test_login.py
|
# coding: utf-8
import unittest
import webapp2
from webapp2 import uri_for
import webtest
from google.appengine.ext import testbed
from main import app, BaseHandler
from forms import AddUserForm
from mock import Mock, patch
from models import AppEventData
import admin
import models
import main
class AppTest(unittest.TestCase):
    """Integration tests for the login and admin handlers.

    Requests run through the real webapp2 ``app`` via WebTest, with the
    App Engine datastore/memcache/mail services replaced by testbed stubs.
    """

    def setUp(self):
        # Wrap the app with WebTest's TestApp so requests can be issued
        # without a running server.
        self.testapp = webtest.TestApp(app)
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_mail_stub()

    def tearDown(self):
        # BUGFIX: was named ``tear_down``; unittest only invokes the
        # camelCase ``tearDown`` hook, so the testbed was never deactivated.
        self.testbed.deactivate()

    # Test the root (login page) handler.
    def testLoginHandler(self):
        response = self.testapp.get('/')
        self.assertEqual(response.status_int, 200)
        assert('Login' in response.body)
        self.assertEqual(response.content_type, 'text/html')

    @patch('admin.mail.send_mail')
    def testAddUserAccountHandler(self, mail_send_mock):
        # Outgoing mail is mocked; only the handler's response is checked.
        params = {'email': 'stevenmarr@example.com',
                  'firstname': 'steven',
                  'lastname': 'marr',
                  'email_user': False}
        response = self.testapp.post('/admin/add_user_account', params)
        self.assertEqual(response.status_int, 200)
        self.assertEqual(response.content_type, 'text/html')
        # NOTE: 'succesfully' (sic) matches the handler's actual output.
        assert('User added succesfully' in response.body)

    def createUser(self, a_type='user'):
        # Helper: build (but do not persist) a User entity.
        user = models.User(email='stevenmarr@example.com',
                           firstname='steven',
                           lastname='marr',
                           account_type=a_type)
        # BUGFIX: was ``user.set_password = 'password'`` which replaced the
        # method with a string instead of hashing and storing the password.
        user.set_password('password')
        return user

    def activateUser(self, user):
        user.verified = True

    def testCreateUser(self):
        user = self.createUser()
        assert(user.email == 'stevenmarr@example.com')

    def testAdminHandlerAsUser(self):
        # A plain user must be redirected away from the admin page.
        self.activateUser(self.createUser())
        response = self.testapp.get('/admin/manage_sessions')
        self.assertEqual(response.status_int, 302)

    def testAdminHandlerAsAdmin(self):
        user = self.createUser(a_type='admin')
        assert(user.account_type == 'admin')
        self.activateUser(user)
        response = self.login('stevenmarr@example.com', 'password')
        self.assertEqual(response.status_int, 200)
        print(response)
        assert('admin' in response.body)
        response = self.testapp.get('/admin/manage_sessions')
        self.assertEqual(response.status_int, 200)

    # Helper: POST credentials to the login handler.
    def login(self, email, password):
        params = {'email': email, 'password': password}
        return self.testapp.post('/login', params)

    def testLogin(self):
        response = self.login('stevenmarr@me.com', 'password')
        self.assertEqual(response.status_int, 200)

    def loginUser(self, email='user@example.com', id='123', is_admin=False):
        # Simulate a Google-Accounts login via testbed environment vars.
        self.testbed.setup_env(
            user_email=email,
            user_id=id,
            user_is_admin='1' if is_admin else '0',
            overwrite=True)
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,027,392
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/utilities.py
|
from dropbox.client import DropboxClient
#from dropbox import rest as dbrest
import dropbox
from models import SessionData, ConferenceData
from google.appengine.ext.webapp import blobstore_handlers
from google.appengine.ext import db, blobstore
from main import super_admin_required, admin_required, BaseHandler, config, data_cache
import time
import logging
from google.appengine.api import taskqueue, modules
import webapp2
import json
# Backend upload to dropbox handler
class UploadToDropBox(blobstore_handlers.BlobstoreDownloadHandler, BaseHandler):
    """Task-queue handler that streams a session's blobstore file to Dropbox.

    GET renders a progress page; POST performs the chunked upload and
    records the resulting Dropbox path/size on the SessionData entity.
    """

    def get(self):
        # Render the utilities page with an optional progress message.
        user_id = data_cache.get('user_id')
        message = self.request.get('message')
        logging.info('Message is %s'% message)
        if not message: message = 'Dropbox upload in progress...'
        db_account_info = data_cache.get('db_account_info')
        if db_account_info:
            return self.render_response('utilities.html',
                                        success = True,
                                        message = message,
                                        user_id = db_account_info.get('display_name'))
        else:
            return self.render_response('utilities.html',
                                        success = True,
                                        message = message )

    def post(self):
        # Task payload: datastore keys for the session and conference, plus
        # the blob key (only used when re-queueing after a failed chunk).
        key = self.request.get('session_key')
        c_key = self.request.get('conf_key')
        blob_info = self.request.get('blob_info')
        session = SessionData.get(key)
        conference_data = ConferenceData.get(c_key)
        # Skip sessions already uploaded (task retries make this cheap).
        if session.uploaded_to_dbox:
            logging.info('Session | %s | already exists'% session.name)
            return
        if conference_data.dbox_access_token:
            access_token = conference_data.dbox_access_token
        else:
            logging.error('FAILED access_token does not exist')
            #params = {'message':'Authorization token is either revoked or does not exist'}
            #taskqueue.add(url='/utilities/update_dropbox/',
            #              method='GET',
            #              params=params,
            #              target='%s'% conference_data.module)
            return None
        try:
            client = DropboxClient(access_token, "en_US", rest_client=None)
            logging.info('SUCCESS dbox_client created %s' % client)
        except:
            # NOTE(review): bare except hides the real auth error; consider
            # catching the Dropbox client's specific exceptions.
            logging.error('FAILED dbox_client was not created')
            return None
        # Stream the blobstore file into Dropbox's chunked uploader.
        f = session.blob_store_key.open()
        size = session.blob_store_key.size
        uploader = client.get_chunked_uploader(f, size)
        while uploader.offset < size:
            try:
                upload = uploader.upload_chunked()
            except:
                # On a failed chunk, re-queue the whole upload as a fresh task.
                # NOTE(review): the loop then continues with the same
                # uploader, so a persistent failure can spin here — confirm.
                logging.error('FAILED upload of file %s'% f)
                params = {'session_key':key,
                          'conf_key': c_key,
                          'blob_key':blob_info}
                taskqueue.add(url='/utilities/update_dropbox/',
                              method='POST',
                              params=params,
                              target='db-upload')
        filename = session.filename
        # Prefer the structured path; fall back to /default/<filename>.
        if (conference_data.name and session.room and session.presenter[1] and filename):
            response = uploader.finish('/%s/%s/%s/%s'% (conference_data.name, session.room, session.presenter[1], filename), overwrite = False) #folder structure /conf_name/room/date/lastname/filename
        elif filename:
            response = uploader.finish('/default/%s'% filename, overwrite = False)
        else:
            logging.error('FAILED problem naming file, file skipped')
            f.close()
            return None
        # Persist the Dropbox metadata on the session entity.
        session.uploaded_to_dbox = True
        session.dbox_path = response['path']
        session.dbox_size = response['size']
        session.put()
        f.close()
        return
class DeleteFromDropBox(BaseHandler):
    """Deletes a session's file from Dropbox and clears the stored path."""

    def post(self):
        session = SessionData.get(self.request.get('session_key'))
        conference_data = ConferenceData.get(self.request.get('conf_key'))
        db_path = self.request.get('db_path')  # posted by callers but unused here
        try:
            client = DropboxClient(conference_data.dbox_access_token, "en_US", rest_client=None)
            logging.info('DB client created %s' % client)
        except Exception:
            # Narrowed from a bare except: still best-effort, but no longer
            # swallows SystemExit/KeyboardInterrupt.
            logging.info("DB Client was not created, access token is %s"% conference_data.dbox_access_token)
            return None
        try:
            client.file_delete(session.dbox_path)
            logging.info('File %s was deleted' % session.dbox_path)
        except Exception:
            logging.error('File %s not deleted'% session.dbox_path)
            return
        session.dbox_path = None
        # BUGFIX: persist the cleared path; previously the entity was never
        # written back, so the datastore kept the stale Dropbox path.
        session.put()
        # Invalidate the cached session list for this module.
        data_cache.set('%s-sessions'% session.module, None)
        return
class BuildUploadTasksHandler(BaseHandler):
    """Enqueues one Dropbox-upload task per session that has a stored blob."""

    @admin_required
    def post(self):
        # Attempt to build a Dropbox client before generating tasks; if the
        # stored token is invalid, reset the flag and ask for re-auth.
        conference_data = self.get_conference_data()
        try:
            client = DropboxClient(conference_data.dbox_access_token, "en_US", rest_client=None)
        except Exception:
            conference_data.dbox_update = False
            data_cache.set('%s-conference_data'% self.module, None)
            conference_data.put()
            return self.render_response('utilities.html',
                                        failed = True,
                                        message = 'Invalid DropBox authorization, please authorize again')
        sessions = self.get_sessions()
        for session in sessions:
            if session.blob_store_key != None:
                params = {'session_key': session.key(),
                          'conf_key': self.get_conference_data().key(),
                          'blob_key': session.blob_store_key.key()}
                taskqueue.add(url='/utilities/update_dropbox/',
                              method='POST',
                              params=params,
                              target='db-upload')
                logging.info('taskqueue created')
            else:
                # BUGFIX: was ``sesssion.name`` — a NameError whenever a
                # session had no blob.
                logging.error('Session did not post %s'% session.name)
        return self.render_response('utilities.html',
                                    success = True,
                                    message = "Dropbox upload in progress...",
                                    user_id = None)
class UtilitiesHomeHandler(BaseHandler):
    """Renders the utilities landing page with a blobstore upload URL."""

    @admin_required
    def get(self):
        # Removed an unused ``user_id = data_cache.get('user_id')`` lookup
        # and a dead commented-out render call.
        data_upload_url = blobstore.create_upload_url('/admin/upload_conference_data/')
        self.render_response('utilities.html',
                             access_token = self.get_conference_data().dbox_access_token,
                             data_upload_url = data_upload_url)
class ResetSessionDataDBFlagHandler(BaseHandler):
    """Clears the uploaded-to-Dropbox flag on every session so uploads can
    be re-run, then invalidates the cached session list.

    Removed an unreachable triple-quoted block of dead code that followed
    the return statement in the original.
    """

    @admin_required
    def post(self):
        sessions = self.get_sessions()
        for session in sessions:
            session.uploaded_to_dbox = False
            session.put()
        data_cache.set('%s-sessions'% self.module, None)
        # Brief pause before re-rendering — presumably to let the datastore
        # settle so the page does not show stale flags; TODO confirm needed.
        time.sleep(.25)
        return self.render_response('utilities.html', success = True, message = "SessionData DB Reset" )
# Route table for the utilities module; ``config`` is imported from main.
# NOTE(review): debug=True exposes stack traces to clients — confirm this
# is intended outside development.
app = webapp2.WSGIApplication(
    [('/utilities', UtilitiesHomeHandler),
     ('/utilities/build_upload_dropbox/', BuildUploadTasksHandler),
     ('/utilities/update_dropbox/', UploadToDropBox),
     ('/utilities/re_initialize_upload_status/', ResetSessionDataDBFlagHandler),
     ('/utilities/delete_dropbox/', DeleteFromDropBox)
     ], debug=True, config=config)
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,027,393
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/templates/OLD-templates/presentations.py
|
import urllib
from google.appengine.ext import blobstore, db
from models import SessionData, AppEventData
from google.appengine.ext.webapp import blobstore_handlers
import webapp2
class ServeHandler(blobstore_handlers.BlobstoreDownloadHandler):
    """Serves a blobstore file, using the owning SessionData's filename as
    the browser download name."""

    def get(self, resource):
        resource = str(urllib.unquote(resource))
        blob_info = blobstore.BlobInfo.get(resource)
        # BUGFIX/security: bind the key as a GQL parameter instead of
        # string-interpolating the user-supplied resource into the query.
        query = db.GqlQuery("SELECT * FROM SessionData WHERE blob_store_key = :1", resource)
        # BUGFIX: ``filename`` was unbound (NameError) when no session
        # matched; default to None so send_blob falls back to its default.
        filename = None
        for session in query:
            filename = session.filename
        self.send_blob(blob_info, save_as=filename)
# Standalone WSGI app mapping /serve/<blob-key> to the download handler.
# NOTE(review): debug=True prints stack traces to clients — confirm intended.
app = webapp2.WSGIApplication(
    [
        ('/serve/([^/]+)?', ServeHandler),
    ], debug=True)
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,027,394
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/secrets.py
|
# NOTE(review): real-looking credentials committed to source control —
# these should be rotated and loaded from the environment or a secrets
# store rather than checked in.
SECRET_KEY = '027539766791a2e002cbdbc3bb57ed6420e2e46cd4e1bfd818a96ef414919a20'
#*******DropBox API*************
APP_KEY = '1oh7s5aa87v11ql'
APP_SECRET = 'm68wnr09y8xx62d'
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,027,395
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/models.py
|
import time
import webapp2_extras.appengine.auth.models
import logging
from google.appengine.ext import ndb, db, blobstore
from google.appengine.api import modules
from webapp2_extras import security
module = modules.get_current_module_name()
class User(webapp2_extras.appengine.auth.models.User):
    """Application user account stored in NDB via webapp2_extras auth."""

    email = ndb.StringProperty()
    firstname = ndb.StringProperty()
    lastname = ndb.StringProperty()
    # Closed set of account roles.
    account_type_choices = ('presenter', 'user', 'admin', 'super_admin')
    account_type = ndb.StringProperty(required = True, default = "user", choices = account_type_choices)
    password = ndb.StringProperty()
    # GAE module the account was created under (module-level default).
    module = ndb.StringProperty(required = True, default = module)

    def set_password(self, raw_password):
        """Sets the password for the current user

        :param raw_password:
            The raw password which will be hashed and stored
        """
        self.password = security.generate_password_hash(raw_password, length=12)

    @classmethod
    def get_by_auth_token(cls, user_id, token, subject='auth'):
        """Returns a user object based on a user ID and token.

        :param user_id:
            The user_id of the requesting user.
        :param token:
            The token string to be verified.
        :returns:
            A tuple ``(User, timestamp)``, with a user object and
            the token timestamp, or ``(None, None)`` if both were not found.
        """
        token_key = cls.token_model.get_key(user_id, subject, token)
        user_key = ndb.Key(cls, user_id)
        # Use get_multi() to save an RPC call.
        valid_token, user = ndb.get_multi([token_key, user_key])
        if valid_token and user:
            timestamp = int(time.mktime(valid_token.created.timetuple()))
            return user, timestamp
        return None, None

    @classmethod
    def get_user_accounts(cls):
        """Return a query over all plain 'user' accounts."""
        return cls.query(cls.account_type == 'user')

    @classmethod
    def query_user(cls, email):
        """Return a query for accounts with the given email address."""
        # BUGFIX/idiom: was ``User.email == '%s' % email`` — the formatting
        # was a no-op for strings and would silently stringify non-string
        # input; also use ``cls`` so subclasses query their own kind.
        return cls.query(cls.email == email)
class SessionData(db.Model):
    """A conference presentation session and its uploaded file's metadata."""
    # Presenter info as a list of unicode strings; index 1 is used as a
    # path segment when building the Dropbox path (see utilities.py) —
    # presumably the last name, TODO confirm ordering.
    presenter = db.ListProperty(unicode, default=None)
    user_id = db.StringProperty()
    name = db.StringProperty()
    room = db.StringProperty(indexed = True)
    date = db.StringProperty()
    time = db.StringProperty()
    dotw = db.StringProperty()  # presumably day-of-the-week — verify
    #date_time = db.DateTimeProperty()
    create_date = db.DateTimeProperty(auto_now_add = True)
    module = db.StringProperty(default = module)
    # Reference to the uploaded file in blobstore.
    blob_store_key = blobstore.BlobReferenceProperty()
    filename = db.StringProperty()
    # Dropbox sync state, written by utilities.UploadToDropBox.
    uploaded_to_dbox = db.BooleanProperty(default = False)
    # NOTE(review): CategoryProperty is an odd type for a path string.
    dbox_path = db.CategoryProperty(default = None)
    dbox_size = db.StringProperty(default = None)
class AppEventData(db.Model):
    """Audit-log entry recording user/system/session/file events."""
    event = db.StringProperty(required = True)
    event_type = db.StringProperty(required = True, default = 'system', choices = ('user', 'system', 'session', 'file'))
    # Kind of change the event describes.
    transaction = db.StringProperty(choices = ('CREATE', 'EDIT', 'DEL', 'INFO'))
    time_stamp = db.DateTimeProperty(auto_now_add = True, indexed = True)
    # Identifier of the acting user (string, not a reference).
    user = db.StringProperty(required = True)
    module = db.StringProperty(required = True, default = module)
class ConferenceData(db.Model):
    """Per-conference configuration: Dropbox credentials and the email
    message templates (with {name}/{url} placeholders)."""
    module = db.StringProperty(default = module)
    # Dropbox OAuth state for this conference.
    dbox_access_token = db.StringProperty()
    db_user_id = db.StringProperty()
    db_account_info = db.StringProperty()
    dbox_update = db.BooleanProperty(default = False)
    c_client = db.StringProperty(default = 'Client' )
    name = db.StringProperty(default = 'Conference')
    start_date = db.DateProperty()
    end_date = db.DateProperty()
    # Email templates; callers format in {name} and {url}.
    account_verification_msg = db.TextProperty(default = '''Dear {name},\n Thank you for activating your account, we look forward to receiving your presentations. To complete the process please click on the following link to verify your email address {url}''')
    password_reset_msg = db.TextProperty(default = '''Dear {name},\nPlease click on the following link to reset your password {url}''')
    new_account_msg = db.TextProperty(default = '''Dear {name},\nYour account is ready for activation for the upcoming event, Please click on the following link to activate your account {url}''')
    recieved_presentation_msg = db.TextProperty(default = '''Dear {name},\nCongratulations your presentation has uploaded successfully, to view your submission and confirm the upload please click <a href="{url}">here</a>''')
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,115,990
|
tolusalako/ConnectK
|
refs/heads/FQ2016_STABLE
|
/ConnectKSource_python/connectk_gui.py
|
#Original java code: Alex Van Buskirk
#Python port: Toluwanimi Salako
import tkinter as TK
from os import listdir
import connectk
from board_model import BoardModel
from player import Player, AIPlayer, GUIPlayer
from errors import NoAISelectedError, NoGUIError
from threading import Thread, Lock
import sys, traceback
def create_newgame(model = None, player1AI=None, player2AI=None, gui = True, stepmode = False):
    '''Creates a new game. If no_gui, both players must be AI players'''
    if not gui:
        # Headless mode: both sides must be AI file paths; the game runs
        # on a worker thread with no window.
        if player1AI is not None and player2AI is not None:
            player1 = AIPlayer(1, model, player1AI)
            player2 = AIPlayer(2, model, player2AI)
            game = connectk.ConnectK(model, player1, player2)
            p = Thread(target = game.play)
            p.start()
            return
        else:
            raise NoGUIError("If no GUI option is used, both players must be AIs.")
    else:
        # GUI mode: build the Tk window, then wire each side to either a
        # button-click listener (human) or an AI loaded from file.
        root = TK.Tk()
        if model is None: model = BoardModel() #New instance with default settings
        # NOTE(review): the boolean ``gui`` parameter is shadowed here by
        # the ConnectKGUI instance.
        gui = ConnectKGUI(root, model)
        if stepmode:
            gui.stepmode.set(True)
        if player1AI is None:
            player1 = GUIPlayer(1, model)
            gui.add_button_listener(player1.action_listener, 1)
        else:
            player1 = AIPlayer(1, model, player1AI)
        if player2AI is None:
            player2 = GUIPlayer(2, model)
            gui.add_button_listener(player2.action_listener, 2)
        else:
            player2 = AIPlayer(2, model, player2AI)
        game = connectk.ConnectK(model, player1, player2, gui)
        # The game loop runs on a worker thread; the GUI drains its job
        # queue on the Tk main loop (update_gui re-schedules itself).
        p = Thread(target = game.play)
        gui.update_gui(time = 2)
        p.start()
        root.mainloop()
class ConnectKGUI():
    """Tk front end for ConnectK.

    The game logic runs on a worker thread; GUI mutations are queued as
    code strings in ``self.jobs`` (guarded by ``job_lock``) and exec'd on
    the Tk main loop by ``update_gui``.
    """
    STICKY_ALL = TK.N + TK.S + TK.W + TK.E

    def __init__(self, root, model):
        '''Creates a new gui'''
        self.root = root
        self.root.wm_title("ConnectK")
        self.root.geometry('{}x{}'.format(530, 400))
        # Route Tk callback exceptions to our own printer (see print_error).
        self.root.report_callback_exception = self.print_error
        self.root.protocol("WM_DELETE_WINDOW", self.root.destroy)
        # icon = TK.PhotoImage(file='icon.ico')
        # self.root.tk.call('wm', 'iconphoto', self.root.w, icon)
        TK.Grid.rowconfigure(self.root, 0, weight=1)
        TK.Grid.columnconfigure(self.root, 0, weight=1)
        self.width = model.width
        self.height = model.height
        self.gravity = model.gravity
        self.k = model.k_length
        # Index 0 unused; index = player number (1 or 2).
        self.colors = [None, 'red', 'blue']
        self.winning_colors = [None, '#8A1741', '#4F36C9']
        # Serializes cross-thread access to the job list.
        self.job_lock = Lock()
        self.jobs = []
        self.current_status = ""
        self.status = TK.StringVar()
        self.stepmode = TK.BooleanVar()
        self.step = True
        self.init_menu()
        self.init_game()

    def init_menu(self):
        '''Creates the menu items'''
        self.menu = TK.Menu(self.root)
        self.filemenu = TK.Menu(self.menu, tearoff = 0)
        self.menu.add_cascade(label="File", menu=self.filemenu)
        self.filemenu.add_command(label="New", command=self.new_game)
        self.filemenu.add_command(label="Quit", command=self.root.quit)
        self.menu.add_checkbutton(label="StepMode", variable = self.stepmode)
        self.menu.add_separator()
        self.root.config(menu = self.menu)

    def init_game(self):
        '''Creates the frames and buttons for the game'''
        self.frame_buttons = TK.Frame(self.root)
        for x in range(self.width):
            TK.Grid.columnconfigure(self.frame_buttons, x, weight=1)
        for y in range(self.height):
            TK.Grid.rowconfigure(self.frame_buttons, y, weight=1)
        self.frame_buttons.grid(row = 0, sticky = self.STICKY_ALL)
        def binded():
            print("Bound")
        self.buttons = []
        # Grid row 0 is the top, so board row y maps to grid row height-1-y.
        for x in range(self.width):
            col = []
            for y in range(self.height):
                b = TK.Button(self.frame_buttons, text = str(x) + ":" + str(y), width = 7, height = 3) #Might need to create custom button class
                b.grid(row = (self.height - 1) - y, column = x, sticky = self.STICKY_ALL)
                b.bind("<Button-1>", self.bind_button)
                col.append(b)
            self.buttons.append(col)
        self.frame_labels = TK.Frame(self.root)
        self.frame_labels.grid(row = 1, sticky = TK.S)
        self.status_label = TK.Label(self.frame_labels, textvariable = self.status)
        self.status_label.grid(sticky = TK.W)
        # Click listeners indexed by player number (index 0 unused).
        self.listens = [None, None, None]
        self.root.bind("<space>", self.toggle_step)

    def add_button_listener(self, function, player):
        self.listens[player] = function

    def bind_button(self, event):
        '''Binds a button press to a GUIplayer'''
        # Forward the click only to whichever player's turn it is.
        if self.listens[connectk.current_player] is not None:
            self.listens[connectk.current_player](event)

    def toggle_step(self, event):
        self.step = not self.step

    def new_game(self):
        '''Shows the newgame window'''
        dialog = NewGameWindow(self.root, self.width, self.height, self.k, self.gravity)

    def set_status(self, s):
        # Queue a status update to run on the Tk thread.
        self.job_lock.acquire()
        self.jobs.append("self.current_status = '{}'".format(s))
        self.job_lock.release()

    def place_piece(self, location, player):
        # Queue a button recolor for the placed piece.
        x,y = location
        self.job_lock.acquire()
        self.jobs.append("self.buttons[{}][{}]['background'] = '{}'".format(x,y,self.colors[player]))
        self.job_lock.release()

    def highlight_spaces(self, winning_spaces, winner):
        # Queue recolors for the winning line.
        self.job_lock.acquire()
        for x,y in winning_spaces:
            self.jobs.append("self.buttons[{}][{}]['background'] = '{}'".format(x,y,self.winning_colors[winner]))
        self.job_lock.release()

    def update_gui(self, repeat = 1, time = 2):
        # Drain the job queue on the Tk thread, then re-schedule.
        # NOTE(review): jobs are exec'd strings built from game state —
        # closures/functools.partial would be safer than exec here.
        self.job_lock.acquire()
        for job in self.jobs:
            exec(job)
        del self.jobs[:]
        self.job_lock.release()
        if repeat:
            self.root.after(time, self.update_gui)

    def end_updates(self, i):
        if i == 0:
            # Queue the final shutdown step so it runs on the Tk thread.
            self.job_lock.acquire()
            self.jobs.append("self.end_updates(1)")
            self.job_lock.release()
        else:
            # NOTE(review): after_cancel expects the id returned by after(),
            # not the callback itself — confirm this actually cancels.
            self.root.after_cancel(self.update_gui) #Cancel automatic update
            self.update_gui(0) #Update one last time

    def print_error(self, *args):
        '''Prints errors as they occur.
        Tkinter usually hangs and prints all errors after exiting'''
        err = traceback.format_exception(*args)
        for i in range(len(err)):
            print (err[i])
        #err = args
        #print err[0].__name__,"\b:", err[1]
        sys.stdout.flush()
        sys.exit()
class NewGameWindow(TK.Toplevel):
    """Modal dialog for configuring board size, K, gravity and players,
    then launching a new game."""

    def __init__(self, parent, width, height, k, gravity):
        '''Creates a new game window'''
        TK.Toplevel.__init__(self)
        self.title("New Game")
        self.option_add("*Label.Font", "helvetica 12")
        # Board dimension / rule spinboxes across row 0.
        self.label_width = TK.Label(self, text="Width: ")
        self.label_width.grid(row=0, column = 0)
        self.width = TK.StringVar(self)
        self.spinbox_width = TK.Spinbox(self, from_ = 1, to = 99, width = 2, textvariable = self.width)
        self.spinbox_width.grid(row = 0, column = 1)
        self.label_height = TK.Label(self, text="Height: ")
        self.label_height.grid(row=0, column = 2)
        self.height = TK.StringVar(self)
        self.spinbox_height = TK.Spinbox(self, from_ = 1, to = 99, width = 2, textvariable = self.height)
        self.spinbox_height.grid(row = 0, column = 3)
        self.label_k = TK.Label(self, text="K: ")
        self.label_k.grid(row=0, column = 4)
        self.k = TK.StringVar(self)
        self.spinbox_k = TK.Spinbox(self, from_ = 1, to = 99, width = 2, textvariable = self.k)
        self.spinbox_k.grid(row = 0, column = 5)
        self.label_gravity = TK.Label(self, text="Gravity: ")
        self.label_gravity.grid(row=0, column = 6)
        self.gravity = TK.StringVar(self)
        self.spinbox_gravity = TK.Spinbox(self, values=("On", "Off"), width = 3, textvariable = self.gravity)
        self.spinbox_gravity.grid(row = 0, column = 7)
        # Player 1: human/AI radio buttons plus an AI source dropdown.
        self.player1_labelframe = TK.LabelFrame(master = self, text = 'Player 1')
        self.player1_labelframe.grid(row = 1, columnspan = 8, sticky = TK.W + TK.E)
        self.player1 = TK.IntVar()
        self.radiobutton_p1_human = TK.Radiobutton(master = self.player1_labelframe, text = "Human", variable = self.player1, value = 0)
        self.radiobutton_p1_human.grid(row = 0, column = 0, sticky = TK.W)
        self.radiobutton_p1_AI = TK.Radiobutton(master = self.player1_labelframe, text = "AI", variable = self.player1, value = 1)
        self.radiobutton_p1_AI.grid(row = 0, column = 1, sticky = TK.W)
        self.listbox_p1_AI = TK.StringVar(self)
        # Player 2: same controls.
        self.player2_labelframe = TK.LabelFrame(master = self, text = 'Player 2')
        self.player2_labelframe.grid(row = 2, columnspan = 8, sticky = TK.W + TK.E)
        self.player2 = TK.IntVar()
        self.radiobutton_p2_human = TK.Radiobutton(master = self.player2_labelframe, text = "Human", variable = self.player2, value = 0)
        self.radiobutton_p2_human.grid(row = 0, column = 0, sticky = TK.W)
        self.radiobutton_p2_AI = TK.Radiobutton(master = self.player2_labelframe, text = "AI", variable = self.player2, value = 1)
        self.radiobutton_p2_AI.grid(row = 0, column = 1, sticky = TK.W)
        self.listbox_p2_AI = TK.StringVar(self)
        # Action buttons.
        self.button_frame = TK.Frame(self)
        self.button_frame.grid(row = 3, column = 0, columnspan = 8, sticky = TK.W + TK.E)
        self.button_add_ai = TK.Button(self.button_frame, text = "Add AI", command = self.add_ai)
        self.button_add_ai.grid(row = 0, column = 2)
        self.button_newgame = TK.Button(self.button_frame, text = "New Game", command = self.newgame)
        self.button_newgame.grid(row = 0, column = 3)
        self.button_cancel = TK.Button(self.button_frame, text = "Cancel", command = self.destroy)
        self.button_cancel.grid(row = 0, column = 4)
        # Seed the controls with the current game's settings.
        self.width.set(width)
        self.height.set(height)
        self.k.set(k)
        self.gravity.set("On" if gravity else "Off")
        self.parent = parent
        self.update_ai_list()

    def update_ai_list(self, ai = None):
        '''Updates the list of AIs with those in the folder + ai'''
        # Every .py file in the working directory is offered as an AI.
        self.default_ai_tuple = tuple([f for f in listdir("./") if f[-3:] == ".py"])
        if (ai is None):
            self.p1_sources = TK.OptionMenu(*(self.player1_labelframe, self.listbox_p1_AI) + self.default_ai_tuple)
            self.p2_sources = TK.OptionMenu(*(self.player2_labelframe, self.listbox_p2_AI) + self.default_ai_tuple)
        else:
            self.p1_sources = TK.OptionMenu(*(self.player1_labelframe, self.listbox_p1_AI) + self.default_ai_tuple + (ai,))
            self.p2_sources = TK.OptionMenu(*(self.player2_labelframe, self.listbox_p2_AI) + self.default_ai_tuple + (ai,))
        self.p1_sources.grid(row = 0, column = 2)
        self.p2_sources.grid(row = 0, column = 2)

    def add_ai(self):
        '''Opens a file dialog for player to select AI'''
        # BUGFIX: ``import tkinter as TK`` does not make TK.filedialog
        # available; the submodule must be imported explicitly.
        from tkinter import filedialog
        ai = filedialog.askopenfilename(filetypes=[("AI files", "*.pyc")], title="Select AI")
        # BUGFIX: askopenfilename returns '' (not None) when cancelled, so
        # the old ``is not None`` check always passed.
        if ai:
            self.update_ai_list(ai)

    def newgame(self):
        '''Creates a new game based on the player's options'''
        player1AI = None
        player2AI = None
        model = BoardModel(width = int(self.width.get()), height = int(self.height.get()), k = int(self.k.get()),
                           gravity = True if self.gravity.get() == "On" else False)
        # For each side, radio value 1 means AI — require a selected file.
        if (self.player1.get()):
            p1 = self.listbox_p1_AI.get()
            if len(p1) != 0:
                player1AI = p1 #Load AI Player
            else:
                # BUGFIX: error message typo ("plater" -> "player").
                raise NoAISelectedError("Select file for player 1 AI.")
        if (self.player2.get()):
            p2 = self.listbox_p2_AI.get()
            if len(p2) != 0:
                player2AI = p2 #Load AI Player
            else:
                raise NoAISelectedError("Select file for player 2 AI.")
        # Tear down both the dialog and the old game window, then relaunch.
        self.destroy()
        self.parent.destroy()
        create_newgame(model, player1AI, player2AI)
|
{"/StudentAI.py": ["/ConnectKSource_python/board_model.py"]}
|
34,115,991
|
tolusalako/ConnectK
|
refs/heads/FQ2016_STABLE
|
/ConnectKSource_python/player.py
|
#Original java code: Alex Van Buskirk
#Python port: Toluwanimi Salako
import os, imp
from datetime import datetime, timedelta
from errors import InvalidFileError
ai_class = "StudentAI"  # class every AI module must define
ai_name = "team_name"   # module-level variable every AI module must define

def load_from_file(filepath):
    '''Loads an AI player module from file.

    Raises InvalidFileError when the file is not a .py source file, does
    not define the required AI class, or lacks the team-name variable.
    Returns the loaded module on success.
    '''
    # Split "dir/foo.py" into ("foo", ".py").
    ai_file, file_ext = os.path.splitext(os.path.split(filepath)[-1])
    if file_ext != ".py":
        # BUGFIX: report the offending file name; the original interpolated
        # the required-variable name ("team_name") here instead.
        raise InvalidFileError("[{}]: must be compiled.".format(ai_file + file_ext))
    # Removed a pointless ``except Exception as e: raise e`` wrapper and an
    # unnecessary ``global ai_class`` (the name is only read).
    py_ai = imp.load_source(ai_file, filepath)
    # The module must define the AI class...
    if (py_ai is None) or (not hasattr(py_ai, ai_class)):
        raise InvalidFileError("{} must contain class \"{}\".".format(ai_file + file_ext, ai_class))
    # ...and the team-name variable.
    if not hasattr(py_ai, ai_name):
        # BUGFIX: name the missing variable; the original interpolated the
        # module name where the variable name belongs.
        raise InvalidFileError("{} must contain global variable \"{}\".".format(ai_file + file_ext, ai_name))
    return py_ai
class Player():
    """Base player.

    The game loop obtains moves by polling ``last_move``, which another
    thread (e.g. a GUI click listener) fills in asynchronously.
    """

    def __init__(self, player, state):
        self.last_move = None
        self.player = player
        self.start_state = state
        self.team_name = ""

    def __str__(self):
        return self.team_name

    def set_teamname(self, name):
        self.team_name = name

    def get_move(self, state, deadline = 0):
        """Block until a move is available; with a deadline, stop waiting
        after it passes. Always returns the current ``last_move``."""
        if deadline == 0:
            # Spin until another thread records a move.
            while self.last_move is None:
                pass
        else:
            # Bounded wait; the return value is still read off last_move.
            self.get_move_with_time(state, deadline)
        return self.last_move

    def get_move_with_time(self, state, deadline):
        """Poll ``last_move`` until ``deadline`` seconds elapse; returns
        the move, or None on timeout."""
        limit = timedelta(seconds=deadline)
        started = datetime.now()
        move = self.last_move
        while move is None and datetime.now() - started < limit:
            move = self.last_move
        return move

    def reset_move(self):
        self.last_move = None
class AIPlayer(Player):
    """Player backed by a student AI module loaded from disk."""

    def __init__(self, player, state, filepath):
        Player.__init__(self, player, state)
        self.aifile = load_from_file(filepath)
        self.ai = self.aifile.StudentAI(player, state)
        self.set_teamname(self.aifile.team_name)

    def get_move(self, state, deadline = 0):
        """Ask the AI for a move; with a deadline, give up once it passes.
        Records and returns the move via ``last_move``."""
        if deadline == 0:
            move = None
            while move is None:
                move = self.ai.make_move(state, deadline)
        else:
            move = self.get_move_with_time(state, deadline)
        self.last_move = move
        return self.last_move

    def get_move_with_time(self, state, deadline):
        """Repeatedly query the AI until it returns a move or the deadline
        elapses; returns the move, or None on timeout."""
        limit = timedelta(seconds=deadline)
        started = datetime.now()
        move = self.ai.make_move(state, deadline)
        while move is None and datetime.now() - started < limit:
            move = self.ai.make_move(state, deadline)
        return move
class GUIPlayer(Player):
    """Human player whose moves arrive via GUI button presses."""

    def __init__(self, player, state):
        Player.__init__(self, player, state)
        self.team_name = "GUI"

    def action_listener(self, event):
        """Record the (x, y) encoded in the clicked button's "x:y" label."""
        col, row = event.widget["text"].split(":")
        self.last_move = (int(col), int(row))
|
{"/StudentAI.py": ["/ConnectKSource_python/board_model.py"]}
|
34,115,992
|
tolusalako/ConnectK
|
refs/heads/FQ2016_STABLE
|
/ConnectKSource_python/board_model.py
|
#Original java code: Alex Van Buskirk
#Python port: Toluwanimi Salako
def two_dim_list_of_ints(width, height):
    '''Returns a 2D list of ints filled with 0s (width rows of height zeros,
    each row a distinct list).'''
    return [[0] * height for _ in range(width)]
class BoardModel():
    # Cached winner; -2 means "not yet computed" (see winner()).
    _winner = -2
    # (x, y) of the most recent move; empty tuple before any move.
    last_move = tuple()

    def __init__(self, width = 9, height = 7, k = 5, gravity = False):
        '''Creates a new game board'''
        self.width = width
        self.height = height
        self.k_length = k       # pieces in a row needed to win
        self.gravity = gravity  # pieces fall to the lowest empty row
        self.spaces_left = width*height
        # pieces[x][y]: 0 = empty, otherwise the occupying player's number.
        self.pieces = two_dim_list_of_ints(width, height)
def __str__(self):
'''Returns game board as string'''
return str(self.pieces).replace("],", "]\n")
def place_piece(self, location, player):
'''Updates the board with player moves'''
x, y = location
assert(self.pieces[x][y] == 0);
result_board = self.clone()
while (self.gravity and y > 0 and self.pieces[x][y-1] == 0):
y -= 1
result_board.last_move = (x, y)
result_board.pieces[x][y] = player
result_board.spaces_left = self.spaces_left - 1
return result_board
def get_space_tuple(self, location):
'''Returns the player who is in location'''
x,y = location
return self.get_space(x, y)
def get_space(self, x, y):
'''Returns the player who is in x,y'''
assert(x >= 0 and x < self.width)
assert(y >= 0 and y < self.height)
return self.pieces[x][y]
def get_width(self):
'''Returns the width of the game board'''
return self.width
def get_height(self):
'''Returns the height of the game board'''
return self.height
def get_k_length(self):
'''Returns the k_length of the game board'''
return self.k_length
def gravity_enabled(self):
'''Returns if gravity is enabled or not'''
return self.gravity
def get_last_move(self):
'''Returns the last move made'''
return self.last_move
def has_moves_left(self):
'''Returns if the game board has any empty spaces left'''
return self.spaces_left > 0
def winner(self):
'''Checks for and returns a winner if there is one'''
width = self.width
height = self.height
if (self._winner == -2): #uncached
uncached = False
for i in range(self.width):
for j in range(self.height):
if(self.pieces[i][j] == 0):
if(self.gravity):
break
else:
continue
if(i-1<0 or self.pieces[i-1][j] != self.pieces[i][j]):
count = 1
while(i+count < width and self.pieces[i][j] == self.pieces[i+count][j]):
count+=1
if(count >= self.k_length):
self._winner = self.pieces[i][j]
uncached = True
break
if uncached: break
if(i-1<0 or j-1<0 or self.pieces[i-1][j-1] != self.pieces[i][j]):
count = 1
while(i+count < width and j+count < height and self.pieces[i][j] == self.pieces[i+count][j+count]):
count+=1
if(count >= self.k_length):
self._winner = self.pieces[i][j]
uncached = True
break
if uncached: break
if(i-1<0 or j+1>=height or self.pieces[i-1][j+1] != self.pieces[i][j]):
count = 1
while(i+count < width and j-count >= 0 and self.pieces[i][j] == self.pieces[i+count][j-count]):
count+=1
if(count >= self.k_length):
self._winner = self.pieces[i][j]
uncached = True
break
if uncached: break
if(j-1<0 or self.pieces[i][j-1] != self.pieces[i][j]):
count = 1
while(j+count < height and self.pieces[i][j] == self.pieces[i][j+count]):
count+=1
if(count >= self.k_length):
self._winner = self.pieces[i][j]
uncached = True
break
if uncached: break
if uncached: break
if not uncached:
self._winner = -1 if self.has_moves_left() else 0
return self._winner
def winning_spaces(self):
'''Returns the winning spaces'''
result = []
width = self.width
height = self.height
for i in range(self.width):
for j in range(self.height):
if(self.pieces[i][j] == 0):
if(self.gravity):
break
else:
continue
if(i-1<0 or self.pieces[i-1][j] != self.pieces[i][j]):
count = 1
while(i+count < width and self.pieces[i][j] == self.pieces[i+count][j]):
count+=1
if(count >= self.k_length):
for k in range(self.k_length):
result.append((i+k, j))
return result
if(i-1<0 or j-1<0 or self.pieces[i-1][j-1] != self.pieces[i][j]):
count = 1
while(i+count < width and j+count < height and self.pieces[i][j] == self.pieces[i+count][j+count]):
count+=1
if(count >= self.k_length):
for k in range(self.k_length):
result.append((i+k, j+k))
return result
if(i-1<0 or j+1>=height or self.pieces[i-1][j+1] != self.pieces[i][j]):
count = 1
while(i+count < width and j-count >= 0 and self.pieces[i][j] == self.pieces[i+count][j-count]):
count+=1
if(count >= self.k_length):
for k in range(self.k_length):
result.append((i+k, j-k))
return result
if(j-1<0 or self.pieces[i][j-1] != self.pieces[i][j]):
count = 1
while(j+count < height and self.pieces[i][j] == self.pieces[i][j+count]):
count+=1
if(count >= self.k_length):
for k in range(self.k_length):
result.append((i, j+k))
return result
return result
def clone(self):
'''Returns a clone of the game board'''
cloned = BoardModel(self.width, self.height, self.k_length, self.gravity)
cloned.last_move = self.last_move
cloned.spaces_left = self.spaces_left
for i in range(self.width):
for j in range(self.height):
cloned.pieces[i][j] = self.pieces[i][j]
return cloned
def equals(right):
'''Checks if 'right' is equal to 'self' '''
if (not right.__class__ == BoardModel):
return False
if(self.width != right.width or self.height != right.height or self.k_length != right.k_length):
return False
for i in range(width):
for j in range(height):
if(self.pieces[i][j] != right.pieces[i][j]):
return False
return True
|
{"/StudentAI.py": ["/ConnectKSource_python/board_model.py"]}
|
34,115,993
|
tolusalako/ConnectK
|
refs/heads/FQ2016_STABLE
|
/ConnectKSource_python/connectk.py
|
#Author: Toluwanimi Salako
from datetime import datetime
from threading import Thread
from connectk_gui import *
import sys, time
MAXTIME = 0 #In seconds. 0 = no timer
FIRST_PLAYER = 1
current_player = 0
def print_(message):
    """Print `message` and flush immediately so the wrapper/GUI sees it at once.

    Uses print's own ``flush=True`` instead of a separate
    ``sys.stdout.flush()`` call.
    """
    print(message, flush=True)
class PlayerThread(Thread):
    """A thread that lets the GUI run alongside one player's move computation.

    The chosen move is left in ``self.move`` for the game loop to collect
    after ``join()``.
    """

    def __init__(self, player, model, deadline):
        Thread.__init__(self)
        self.player = player
        self.model = model
        self.deadline = deadline
        self.move = None  # set by run(); stays None if the player never answers

    def run(self):
        # BUG FIX: the original wrapped this in `except Exception as e: raise e`,
        # a no-op that only rewrote the traceback; a plain call propagates the
        # same exception with its original context.
        self.move = self.player.get_move(self.model, self.deadline)
        # Reset the player's cached move so a stale value can't leak into the
        # next turn.
        self.player.reset_move()
class ConnectK():
    '''Handles player moves, turns, and timing'''
    # self.players is 1-indexed by player id; slot 0 is a placeholder.
    def __init__(self, model, player1, player2, view = None):
        self.currentboard = model
        self.players = [None, player1, player2]
        self.view = view
    def play(self):
        """Run the game loop until there is a winner or a draw.

        Returns the winner's player id (set by step()), or 0 on a draw.
        """
        self.current_player = FIRST_PLAYER
        self.winner = 0
        # winner() == -1 means the game is still in progress.
        while self.currentboard.winner() == -1:
            if (not self.view is None and self.view.stepmode.get()):
                # Step mode: only advance one turn each time the user
                # triggers a step; the flag is re-armed via toggle_step.
                if self.view.step:
                    if self.step(): break
                    self.view.toggle_step(None)
                time.sleep(0.3)  # keep the GUI responsive while waiting
            else:
                if self.step(): break
        if (self.winner != 0):
            if (self.view is not None):
                self.view.set_status("Player {} [{}] wins!".format(self.winner,
                    self.players[self.winner].team_name))
                self.view.highlight_spaces(self.currentboard.winning_spaces(), self.winner)
            print_("Player {} [{}] wins!".format(self.winner, self.players[self.winner].team_name))
        if (not self.currentboard.has_moves_left()):
            if (self.view is not None):
                self.view.set_status("Draw")
            print_("Draw")
        return self.winner
    def step(self):
        """Play one turn; returns 1 (truthy) when the mover forfeits."""
        global current_player
        current_player = self.current_player #Update value for GUI
        print_("Player {} says: ".format(self.current_player))
        move = None
        begin = datetime.now()
        # Run the player's move computation on its own thread (with a clone,
        # so the player cannot mutate the real board), then wait for it.
        pt = PlayerThread(self.players[self.current_player], self.currentboard.clone(), MAXTIME)
        pt.start()
        pt.join()
        move = pt.move
        # A missing move or a move onto an occupied space forfeits the game.
        if (move is None or self.currentboard.get_space_tuple(move) != 0):
            print_ ("Player {} returned bad move: {}. Or went over time: {}"
                .format(self.current_player, str(move), str(datetime.now() - begin)))
            self.winner = 1 if self.current_player == 2 else 2 #Forfeit
            return 1
        else:
            self.currentboard = self.currentboard.place_piece(move, self.current_player) #Update board
            print_ ("Player {} returns move {}" .format(self.current_player, str(move)))
            if (self.view is not None):
                # Use the board's last_move: with gravity the piece may have
                # landed lower than where the player aimed.
                self.view.place_piece(self.currentboard.last_move, self.current_player)
            self.current_player = 1 if self.current_player == 2 else 2
        self.winner = self.currentboard.winner()
    def width(self):
        return self.currentboard.width
    def height(self):
        return self.currentboard.height
if __name__ == '__main__':
    argc = len(sys.argv)
    if argc > 1:
        # Default game options, overridable from the command line.
        options = {
            'w':9, #width
            'h':7, #height
            'k':5, #k_length
            'g':0, #gravity
            'u': 1, #gui
            's': 0 #stepmode
        }
        ai_players = []
        for i in range(1, argc):
            if sys.argv[i][0] == '-':
                # Reads the option letter at index 1 and the value character
                # at index 3, i.e. flags shaped like "-w=9".
                # NOTE(review): this only supports single-character values
                # (e.g. "-w=12" would read just "1") — TODO confirm intended.
                options[sys.argv[i][1].lower()] = int(sys.argv[i][3])
            else:
                # Any non-flag argument is an AI file path.
                ai_players.append(sys.argv[i])
        model = BoardModel(width = options['w'], height = options['h'], k = options['k'],
            gravity = options['g'])
        # Dispatch on how many AI files were supplied (2 = AI vs AI,
        # 1 = AI vs human, 0 = human vs human).
        if len(ai_players) == 2:
            create_newgame(model, ai_players[0], ai_players[1], gui = options['u'], stepmode = options['s'])
        elif len(ai_players) == 1:
            create_newgame(model, ai_players[0], gui = options['u'], stepmode = options['s'])
        else:
            create_newgame(model = model, gui = options['u'], stepmode = options['s'])
    else:
        # No arguments: launch with all defaults.
        create_newgame()
|
{"/StudentAI.py": ["/ConnectKSource_python/board_model.py"]}
|
34,115,994
|
tolusalako/ConnectK
|
refs/heads/FQ2016_STABLE
|
/ConnectKSource_python/errors.py
|
#Original java code: Alex Van Buskirk
#Python port: Toluwanimi Salako
class InvalidFileError(Exception):
    """Raised when a file cannot be used as a valid AI module."""

    def __init__(self, message):
        # Use super() instead of calling Exception.__init__ explicitly.
        super().__init__(message)
class NoAISelectedError(Exception):
    """Raised when an action requires an AI but none has been selected."""

    def __init__(self, message):
        # Use super() instead of calling Exception.__init__ explicitly.
        super().__init__(message)
class NoGUIError(Exception):
    """Raised when an action requires the GUI but no GUI is available."""

    def __init__(self, message):
        # Use super() instead of calling Exception.__init__ explicitly.
        super().__init__(message)
|
{"/StudentAI.py": ["/ConnectKSource_python/board_model.py"]}
|
34,115,995
|
tolusalako/ConnectK
|
refs/heads/FQ2016_STABLE
|
/StudentAI.py
|
#Author: Toluwanimi Salako
from collections import defaultdict
import random
import sys
sys.path.append(r'\ConnectKSource_python')
import ConnectKSource_python.board_model as boardmodel
team_name = "StudentAI-Default" #TODO change me
class StudentAI():
    """Random-move baseline AI; replace make_move with a real strategy."""

    def __init__(self, player, state):
        self.last_move = state.get_last_move()
        self.model = state

    def make_move(self, model, deadline):
        '''Write AI Here. Return a tuple (col, row).

        Returns a uniformly random empty cell.

        BUG FIX: the original printed spaces_left and the deadline to
        stdout, which is the channel the wrapping shell parses for moves —
        the debug prints are removed.

        NOTE(review): reads the board from self.model (the state given at
        construction), not the `model` argument; the shell builds a fresh
        StudentAI every turn, so both refer to the same board there.
        '''
        width = self.model.get_width()
        height = self.model.get_height()
        empty_cells = [(i, j)
                       for i in range(width)
                       for j in range(height)
                       if self.model.get_space(i, j) == 0]
        return random.choice(empty_cells)
'''===================================
DO NOT MODIFY ANYTHING BELOW THIS LINE
==================================='''
is_first_player = False  # set by make_ai_shell_from_input from the piece-count parity
deadline = 0  # per-move time limit, read from the shell input
model = None  # current BoardModel parsed from the shell input
ai_piece = 1  # board value the parser counts as this AI's pieces
human_piece = -1  # opponent marker; unused in the visible code — TODO confirm
no_piece = 0  # board value for an empty space
def make_ai_shell_from_input():
    '''
    Reads board state from input and returns the move chosen by StudentAI
    DO NOT MODIFY THIS

    Blocks on stdin until a "makeMoveWithState:" line arrives, parses the
    board into the module-level `model`, and returns a StudentAI built for
    it.  An "end" line terminates the process.
    '''
    global is_first_player
    global model
    global deadline
    ai_shell = None
    begin = "makeMoveWithState:"
    end = "end"
    go = True
    while (go):
        mass_input = input().split(" ")
        if (mass_input[0] == end):
            sys.exit()
        elif (mass_input[0] == begin):
            #first I want the gravity, then number of cols, then number of rows, then the col of the last move, then the row of the last move then the values for all the spaces.
            # 0 for no gravity, 1 for gravity
            #then rows
            #then cols
            #then lastMove col
            #then lastMove row.
            #then deadline.
            #add the K variable after deadline.
            #then the values for the spaces.
            #cout<<"beginning"<<endl;
            gravity = int(mass_input[1])
            col_count = int(mass_input[2])
            row_count = int(mass_input[3])
            last_move_col = int(mass_input[4])
            last_move_row = int(mass_input[5])
            #add the deadline here:
            deadline = -1
            deadline = int(mass_input[6])
            k = int(mass_input[7])
            #now the values for each space.
            counter = 8
            #allocate 2D array.
            model = boardmodel.BoardModel(col_count, row_count, k, gravity)
            count_own_moves = 0
            # Fill the board column-major, tallying this AI's pieces and the
            # remaining empty spaces as we go.
            for col in range(col_count):
                for row in range(row_count):
                    model.pieces[col][row] = int(mass_input[counter])
                    if (model.pieces[col][row] == ai_piece):
                        count_own_moves += model.pieces[col][row]
                    if (not model.pieces[col][row] == no_piece):
                        model.spaces_left -= 1
                    counter+=1
            # Even own-piece count means it is our turn to open: we are player 1.
            if (count_own_moves % 2 == 0):
                is_first_player = True
            model.last_move = (last_move_col, last_move_row)
            ai_shell = StudentAI(1 if is_first_player else 2, model)
            return ai_shell
        else:
            print("unrecognized command", mass_input)
            #otherwise loop back to the top and wait for proper _input.
    return ai_shell
def return_move(move):
    '''
    Prints the move made by the AI so the wrapping shell can input it
    DO NOT MODIFY THIS

    Emits "ReturningTheMoveMade <move[0]> <move[1]>" plus a newline.
    (The original comment said row-then-column; make_move builds moves as
    (col, row), and this prints move[0] then move[1] either way.)
    '''
    made_move = "ReturningTheMoveMade"
    print(made_move, move[0], move[1])
def check_if_first_player():
    """Report whether this AI moves first (parity computed during parsing)."""
    # Reading a module global needs no `global` declaration.
    return is_first_player
if __name__ == '__main__':
    '''
    DO NOT MODIFY THIS
    '''
    # Entry point when run under the Java shell: repeatedly parse a board
    # state from stdin, answer with a move, and flush so the shell sees it.
    # (The original had `global deadline` here, a no-op: `global` has no
    # effect at module level, and the parser assigns the module global anyway.)
    print ("Make sure this program is ran by the Java shell. It is incomplete on its own. :")
    go = True
    while (go): #do this forever until the make_ai_shell_from_input function ends the process or it is killed by the java wrapper.
        ai_shell = make_ai_shell_from_input()
        moveMade = ai_shell.make_move(model, deadline)
        return_move(moveMade)
        del ai_shell
        sys.stdout.flush()
|
{"/StudentAI.py": ["/ConnectKSource_python/board_model.py"]}
|
34,215,712
|
csu-lzt/Multimodal
|
refs/heads/main
|
/data_preprocess.py
|
# -*- coding:utf-8 -*-
'''
数据预处理的工具函数
'''
import pandas as pd
import cv2
from tqdm import tqdm
import json
def read_caption_flickr(f):
    """Read and organize the Flickr dataset's caption data.

    Returns a list of {'image_id': '<id>.jpg', 'caption': [str, ...]}
    dicts, one per image, with captions in file order.
    """
    table = pd.read_table(f, sep='\t', header=None, names=['image', 'caption'])
    # "1234.jpg#0" -> "1234": drop the 6-char "#k"+extension tail.
    ids = [name[:-6] for name in table['image'].values.tolist()]
    texts = table['caption'].values.tolist()
    pairs = list(zip(ids, texts))
    images = {}
    for img_id in list(set(ids)):
        images[img_id] = {'image_id': img_id + '.jpg', 'caption': []}
    for img_id, text in pairs:
        images[img_id]['caption'].append(text)
    return list(images.values())
def read_caption_cn(json_filename):
    """Read the Chinese image-caption dataset's annotation JSON.

    BUG FIX: opens with an explicit utf-8 encoding — the file contains
    Chinese text, and the platform default encoding (e.g. cp936/GBK on a
    Chinese-locale Windows) could fail to decode it.
    """
    with open(json_filename, 'r', encoding='utf-8') as f:
        data = json.load(f)
    return data
def read_image(f, img_size=299):
    """Read one image, padding the short side with white to make it square.

    Returns a float32 RGB array whose long side equals img_size.
    """
    img = cv2.imread(f)
    h, w = img.shape[:2]
    if h > w:
        # Tall image: scale height to img_size, pad left/right.
        h, w = img_size, w * img_size // h
        img = cv2.resize(img, (w, h))
        pad = (h - w) // 2
        top, bottom, left, right = 0, 0, pad, h - w - pad
    else:
        # Wide (or square) image: scale width to img_size, pad top/bottom.
        h, w = h * img_size // w, img_size
        img = cv2.resize(img, (w, h))
        pad = (w - h) // 2
        top, bottom, left, right = pad, w - h - pad, 0, 0
    img = cv2.copyMakeBorder(
        img,
        top=top,
        bottom=bottom,
        left=left,
        right=right,
        borderType=cv2.BORDER_CONSTANT,
        value=[255, 255, 255]
    )
    img = img.astype('float32')
    return img[..., ::-1]  # cv2 reads BGR, but the Keras models expect RGB
#
if __name__ == "__main__":
    # Smoke test: load both caption datasets and sample from the validation split.
    import numpy as np
    data = read_caption_flickr(r'data\flickr\flickr30k-caption\results_20130124.token')
    train_data = data[0:31000]
    valid_data = data[31000:]
    # Pick two random validation entries (np.random.choice is called again
    # below, so the printed indices differ from the sampled ones).
    samples = [valid_data[i] for i in np.random.choice(len(valid_data), 2)]
    print(np.random.choice(len(valid_data), 2))
    data = read_caption_cn(r'D:\Multi-Model Dataset\cn\ai_challenger_caption_train_20170902\caption_train_annotations_20170902.json')
    print('data')
|
{"/model_show.py": ["/data_preprocess.py"], "/task_image_caption.py": ["/data_preprocess.py"]}
|
34,215,713
|
csu-lzt/Multimodal
|
refs/heads/main
|
/model_show.py
|
# -*- coding:utf-8 -*-
# ! -*- coding: utf-8 -*-
# bert做image caption任务,coco数据集
# 通过Conditional Layer Normalization融入条件信息
# https://kexue.fm/archives/7124
from __future__ import print_function
import numpy as np
from bert4keras.backend import keras, K
from bert4keras.layers import Loss
from bert4keras.models import build_transformer_model
from bert4keras.tokenizers import Tokenizer, load_vocab
from bert4keras.optimizers import Adam
from bert4keras.snippets import sequence_padding, is_string
from bert4keras.snippets import DataGenerator, AutoRegressiveDecoder
from bert4keras.snippets import WebServing
from keras.models import Model
from data_preprocess import read_caption_flickr, read_caption_cn, read_image
import warnings
warnings.filterwarnings('ignore')
# Model configuration
maxlen = 64      # maximum caption length in tokens
batch_size = 4
# ============================English (disabled)==========================
# # English BERT config
# config_path = 'bert/uncased_L-12_H-768_A-12/bert_config.json'
# checkpoint_path = 'bert/uncased_L-12_H-768_A-12/bert_model.ckpt'
# dict_path = 'bert/uncased_L-12_H-768_A-12/vocab.txt'
# # data paths
# caption_path = 'data/flickr/flickr30k-caption/results_20130124.token'
# image_path = 'data/flickr/flickr30k-images/'
# # load the data
# data = read_caption_flickr(caption_path)
# train_data = data[0:31000]
# valid_data = data[31000:]
# ============================Chinese=====================================
# Chinese BERT (RoBERTa) config
config_path = 'bert/roberta/bert_config.json'
checkpoint_path = 'bert/roberta/bert_model.ckpt'
dict_path = 'bert/roberta/vocab.txt'
# Data paths (local Windows layout)
caption_path = r'D:\Multi-Model Dataset\cn\ai_challenger_caption_train_20170902\caption_train_annotations_20170902.json'
image_path = 'D:/Multi-Model Dataset/cn/ai_challenger_caption_train_20170902/caption_train_images_20170902/'
# Load the data; this demo script only needs the validation tail.
data = read_caption_cn(caption_path)
valid_data = data[200000:]
# Load and trim the vocabulary, then build the tokenizer.
token_dict, keep_tokens = load_vocab(
    dict_path=dict_path,
    simplified=True,
    startswith=['[PAD]', '[UNK]', '[CLS]', '[SEP]'],
)
tokenizer = Tokenizer(token_dict, do_lower_case=True)
class CrossEntropy(Loss):
    """Cross-entropy loss that masks out the padding positions."""
    def compute_loss(self, inputs, mask=None):
        y_true, y_pred = inputs
        # mask[1] is the Keras mask of the token input; None means no padding.
        if mask[1] is None:
            y_mask = 1.0
        else:
            y_mask = K.cast(mask[1], K.floatx())[:, 1:]
        y_true = y_true[:, 1:]  # target token ids
        y_pred = y_pred[:, :-1]  # predicted sequence, shifted by one position
        loss = K.sparse_categorical_crossentropy(y_true, y_pred)
        # Average only over unmasked (non-padding) positions.
        loss = K.sum(loss * y_mask) / K.sum(y_mask)
        return loss
# Image model: MobileNetV2 global-average-pooled features condition the LM.
MobileNetV2 = keras.applications.mobilenet_v2.MobileNetV2
preprocess_input = keras.applications.mobilenet_v2.preprocess_input
image_model = MobileNetV2(include_top=False, pooling='avg')
# BERT model in LM mode; the image features are injected through
# Conditional Layer Normalization.
model = build_transformer_model(
    config_path,
    checkpoint_path,
    application='lm',
    keep_tokens=keep_tokens,  # keep only the tokens in keep_tokens (trimmed vocab)
    layer_norm_cond=image_model.output,
    layer_norm_cond_hidden_size=128,
    layer_norm_cond_hidden_act='swish',
    additional_input_layers=image_model.input,
)
output = CrossEntropy(1)([model.inputs[0], model.outputs[0]])
model = Model(model.inputs, output)
model.compile(optimizer=Adam(1e-5))
model.summary()
# Inference script: load the best weights saved during training.
model.load_weights('model/best_model.model')
class AutoCaption(AutoRegressiveDecoder):
    """Image-to-sequence decoder based on beam search."""
    @AutoRegressiveDecoder.wraps(default_rtype='probas')
    def predict(self, inputs, output_ids, states):
        # One decoding step: feed the tokens decoded so far plus the image.
        image = inputs[0]
        token_ids = output_ids
        segment_ids = np.zeros_like(token_ids)
        return self.last_token(model).predict([token_ids, segment_ids, image])
    def generate(self, image, topk=2):
        """Generate a caption; `image` may be a file path or a decoded array."""
        if is_string(image):
            image = read_image(image)
        image = preprocess_input(image)
        output_ids = self.beam_search([image], topk=topk)  # beam-search decoding
        return tokenizer.decode(output_ids)
# Decoder instance: generates from [CLS] to [SEP], at most maxlen tokens.
autocaption = AutoCaption(
    start_id=tokenizer._token_start_id,
    end_id=tokenizer._token_end_id,
    maxlen=maxlen
)
def show():
    """Predict captions for two random validation images and print them."""
    picks = np.random.choice(len(valid_data), 2)
    for sample in [valid_data[idx] for idx in picks]:
        img = image_path + sample['image_id']
        print(u'image_id:', sample['image_id'])
        print(u'url:', sample['url'])
        print(u'predict:', autocaption.generate(img))
        print(u'references:', sample['caption'])
        print()
def generate_caption(img):
    """Generate and return a caption string for the given image path."""
    return autocaption.generate(img)
if __name__ == '__main__':
    # func: the function to expose; its output must be JSON-serializable,
    # i.e. json.dumps(func(inputs)) must succeed.
    # arguments: declares func's parameters as a dict; the key is the
    # parameter name, value[0] is a converter for the (string) request
    # value, and value[1] is whether the parameter is required.
    arguments = {'input image': (None, True)}
    web = WebServing(port=8864)
    web.route('/image-caption', generate_caption, arguments)
    web.start()
    # You can now test by visiting e.g.
    # http://127.0.0.1:8864/gen_synonyms?text=苹果多少钱一斤
    # (example URL left over from the original snippet).
|
{"/model_show.py": ["/data_preprocess.py"], "/task_image_caption.py": ["/data_preprocess.py"]}
|
34,215,714
|
csu-lzt/Multimodal
|
refs/heads/main
|
/task_image_caption.py
|
# -*- coding:utf-8 -*-
# ! -*- coding: utf-8 -*-
# bert做image caption任务,coco数据集
# 通过Conditional Layer Normalization融入条件信息
# https://kexue.fm/archives/7124
from __future__ import print_function
import numpy as np
from bert4keras.backend import keras, K
from bert4keras.layers import Loss
from bert4keras.models import build_transformer_model
from bert4keras.tokenizers import Tokenizer, load_vocab
from bert4keras.optimizers import Adam
from bert4keras.snippets import sequence_padding, is_string
from bert4keras.snippets import DataGenerator, AutoRegressiveDecoder
from keras.models import Model
from data_preprocess import read_caption_flickr, read_caption_cn, read_image
import warnings
warnings.filterwarnings('ignore')
# Model configuration
maxlen = 64      # maximum caption length in tokens
batch_size = 4
steps_per_epoch = 1000  # NOTE(review): unused below; fit() uses len(train_generator)
epochs = 20
# ============================English (disabled)==========================
# # English BERT config
# config_path = 'bert/uncased_L-12_H-768_A-12/bert_config.json'
# checkpoint_path = 'bert/uncased_L-12_H-768_A-12/bert_model.ckpt'
# dict_path = 'bert/uncased_L-12_H-768_A-12/vocab.txt'
# # data paths
# caption_path = 'data/flickr/flickr30k-caption/results_20130124.token'
# image_path = 'data/flickr/flickr30k-images/'
# # load the data
# data = read_caption_flickr(caption_path)
# train_data = data[0:31000]
# valid_data = data[31000:]
# ============================Chinese=====================================
# Chinese BERT (RoBERTa) config
config_path = 'bert/roberta/bert_config.json'
checkpoint_path = 'bert/roberta/bert_model.ckpt'
dict_path = 'bert/roberta/vocab.txt'
# Data paths (local Windows layout)
caption_path = r'D:\Multi-Model Dataset\cn\ai_challenger_caption_train_20170902\caption_train_annotations_20170902.json'
image_path = 'D:/Multi-Model Dataset/cn/ai_challenger_caption_train_20170902/caption_train_images_20170902/'
# Load the data and split train/validation at a fixed index.
data = read_caption_cn(caption_path)
train_data = data[0:200000]
valid_data = data[200000:]
# Load and trim the vocabulary, then build the tokenizer.
token_dict, keep_tokens = load_vocab(
    dict_path=dict_path,
    simplified=True,
    startswith=['[PAD]', '[UNK]', '[CLS]', '[SEP]'],
)
tokenizer = Tokenizer(token_dict, do_lower_case=True)
class data_generator(DataGenerator):
    """Data generator: yields ([token_ids, segment_ids, images], None) batches."""
    def __iter__(self, random=False):
        batch_images, batch_token_ids, batch_segment_ids = [], [], []
        for is_end, D in self.sample(random):
            img = image_path + D['image_id']
            # Sample one of the image's reference captions per pass.
            caption = np.random.choice(D['caption'])
            token_ids, segment_ids = tokenizer.encode(caption, maxlen=maxlen)
            batch_images.append(read_image(img))
            batch_token_ids.append(token_ids)
            batch_segment_ids.append(segment_ids)
            if len(batch_token_ids) == self.batch_size or is_end:
                batch_images = np.array(batch_images)
                batch_images = preprocess_input(batch_images)
                batch_token_ids = sequence_padding(batch_token_ids)
                batch_segment_ids = sequence_padding(batch_segment_ids)
                # Targets are derived from the inputs inside the loss layer,
                # hence None for y.
                yield [batch_token_ids, batch_segment_ids, batch_images], None
                batch_images, batch_token_ids, batch_segment_ids = [], [], []
class CrossEntropy(Loss):
    """Cross-entropy loss that masks out the padding positions."""
    def compute_loss(self, inputs, mask=None):
        y_true, y_pred = inputs
        # mask[1] is the Keras mask of the token input; None means no padding.
        if mask[1] is None:
            y_mask = 1.0
        else:
            y_mask = K.cast(mask[1], K.floatx())[:, 1:]
        y_true = y_true[:, 1:]  # target token ids
        y_pred = y_pred[:, :-1]  # predicted sequence, shifted by one position
        loss = K.sparse_categorical_crossentropy(y_true, y_pred)
        # Average only over unmasked (non-padding) positions.
        loss = K.sum(loss * y_mask) / K.sum(y_mask)
        return loss
# Image model: MobileNetV2 global-average-pooled features condition the LM.
MobileNetV2 = keras.applications.mobilenet_v2.MobileNetV2
preprocess_input = keras.applications.mobilenet_v2.preprocess_input
image_model = MobileNetV2(include_top=False, pooling='avg')
# BERT model in LM mode; the image features are injected through
# Conditional Layer Normalization.
model = build_transformer_model(
    config_path,
    checkpoint_path,
    application='lm',
    keep_tokens=keep_tokens,  # keep only the tokens in keep_tokens (trimmed vocab)
    layer_norm_cond=image_model.output,
    layer_norm_cond_hidden_size=128,
    layer_norm_cond_hidden_act='swish',
    additional_input_layers=image_model.input,
)
output = CrossEntropy(1)([model.inputs[0], model.outputs[0]])
model = Model(model.inputs, output)
model.compile(optimizer=Adam(1e-5))
model.summary()
class AutoCaption(AutoRegressiveDecoder):
    """Image-to-sequence decoder based on beam search."""
    @AutoRegressiveDecoder.wraps(default_rtype='probas')
    def predict(self, inputs, output_ids, states):
        # One decoding step: feed the tokens decoded so far plus the image.
        image = inputs[0]
        token_ids = output_ids
        segment_ids = np.zeros_like(token_ids)
        return self.last_token(model).predict([token_ids, segment_ids, image])
    def generate(self, image, topk=2):
        """Generate a caption; `image` may be a file path or a decoded array."""
        if is_string(image):
            image = read_image(image)
        image = preprocess_input(image)
        output_ids = self.beam_search([image], topk=topk)  # beam-search decoding
        return tokenizer.decode(output_ids)
# Decoder instance: generates from [CLS] to [SEP], at most maxlen tokens.
autocaption = AutoCaption(
    start_id=tokenizer._token_start_id,
    end_id=tokenizer._token_end_id,
    maxlen=maxlen
)
def just_show():
    """Predict captions for two random validation images and print them."""
    picks = np.random.choice(len(valid_data), 2)
    for sample in [valid_data[idx] for idx in picks]:
        img = image_path + sample['image_id']
        print(u'image_id:', sample['image_id'])
        print(u'url:', sample['url'])
        print(u'predict:', autocaption.generate(img))
        print(u'references:', sample['caption'])
        print()
class Evaluator(keras.callbacks.Callback):
    """Keras callback: save the best model by training loss and demo captions."""

    def __init__(self):
        self.lowest = 1e10  # best (lowest) training loss seen so far

    def on_epoch_end(self, epoch, logs=None):
        # Persist the model whenever this epoch's loss is a new best.
        current = logs['loss']
        if current <= self.lowest:
            self.lowest = current
            model.save('model/best_model.model')
        # Show a couple of sample predictions after every epoch.
        just_show()
if __name__ == '__main__':
    # Train with best-by-loss checkpointing; one "epoch" is a full pass
    # over the generator (the module-level steps_per_epoch is not used here).
    evaluator = Evaluator()
    train_generator = data_generator(train_data, batch_size)
    model.fit(
        train_generator.forfit(),
        steps_per_epoch=len(train_generator),
        epochs=epochs,
        callbacks=[evaluator]
    )
# else:
#
#     model.load_weights('model/best_model.weights')
|
{"/model_show.py": ["/data_preprocess.py"], "/task_image_caption.py": ["/data_preprocess.py"]}
|
34,315,953
|
zhangzhizheng/PFA
|
refs/heads/main
|
/modules/pca.py
|
# pca.py
# Contains the functions to create a PCA subspace and represent a vector in that subspace
import numpy as np
from numpy import linalg as la
# create_subspace: finds the k principle components of a matrix M
# returns eigenvalues, eigenvectors, mean
def create_subspace(M, k):
    """Find the k principal components of the column-sample matrix M.

    M is (n, m): n features (e.g. pixels) by m samples (e.g. images).
    Returns (eigenvalues, eigenvectors, mean): the k largest eigenvalues,
    their unit-length eigenvector columns (sorted descending), and the
    (n, 1) sample mean.
    """
    n, m = M.shape
    # Column mean of the samples, shape (n, 1).
    mean = np.dot(M, np.ones((m, 1), dtype=np.float32)) / m
    centered = M - mean
    if (m > n):
        # More samples than features: the n x n covariance is the small matrix.
        covariance = np.dot(centered, centered.T)
        # BUG FIX: la.eigh returns (eigenvalues, eigenvectors) in that order;
        # the original unpacked them swapped in this branch.
        eigenvalues, eigenvectors = la.eigh(covariance)
    else:
        # More features than samples (the usual case for images, where pixel
        # count exceeds picture count): diagonalize the m x m Gram matrix
        # instead of the huge n x n covariance, then map the eigenvectors
        # back into feature space.
        L = np.dot(centered.T, centered)
        eigenvalues, eigenvectors = la.eigh(L)
        eigenvectors = np.dot(centered, eigenvectors)
    # Normalize every eigenvector column to unit length.
    # BUG FIX: iterate the actual number of columns; the original used
    # range(m), which indexes out of bounds when m > n.
    for i in range(eigenvectors.shape[1]):
        eigenvectors[:, i] = eigenvectors[:, i] / la.norm(eigenvectors[:, i])
    # Sort by eigenvalue, largest first.
    sorted_order = np.flipud(np.argsort(eigenvalues))
    eigenvalues = eigenvalues[sorted_order]
    eigenvectors = eigenvectors[:, sorted_order]
    principle_eigenvalues = eigenvalues[0:k]
    principle_eigenvectors = eigenvectors[:, 0:k]
    return principle_eigenvalues, principle_eigenvectors, mean
# project_image: projects an input image (y) onto a input subspace (W) with mean (mu)
# returns a projection onto W
def project_image(y, W, mu):
    """Project the image vector y (mean mu subtracted) onto the subspace W."""
    centered = y - mu
    return np.dot(W.T, centered).T
# reverse_projection: projects the vector x back into the image space from subspace (W) with mean (mu)
# returns a flattened image vector
def reverse_projection(x, W, mu):
    """Map the subspace coordinates x back to a flattened image vector."""
    reconstructed = np.dot(W, x.T)
    return reconstructed + mu
|
{"/main.py": ["/utils.py", "/budgets_accountant.py", "/models/fed.py", "/modules/logistic_reg.py", "/modules/client.py", "/modules/server.py", "/modules/budgets_accountant.py", "/common_utils/tf_utils.py", "/modules/hparams.py"], "/models/fed.py": ["/utils.py"], "/modules/client.py": ["/modules/budgets_accountant.py", "/common_utils/tf_utils.py"], "/modules/logistic_reg.py": ["/modules/models.py"], "/modules/server.py": ["/modules/lanczos.py", "/common_utils/tf_utils.py"]}
|
34,315,954
|
zhangzhizheng/PFA
|
refs/heads/main
|
/simulation/datasets/data_reader copy.py
|
import os
import struct
import numpy as np
import pickle
import tensorflow.compat.v1 as tf
from tensorflow.keras import datasets, layers, models
from tf_cifar10.cifar10_input import cifar10, cifar10_input
#from simulation.datasets import cifar10
"""
Loosely inspired by http://abel.ee.ucla.edu/cvxopt/_downloads/mnist.py
which is GPL licensed.
"""
def read_mnist(dataset="training", data_path="."):
    """Load MNIST-format idx images and labels from `data_path`.

    Returns (img, lbl): img is a (N, rows*cols) float array scaled to
    [0, 1]; lbl is a length-N int8 array.  Raises ValueError for an
    unknown `dataset` name.
    """
    # BUG FIX: compare strings with == rather than `is` — identity is not
    # guaranteed for equal string values.
    if dataset == "training":
        fname_img = os.path.join(data_path, 'train-images-idx3-ubyte')
        fname_lbl = os.path.join(data_path, 'train-labels-idx1-ubyte')
    elif dataset == "testing":
        fname_img = os.path.join(data_path, 't10k-images-idx3-ubyte')
        fname_lbl = os.path.join(data_path, 't10k-labels-idx1-ubyte')
    else:
        # BUG FIX: `raise(ValueError, msg)` raised a tuple, which is a
        # TypeError in Python 3; construct the exception properly.
        raise ValueError("dataset must be 'testing' or 'training'")
    print(fname_lbl)
    # Load everything into numpy arrays (idx headers are big-endian).
    with open(fname_lbl, 'rb') as flbl:
        magic, num = struct.unpack(">II", flbl.read(8))
        lbl = np.fromfile(flbl, dtype=np.int8)
    with open(fname_img, 'rb') as fimg:
        magic, num, rows, cols = struct.unpack(">IIII", fimg.read(16))
        img = np.fromfile(fimg, dtype=np.uint8).reshape(len(lbl), rows, cols)
    # Flatten each image and normalize pixel values to [0, 1].
    print('shape of img:', img.shape)
    img = np.reshape(img, [img.shape[0], img.shape[1] * img.shape[2]]) * 1.0 / 255.0
    return img, lbl
def read_cifar10(dataset="training", data_path="."):
    """Load CIFAR-10 images and labels, flattened to (N, H*W*C).

    Raises ValueError for an unknown `dataset` name.
    NOTE(review): training and testing go through different loaders
    (cifar10_input.distorted_inputs vs cifar10.load_test_data) — confirm
    both return (images, labels) in compatible formats.
    """
    if dataset == "training":
        img, lbl = cifar10_input.distorted_inputs(data_dir=data_path, batch_size=6000)
    elif dataset == "testing":
        img, lbl = cifar10.load_test_data(data_path)
    else:
        # BUG FIX: `raise(ValueError, msg)` raised a tuple, which is a
        # TypeError in Python 3; construct the exception properly.
        raise ValueError("dataset must be 'testing' or 'training'")
    # Flatten each image to a single feature vector.
    print('shape of img:', img.shape)
    img = np.reshape(img, [img.shape[0], img.shape[1] * img.shape[2] * img.shape[3]])
    return img, lbl
def load_dataset(path, dataset):
    """Load and preprocess a dataset for federated training.

    `dataset` is one of 'mnist', 'fmnist' or 'cifar10'.  Returns
    (sorted_x_train, sorted_y_train, x_test, y_test); the training set is
    sorted by label to make the federated split non-i.i.d.
    Raises ValueError for an unknown dataset name.
    """
    print(path, dataset)
    # Load the raw arrays for the requested dataset.
    if dataset == 'mnist' or dataset == 'fmnist':
        data_path = os.path.join(path, dataset)
        x_train, y_train = read_mnist('training', data_path)
        x_test, y_test = read_mnist('testing', data_path)
    elif dataset == 'cifar10':
        data_path = os.path.join(path, dataset)
        x_train, y_train = read_cifar10('training', data_path)
        x_test, y_test = read_cifar10('testing', data_path)
    else:
        # ROBUSTNESS FIX: an unknown name previously fell through and died
        # later with a NameError on x_train; fail fast with a clear message.
        raise ValueError("dataset must be 'mnist', 'fmnist' or 'cifar10'")
    print('shape of data: ', x_train.shape, y_train.shape, x_test.shape, y_test.shape)
    # Create the train set.
    x_train = x_train.astype(float)
    y_train = y_train.astype(float)
    # Sort the train set by label (to make federated learning non-i.i.d.).
    indices_train = np.argsort(y_train)
    sorted_x_train = x_train[indices_train]
    sorted_y_train = y_train[indices_train]
    # Create the test set.
    x_test = x_test.astype(float)
    y_test = y_test.astype(float)
    return np.array(sorted_x_train), np.array(sorted_y_train), np.array(x_test), np.array(y_test)
|
{"/main.py": ["/utils.py", "/budgets_accountant.py", "/models/fed.py", "/modules/logistic_reg.py", "/modules/client.py", "/modules/server.py", "/modules/budgets_accountant.py", "/common_utils/tf_utils.py", "/modules/hparams.py"], "/models/fed.py": ["/utils.py"], "/modules/client.py": ["/modules/budgets_accountant.py", "/common_utils/tf_utils.py"], "/modules/logistic_reg.py": ["/modules/models.py"], "/modules/server.py": ["/modules/lanczos.py", "/common_utils/tf_utils.py"]}
|
34,315,955
|
zhangzhizheng/PFA
|
refs/heads/main
|
/modules/budgets_accountant.py
|
import numpy as np
import copy
import math
from tensorflow_privacy.privacy.analysis import compute_dp_sgd_privacy_lib
class BudgetsAccountant:
    """Tracks a client's cumulative differential-privacy budget spend.

    precheck() estimates the total budget after the next round and refuses
    participation once the estimate would exceed epsilon; update() commits
    the estimate after the round actually runs.
    """

    def __init__(self, epsilon, delta, noise_multiplier,
                 accumulation=0):
        self.epsilon = epsilon                    # total privacy budget
        self.delta = delta                        # DP delta parameter
        self.noise_multiplier = noise_multiplier  # DP-SGD noise multiplier
        self.accum_bgts = 0       # budget spent so far
        self.finished = False     # True once the budget is exhausted
        # ROBUSTNESS FIX: initialize the pending estimate so update() cannot
        # raise AttributeError if called before a successful precheck().
        self.tmp_accum_bgts = 0
        self.__curr_steps = 0     # local SGD steps taken so far

    def precheck(self, dataset_size, batch_size, loc_steps):
        '''Pre-check if the current client could participate in next round.'''
        if self.finished:
            return False
        # Estimate the accumulated budget after the prospective round.
        # NOTE(review): heuristic bound ~ 10*q*sqrt(T*log10(1/delta))/sigma
        # (base-10 log, matching the original) — confirm against the source
        # paper's composition bound.
        tmp_steps = self.__curr_steps + loc_steps
        q = batch_size * 1.0 / dataset_size
        tmp_accum_bgts = 10 * q * math.sqrt(tmp_steps * (-math.log10(self.delta))) / self.noise_multiplier
        if self.epsilon - tmp_accum_bgts < 0:
            # Budget would be exhausted: opt out of all remaining rounds.
            self.finished = True
            return False
        else:
            self.tmp_accum_bgts = tmp_accum_bgts
            return True

    def update(self, loc_steps):
        """Commit the estimate from the last precheck; returns the new total."""
        self.__curr_steps += loc_steps
        self.accum_bgts = self.tmp_accum_bgts
        self.tmp_accum_bgts = 0
        return self.accum_bgts
|
{"/main.py": ["/utils.py", "/budgets_accountant.py", "/models/fed.py", "/modules/logistic_reg.py", "/modules/client.py", "/modules/server.py", "/modules/budgets_accountant.py", "/common_utils/tf_utils.py", "/modules/hparams.py"], "/models/fed.py": ["/utils.py"], "/modules/client.py": ["/modules/budgets_accountant.py", "/common_utils/tf_utils.py"], "/modules/logistic_reg.py": ["/modules/models.py"], "/modules/server.py": ["/modules/lanczos.py", "/common_utils/tf_utils.py"]}
|
34,315,956
|
zhangzhizheng/PFA
|
refs/heads/main
|
/utils/grid_search.py
|
"""
Non projection component.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
import os
import pickle
import math
import time
#import copy
import tensorflow.compat.v1 as tf
import numpy as np
from utils import global_step_creator, sampling, Vname_to_FeedPname, Vname_to_Pname, print_new_comm_round, save_progress, \
print_loss_and_accuracy, print_new_comm_round
from data_reader import load_dataset
from create_clients import create_iid_clients, create_noniid_clients
from budgets_accountant import BudgetsAccountant
from models import nets
from models.fed import LocalUpdate, ServerAggregation
# Module-level configuration: fixed RNG seed, a TF session capped at 50% of
# GPU memory, and the absl command-line flags for the experiment sweep.
# NOTE(review): creating a tf.Session at import time is a side effect —
# confirm it is intentional (the session is never used in this file).
np.random.seed(10)
config = tf.ConfigProto()
config.gpu_options.per_process_gpu_memory_fraction = 0.5
session = tf.Session(config=config)
# Experiment hyperparameters
flags.DEFINE_enum('dataset', 'mnist', ['mnist', 'cifar10'], 'Which dataset to use.')
flags.DEFINE_enum('model', 'cnn', ['lr', 'cnn', '2nn'], 'Which model to use. This '
                  'can be a convolutional model (cnn) or a two hidden-layer '
                  'densely connected network (2nn).')
flags.DEFINE_boolean('noniid', False, 'If True, train with noniid data distribution.')
flags.DEFINE_integer('N', 10,
                     'Total number of clients.')
flags.DEFINE_integer('max_steps', 10000,
                     'Total number of communication round.')
flags.DEFINE_integer('client_dataset_size', None,
                     'If None, set the default value.')
#flags.DEFINE_integer('client_batch_size', 128,
#                     'Batch size used on the client.')
#flags.DEFINE_integer('num_microbatches', 64, 'Number of microbatches '
#                     '(must evenly divide batch_size)')
# learning rate
flags.DEFINE_enum('lr_mode', 'const', ['const', 'decay'], 'learning rate mode.')
#flags.DEFINE_float('lr', 0.1, 'Learning rate for local update procedure.')
# Differential privacy flags
flags.DEFINE_boolean(
    'dpsgd', False, 'If True, train with DP-SGD. If False, '
    'train with vanilla SGD.')
# NOTE(review): the help text below looks copy-pasted from 'sample_mode' —
# 'eps' actually selects the privacy-budget level (high/low).
flags.DEFINE_enum('eps', None, ['high','low'], 'Samping mechanism: '
                  'R for random sample, W for weighted sample and None')
flags.DEFINE_float(
    'delta', 1e-5, 'Privacy parameter delta'
    'Delta.')
flags.DEFINE_float('l2_norm_clip', 1.0, 'Clipping norm')
flags.DEFINE_integer('local_steps', 50,
                     'The round gap between two consecutive communications.')
# Personalized privacy flags
flags.DEFINE_enum('sample_mode', None, ['R','W1','W2'], 'Samping mechanism: '
                  'R for random sample, W for weighted sample and None')
flags.DEFINE_float('sample_ratio', 0.1, 'Sample ratio.')
# weighted average
flags.DEFINE_boolean('wei_avg', False, 'If True, train with weighted averaging.')
# fedavg
flags.DEFINE_boolean('fedavg', False, 'If True, train with fedavg.')
# Projection flags
#flags.DEFINE_enum('projection', 'False', ['True','False','Mixture'], 'Projection mode: '
#                  'Mixture for without projection at formal period and with projection for later period.')
flags.DEFINE_boolean('projection', False, 'If True, use projection.')
flags.DEFINE_integer('proj_dims', 5,
                     'The dimensions of subspace.')
flags.DEFINE_enum('proj_method', 'lanczos', ['full','power','lanczos'], 'Projection method.')
flags.DEFINE_boolean('error_feedback', False, 'If True, use error feedback.')
# save dir flags
flags.DEFINE_string('save_dir', os.path.join(os.getcwd(), 'res_Jan'), 'Model directory')
flags.DEFINE_string('log', os.path.join(os.getenv('TEST_TMPDIR', '/tmp'),
                    'tensorflow/mnist/logs'), 'Log data directory')
FLAGS = flags.FLAGS
# How many times to repeat each experiment
NUM_REPEATS=10
# How many cores to use
CORES = 40
def dict_product(dicts):
    """Yield one dict per combination of the value lists in `dicts`.

    Example: {'a': [1, 2], 'b': [3]} yields {'a': 1, 'b': 3}, {'a': 2, 'b': 3}.
    Returns a generator (lazy).
    """
    # BUG FIX: `product` was referenced without ever being imported at module
    # level, so every call raised NameError; import it locally.
    from itertools import product
    return (dict(zip(dicts, x)) for x in product(*dicts.values()))
def print_model_information():
    """Print a summary of the current run configuration.

    NOTE(review): this function reads names (epsilons, noise_multiplier,
    x_train, y_train, x_test, y_test, client_set, m) that are locals of
    main() in this file — it only works if those become module-level or are
    passed in; confirm the intended scope.
    """
    print('Model Information: {}'.format(FLAGS.model))
    # FIX: this call was missing its closing parenthesis (SyntaxError).
    print('DPSGD? {}'.format(FLAGS.dpsgd))
    # FIX: 'pro' + FLAGS.proj_dims concatenated str with int (TypeError).
    agg_method = 'fedavg' if FLAGS.fedavg else ('weiavg' if FLAGS.wei_avg else 'pro' + str(FLAGS.proj_dims))
    print('Aggregation method: {}'.format(agg_method))
    # FIX: missing closing parenthesis here as well (SyntaxError).
    print('Privacy budgets setting: {} level'.format(FLAGS.eps))
    for i in range(FLAGS.N):
        # FIX: the format string has four placeholders but was given only two
        # arguments (IndexError); supply the per-client values.
        print("client {}: epsilons {:.3f}, delta {:.3f}, noise_multiplier {:.3f}".format(
            i, epsilons[i], FLAGS.delta, noise_multiplier[i]))
    print('Dataset Information: {}'.format(FLAGS.dataset))
    print('non I.I.D.? {}'.format(FLAGS.noniid))
    print('dataset size: x_train->{}, y_train->{}, x_test->{}, y_test->{}'.format(len(x_train), len(y_train), len(x_test), len(y_test)))
    print('client dataset size: {}'.format(len(client_set[0])))
    print('client sampling ratio: {}'.format(m))
def main(unused_argv):
    """Grid-search driver: load data, build clients and privacy budgets,
    then sweep hyper-parameter combinations.

    NOTE(review): this function looks like an unfinished merge of the
    training entry point and a multiprocessing grid search copied from
    another project — several names it uses are undefined (see inline
    NOTE(review) markers). It will raise NameError as written.
    """
    project_path = os.getcwd()
    # load dataset
    x_train, y_train, x_test, y_test = load_dataset(FLAGS.dataset, project_path)
    # split data
    client_set_path = os.path.join(project_path, 'dataset', FLAGS.dataset, 'clients', ('noniid' if FLAGS.noniid else 'iid'))
    #client_set_path = project_path + '/dataset/' + FLAGS.dataset + '/clients/' + ('noniid' if FLAGS.noniid else 'iid')
    client_dataset_size = len(x_train) // FLAGS.N if FLAGS.client_dataset_size is None else FLAGS.client_dataset_size
    if not FLAGS.noniid:
        client_set = create_iid_clients(FLAGS.N, len(x_train), 10, client_dataset_size, client_set_path)
    else:
        client_set = create_noniid_clients(FLAGS.N, len(x_train), 10, client_dataset_size, client_set_path)
    COMM_ROUND = int(FLAGS.max_steps / FLAGS.local_steps)
    # set personalized privacy budgets
    if FLAGS.dpsgd:
        # Mixture-of-Gaussians preference distribution for per-client epsilons;
        # `threshold` separates "public" (high-budget) from "private" clients.
        # NOTE(review): mean/std/threshold stay undefined if FLAGS.eps is None.
        if FLAGS.eps == 'high':
            mean = [1, 5, 9]
            std = [0.1, 1, 1]
            threshold = 6
        elif FLAGS.eps == 'low':
            mean = [0.5, 2.5, 4.5]
            std = [0.1, 0.5, 0.5]
            threshold = 3
        pr_dist = [0.7, 0.2, 0.1]
        print("mean:{}, std:{}, threshold:{}, pr_dist:{}".format(mean, std, threshold, pr_dist))
        epsilons = []
        for i in range(FLAGS.N):
            # Pick a mixture component, then sample that client's epsilon.
            dist_idx = np.argmax(np.random.multinomial(1, pr_dist))
            epsilons.append(np.random.normal(mean[dist_idx], std[dist_idx]))
        print("epsilons:{}".format(epsilons))
        noise_multiplier = []
        for i in range(FLAGS.N):
            # NOTE(review): FLAGS.client_batch_size is commented out in the
            # flag definitions above — this line raises AttributeError.
            q = FLAGS.client_batch_size / len(client_set[i])
            nm = 10 * q * math.sqrt(FLAGS.max_steps * FLAGS.sample_ratio * (-math.log10(FLAGS.delta))) / epsilons[i]
            noise_multiplier.append(nm)
        print('noise_multiplier:', noise_multiplier)
        budgets_accountant = BudgetsAccountant(FLAGS.N, epsilons, FLAGS.delta, noise_multiplier, FLAGS.local_steps, threshold)
    # Number of clients sampled per round.
    if FLAGS.sample_mode is None:
        m = FLAGS.N
    else:
        m = int(FLAGS.sample_ratio * FLAGS.N)
    '''
    epsilons_ = {
      'high': {
        'mean': [1, 5, 9],
        'std': [0.1, 1, 1],
        'threshold': 6,
        'prob': [0.7, 0.2, 0.1]
      },
      'low': {
        'mean': [0.5, 2.5, 4.5],
        'std': [0.1, 0.5, 0.5],
        'threshold': 3,
        'prob': [0.7, 0.2, 0.1]
      }
    }
    eps_levels = ['high', 'low']
    '''
    # Hyper-parameter grid to sweep.
    hyper_ = {
        'client_batch_size': [64, 128, 256],
        'num_microbatches': [64, 128, 256],
        'learning_rate': [0.001, 0.01, 0.1],
        'local_steps': [10, 50, 100],
        'l2_norm_clip': [1.0]
    }
    # NOTE(review): `hypers_` is undefined — the dict above is named `hyper_`.
    hypers = list(dict_product(hypers_))
    hyperparameter_names = list(hypers[0].keys())
    repeat_time = NUM_REPEATS
    # NOTE(review): `eps_levels` is only defined inside the string literal
    # above, so this raises NameError.
    total_configurations = len(eps_levels) * len(hypers) * repeat_time
    # NOTE(review): `clock` is undefined (time.clock was also removed in
    # Python 3.8) — presumably time.time() was intended.
    start = clock()
    result = []
    for hyper in hypers:
        # Microbatches must not exceed the batch size.
        if hyper['client_batch_size'] < hyper['num_microbatches']:
            continue
        # NOTE(review): the loop variable `time` shadows the imported module.
        for time in range(repeat_time):
            # NOTE(review): `datasets_l`, `binary_train_y`, `eps`,
            # `training_labels`, `model_name`, `counter` and `pool` are all
            # undefined here; `results` vs `result` is also inconsistent.
            args = [datasets_l[alg_name][hyper['L']]['training'], binary_train_y, eps/training_labels.shape[1], delta/training_labels.shape[1], hyper, model_name, counter, total_configurations]
            results.append(pool.apply_async(run_pepsi, args))
    end = clock()
    pool.close()
    pool.join()
    # Report per-(eps-level, hyper) results.
    # NOTE(review): `ave_list`, `std_list`, `gamma_list` and
    # `max_correct_list` are never defined in this file.
    for i, eps in enumerate(eps_levels):
        for j, hyper in enumerate(hypers):
            #print('mean:{}, std:{}, threshold:{}, pr:{}'.format(eps['mean'], eps['std'], eps['threshold'], eps['prob']), end='')
            print('{s}'.format(eps), end='')
            #print('{:.2f}'.format(eps), end='', file=logfile)
            for name in hyperparameter_names:
                print('\t{0}'.format(hyper[name]), end='')
                #print('\t{0}'.format(hyper[name]), end='', file=logfile)
            print('\t{:.3f}\t{:.3f}\t{:.3e}'.format(ave_list[i, j], std_list[i, j], gamma_list[i,j]))
            #print('\t{:.3f}\t{:.3f}\t{:.3e}'.format(ave_list[i, j], std_list[i, j], gamma_list[i,j]), file=logfile)
        print('------------------------------------------------------------')
        print('best result for eps:{0} is ave:{1} and std:{2}'.format(eps, max_correct_list[i][0], max_correct_list[i][1]))
        print('------------------------------------------------------------')
        #print('------------------------------------------------------------', file=logfile)
        #print('best result for eps:{0} is ave:{1} and std:{2}'.format(eps, max_correct_list[i][0], max_correct_list[i][1]), file=logfile)
        #print('------------------------------------------------------------', file=logfile)
    print('Running Time: '+str(end-start)+'s')
def run_pepsi(x, y, epsilons, delta, lambda_param,
              learning_rate=None, total_iters=None,
              L=1):
    """Run one federated training experiment and print per-round metrics.

    NOTE(review): the signature looks copied from another project — the body
    never uses x, y, lambda_param, learning_rate, total_iters or L, and it
    reads many names (nets, x_train, y_train, x_test, y_test, client_set, m,
    COMM_ROUND, budgets_accountant, noise_multiplier) that are locals of
    main(), so this function is not callable as written.
    """
    accuracy_accountant = []
    # define tensors and operators in the graph 'g_c'
    with tf.Graph().as_default():
        # build model
        if FLAGS.dpsgd:
            gradient_op_list, train_op_list, eval_op, loss, data_placeholder, labels_placeholder = nets.mnist_model(FLAGS, \
                epsilons, noise_multiplier)
        else:
            gradient_op_list, train_op_list, eval_op, loss, data_placeholder, labels_placeholder = nets.mnist_model(FLAGS)
        # increase and set global step
        increase_global_step, set_global_step = global_step_creator()
        # ==
        # dict, each key-value pair corresponds to the placeholder_name of each tf.trainable_variables
        # and its placeholder.
        # trainable_variables: the placeholder name corresponding to each tf.trainable variable.
        model_placeholder = dict(zip([Vname_to_FeedPname(var) for var in tf.trainable_variables()],
                                     [tf.placeholder(name=Vname_to_Pname(var),
                                                     shape=var.get_shape(),
                                                     dtype=tf.float32)
                                      for var in tf.trainable_variables()]))
        # all trainable variables are set to the value specified through
        # the placeholders in 'model_placeholder'.
        assignments = [tf.assign(var, model_placeholder[Vname_to_FeedPname(var)]) for var in
                       tf.trainable_variables()]
        init = tf.global_variables_initializer()
        with tf.Session() as sess:
            sess.run(init)
            # initial global model and errors
            model = dict(zip([Vname_to_FeedPname(var) for var in tf.trainable_variables()],
                             [sess.run(var) for var in tf.trainable_variables()]))
            model['global_step_placeholder:0'] = 0
            # Error-feedback buffers for the projection; zeros when disabled.
            errors = list(model.values()) if FLAGS.error_feedback else [0]*len(tf.trainable_variables())
            #server.set_global_model(model)
            # initial server aggregation
            #w = weights if FLAGS.wei_avg else None
            server = ServerAggregation(model, FLAGS.dpsgd, FLAGS.projection, FLAGS.proj_dims, FLAGS.wei_avg)
            # initial local update
            local = LocalUpdate(x_train, y_train, client_set, FLAGS.client_batch_size, data_placeholder, labels_placeholder)
            for r in range(COMM_ROUND):
                print_new_comm_round(r)
                # select the participating clients
                if FLAGS.dpsgd:
                    participating_clients = sampling(FLAGS.N, m, client_set, FLAGS.client_batch_size, \
                        FLAGS.sample_mode, budgets_accountant)
                else:
                    participating_clients = range(FLAGS.N)  # temporary
                # if the condition of training cannot be satisfied. (no public clients or no sufficient candidates.
                if not len(participating_clients):
                    print("the condition of training cannot be satisfied. (no public clients or no sufficient candidates.")
                    break
                print(participating_clients)
                ############################################################################################################
                # For each client c (out of the m chosen ones):
                for c in participating_clients:
                    start_time = time.time()
                    #########################################################################################################
                    # Start local update
                    # Setting the trainable Variables in the graph to the values stored in feed_dict 'model'
                    #sess.run(assignments, feed_dict=model)
                    update = local.update(sess, assignments, c, model, FLAGS.local_steps, train_op_list[c])
                    server.aggregate(c, update, is_public = (c in budgets_accountant._public if FLAGS.dpsgd else True))
                    if FLAGS.dpsgd:
                        print('For client %d and delta=%f, the budget is %f and the used budget is: %f' %
                              (c, float(FLAGS.delta), epsilons[c], budgets_accountant.get_accumulation(c)))
                    #print('local update procedure time:', time.time() - start_time)
                    # End of the local update
                ############################################################################################################
                # average and update the global model, apply_gradients(grads_and_vars, global_step)
                e = errors if FLAGS.error_feedback else None
                # Aggregation weights: uniform (fedavg), epsilon-proportional
                # (wei_avg), or None (plain mean).
                if FLAGS.fedavg:
                    n_clients = len(participating_clients)
                    w = np.array([1/n_clients] * n_clients)
                    print(w)
                elif FLAGS.wei_avg:
                    epsSubset = np.array(epsilons)[participating_clients]
                    eps_sum = sum(epsSubset)
                    w = np.array([eps/eps_sum for eps in epsSubset])
                    print(epsSubset, w)
                else:
                    w = None
                model = server.fedavg(model, e, w)
                # Setting the trainable Variables in the graph to the values stored in feed_dict 'model'
                sess.run(assignments + [increase_global_step], feed_dict=model)
                # validate the (current) global model using validation set.
                # create a feed-dict holding the validation set.
                feed_dict = {str(data_placeholder.name): x_test,
                             str(labels_placeholder.name): y_test}
                # compute the loss on the validation set.
                global_loss = sess.run(loss, feed_dict=feed_dict)
                count = sess.run(eval_op, feed_dict=feed_dict)
                accuracy = float(count) / float(len(y_test))
                accuracy_accountant.append(accuracy)
                print_loss_and_accuracy(global_loss, accuracy, stage='test')
            # NOTE(review): bare return; the save_progress block below is
            # dead code kept as a string literal.
            return
            '''
            if FLAGS.dpsgd:
                save_progress(FLAGS, model, accuracy_accountant, budgets_accountant.get_global_budget())
            else:
                save_progress(FLAGS, model, accuracy_accountant)
            '''
# absl entry point: parses the flags defined above, then calls main(argv).
if __name__ == '__main__':
    app.run(main)
|
{"/main.py": ["/utils.py", "/budgets_accountant.py", "/models/fed.py", "/modules/logistic_reg.py", "/modules/client.py", "/modules/server.py", "/modules/budgets_accountant.py", "/common_utils/tf_utils.py", "/modules/hparams.py"], "/models/fed.py": ["/utils.py"], "/modules/client.py": ["/modules/budgets_accountant.py", "/common_utils/tf_utils.py"], "/modules/logistic_reg.py": ["/modules/models.py"], "/modules/server.py": ["/modules/lanczos.py", "/common_utils/tf_utils.py"]}
|
34,315,957
|
zhangzhizheng/PFA
|
refs/heads/main
|
/common_utils/tf_utils.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
import os
import pickle
import math
import heapq
import csv
import re
#import copy
import tensorflow.compat.v1 as tf
import numpy as np
np.random.seed(10)
def global_step_creator():
    """Return ops to (a) increment and (b) directly set the graph's global step.

    Looks up the variable named 'global_step:0' in the current default graph.

    Returns:
        (increase_global_step, set_global_step): the first op adds 1 to the
        global step; the second assigns it from the feedable placeholder
        'global_step_placeholder:0'.

    BUG FIX: callers unpack two values
    (`increase_global_step, set_global_step = global_step_creator()`), but
    this function previously returned only `set_global_step`, raising a
    ValueError on unpacking. The increment op — which survived only as a
    stray string literal after the function — is restored here.
    """
    global_step = [v for v in tf.global_variables() if v.name == "global_step:0"][0]
    # Increment op: global_step <- global_step + 1.
    one = tf.constant(1, dtype=tf.float32, name='one')
    new_global_step = tf.add(global_step, one)
    increase_global_step = tf.assign(global_step, new_global_step)
    # Direct-set op, fed via 'global_step_placeholder:0'.
    global_step_placeholder = tf.placeholder(dtype=tf.float32,
                                             shape=(),
                                             name='global_step_placeholder')
    set_global_step = tf.assign(global_step, global_step_placeholder)
    return increase_global_step, set_global_step
def Assignements(dic):
    """Build assign ops copying values from `dic` (keyed by placeholder name)
    into every trainable variable of the current graph."""
    ops = []
    for var in tf.trainable_variables():
        ops.append(tf.assign(var, dic[Vname_to_Pname(var)]))
    return ops
def Vname_to_Pname(var):
    """Map a TF variable name like 'w:0' to its placeholder name 'w_placeholder'."""
    full_name = var.name
    stem = full_name[:full_name.find(':')]
    return '{}_placeholder'.format(stem)
def Vname_to_FeedPname(var):
    """Map a TF variable name like 'w:0' to the feedable placeholder tensor
    name 'w_placeholder:0'."""
    full_name = var.name
    stem = full_name[:full_name.find(':')]
    return '{}_placeholder:0'.format(stem)
def Vname_to_Vname(var):
    """Strip the output index from a TF variable name ('w:0' -> 'w')."""
    full_name = var.name
    return full_name[:full_name.find(':')]
|
{"/main.py": ["/utils.py", "/budgets_accountant.py", "/models/fed.py", "/modules/logistic_reg.py", "/modules/client.py", "/modules/server.py", "/modules/budgets_accountant.py", "/common_utils/tf_utils.py", "/modules/hparams.py"], "/models/fed.py": ["/utils.py"], "/modules/client.py": ["/modules/budgets_accountant.py", "/common_utils/tf_utils.py"], "/modules/logistic_reg.py": ["/modules/models.py"], "/modules/server.py": ["/modules/lanczos.py", "/common_utils/tf_utils.py"]}
|
34,315,958
|
zhangzhizheng/PFA
|
refs/heads/main
|
/modules/client.py
|
"""
client update
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
import os
import pickle
import math
import copy
import tensorflow.compat.v1 as tf
import numpy as np
import scipy
import time
from operator import mul
from functools import reduce
from modules.budgets_accountant import BudgetsAccountant
from common_utils import main_utils
from common_utils.tf_utils import Vname_to_FeedPname, Vname_to_Pname
np.random.seed(10)
class Client(object):
    """Holds one federated-learning client's local data, TF ops and
    privacy-budget state, and runs its local SGD updates."""
    def __init__(self, x_train, y_train, batch_size, loc_steps):
        # Local training examples and labels (indexed positionally).
        self.x_train = x_train
        self.y_train = y_train
        self.dataset_size = len(x_train)
        self.batch_size = batch_size
        # Number of local SGD steps per communication round.
        self.loc_steps = loc_steps
        # BudgetsAccountant, set via set_ba(); None means non-private client.
        self.ba = None
        # Projection basis / mean for subspace compression, set via set_projection().
        self.Vk = None
        self.mean = None
        # This client's view of the global step (restored on download_model()).
        self.global_steps = 0
    def set_ba(self, ba):
        '''set client's budget accountant'''
        self.ba = ba
    def set_ops(self, train_op, eval_op, scalar_loss,
                data_placeholder, labels_placeholder):
        # Wire up the TF ops/placeholders this client trains and evaluates with.
        self.train_op = train_op
        self.eval_op = eval_op
        self.scalar_loss = scalar_loss
        self.data_ph = data_placeholder
        self.labels_ph = labels_placeholder
    def precheck(self):
        # Non-private clients always participate; private ones ask the budget
        # accountant whether another round would exhaust their epsilon.
        if self.ba is None:
            return True
        else:
            return self.ba.precheck(self.dataset_size, self.batch_size, self.loc_steps)
    def download_model(self, sess, assignments, set_global_step, model):
        # Copy the global model values into the graph's trainable variables,
        # then restore this client's own global-step counter.
        sess.run(assignments, feed_dict=model)
        sess.run(set_global_step, feed_dict={'global_step_placeholder:0':self.global_steps})
    def set_projection(self, Vk=None, mean=None, is_private=False):
        # NOTE(review): self.is_private is only created here, but
        # local_update() reads it unconditionally — confirm that
        # set_projection() is always called first.
        self.Vk = Vk
        self.mean = mean
        self.is_private = is_private
    def local_update(self, sess, model, global_steps):
        """Run loc_steps of local SGD and return the (optionally projected)
        model delta.

        Returns (updates, accum_bgts, Bytes1, Bytes2).
        NOTE(review): Bytes2 is only assigned inside the projection branch,
        so the return raises NameError when no projection is applied —
        confirm callers always enable projection, or initialize Bytes2.
        """
        # local SGD then get the model updates
        for it in range(self.loc_steps):
            # batch_ind holds the indices of the current batch
            batch_ind = np.random.permutation(self.dataset_size)[0:self.batch_size]
            x_batch = self.x_train[[int(j) for j in batch_ind]]
            y_batch = self.y_train[[int(j) for j in batch_ind]]
            # Fill a feed dictionary with the actual set of data and labels using the data and labels associated
            # to the indices stored in batch_ind:
            feed_dict = {str(self.data_ph.name): x_batch,
                         str(self.labels_ph.name): y_batch}
            # Run one optimization step.
            _ = sess.run(self.train_op, feed_dict = feed_dict)
        self.global_steps = sess.run(global_steps)
        # Model delta: downloaded global weights minus locally trained weights.
        updates = [model[Vname_to_FeedPname(var)] - sess.run(var) for var in tf.trainable_variables()]
        # Count uncompressed parameters / bytes (4 bytes per float32).
        num_params1 = 0
        for u in updates:
            num_params1 += reduce(mul, u.shape)
            #print(reduce(mul, u.shape))
        Bytes1 = num_params1*4
        print('num_params: {}, Bytes: {}, M: {}'.format(num_params1, Bytes1, Bytes1/(1024*1024)))
        if (self.Vk is not None) and self.is_private:
            # Project each flattened layer update onto the low-dim subspace.
            update_1d = [u.flatten() for u in updates]
            updates = [ np.dot(self.Vk[i].T, update_1d[i]-self.mean[i]) for i in range(len(update_1d)) ]
            num_params2 = 0
            for u in updates:
                num_params2 += reduce(mul, u.shape)
                #print(reduce(mul, u.shape))
            Bytes2 = num_params2*4
            print('After: num_params: {}, Bytes: {}, M: {}'.format(num_params2, Bytes2, Bytes2/(1024*1024)))
        # update the budget accountant
        accum_bgts = self.ba.update(self.loc_steps) if self.ba is not None else None
        return updates, accum_bgts, Bytes1, Bytes2
|
{"/main.py": ["/utils.py", "/budgets_accountant.py", "/models/fed.py", "/modules/logistic_reg.py", "/modules/client.py", "/modules/server.py", "/modules/budgets_accountant.py", "/common_utils/tf_utils.py", "/modules/hparams.py"], "/models/fed.py": ["/utils.py"], "/modules/client.py": ["/modules/budgets_accountant.py", "/common_utils/tf_utils.py"], "/modules/logistic_reg.py": ["/modules/models.py"], "/modules/server.py": ["/modules/lanczos.py", "/common_utils/tf_utils.py"]}
|
34,315,959
|
zhangzhizheng/PFA
|
refs/heads/main
|
/common_utils/dpsgd_utils.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
import os
import pickle
import math
import heapq
import csv
import re
#import copy
import tensorflow.compat.v1 as tf
import numpy as np
np.random.seed(10)
def set_epsilons(filename, N, is_distributions = True):
    """Draw N per-client privacy budgets from 'epsfiles/<filename>.txt'.

    The distribution is chosen by keyword match on *filename*: 'mixgauss'
    (mixture of Gaussians), 'gauss', 'uniform', 'pareto', 'min'/'max'
    (constant), otherwise an explicit list of epsilons. Returns the epsilons
    (list or numpy array depending on the branch).

    NOTE(review): `is_distributions` is unused, and the computed `threshold`
    is discarded on return — confirm whether callers need it.
    """
    print('=========Epsilons Info========')
    with open('epsfiles/{}.txt'.format(filename), 'r') as rfile:
        lines = rfile.readlines()
    num_lines = len(lines)
    if re.search('mixgauss', filename):
        print('{} is a mix gaussian distribution.'.format(filename))
        # One "mean std" component per line, then probability and threshold
        # lines at the bottom of the file.
        dists = []
        for i in range(num_lines-2):
            print(lines[i])
            values = lines[i].split()
            dist = {'mean':float(values[1]), 'std':float(values[2])}
            dists.append(dist)
        threshold = float(lines[-1].split()[1])
        pr_dist = [ float(x) for x in lines[-2].split()[1:] ]
        print('pr_list:{}, threshold:{}'.format(pr_dist, threshold))
        # Resample until at least one client exceeds the public threshold.
        while(True):
            epsilons = []
            for i in range(N):
                dist_idx = np.argmax(np.random.multinomial(1, pr_dist))
                eps = np.random.normal(dists[dist_idx]['mean'], dists[dist_idx]['std'])
                epsilons.append(eps)
            epsilons = np.array(epsilons)
            if (len( epsilons [epsilons > threshold] ) > 0) :
                break
    elif re.search('gauss', filename):
        print('{} is a gaussian distribution.'.format(filename))
        values = lines[0].split()
        dist = {'mean':float(values[1]), 'std':float(values[2])}
        epsilons = np.random.normal(dist['mean'], dist['std'], N)
        threshold = float(lines[-1].split()[1])
    elif re.search('uniform', filename):
        print('{} is a uniform distribution.'.format(filename))
        values = lines[0].split()[1:]
        _min, _max = float(values[0]), float(values[1])
        epsilons = np.random.uniform(_min, _max, N)
        threshold = float(lines[-1].split()[1])
        # Resample until at least one client exceeds the threshold (the inner
        # check is redundant with the while condition but harmless).
        while len( epsilons [epsilons > threshold] ) == 0:
            epsilons = np.random.uniform(_min, _max, N)
            if len( epsilons [epsilons > threshold] ) > 0:
                break
    elif re.search('pareto', filename):
        print('{} is a pareto distribution.'.format(filename))
        x_m, alpha = float(lines[0].split()[1]), float(lines[0].split()[2])
        print(x_m, alpha)
        epsilons = (np.random.pareto(alpha, N) + 1) * x_m
        #threshold = np.sort(epsilons)[::-1][int(N*0.2)-1]
        threshold = 2 if N == 10 else 5
    elif re.search('min', filename):
        print('{} take the minimum value over all clients\' preferences.'.format(filename))
        x_min = float(lines[0].split()[1])
        print(x_min)
        epsilons = [x_min] * N
        threshold = None
    elif re.search('max', filename):
        print('{} take the maximum value over all clients\' preferences.'.format(filename))
        x_max = float(lines[0].split()[1])
        epsilons = [x_max] * N
        threshold = None
    else:
        '''or you can directly provide the exact epsilons of each clients. Note that the total number
        of epsilons should be equal to the number of clients N.
        #format:
        epsilons 0.5 0.5 0.5 0.5 ... (total N values)
        threshold 1.0
        '''
        print('{} is not a distribution.'.format(filename))
        values = lines[0].split()[1:]
        epsilons = [float(v) for v in values]
        # BUG FIX: lines[1][1] indexed a single character of the second line
        # (e.g. 'h' from 'threshold 1.0'), crashing float(); parse the second
        # whitespace-separated token instead.
        threshold = float(lines[1].split()[1])
    print('epsilons:{}, total {} values.'.format(epsilons, len(epsilons)))
    return epsilons
def compute_noise_multiplier( N, L, T, epsilon, delta):
    """Noise multiplier for sampling ratio L/N over T steps at (epsilon, delta),
    using the simplified bound eps ~= 10*q*sqrt(T*(-log10(delta)))/sigma."""
    sampling_ratio = L / N
    scaled_steps = T * (-math.log10(delta))
    return 10 * sampling_ratio * math.sqrt(scaled_steps) / epsilon
|
{"/main.py": ["/utils.py", "/budgets_accountant.py", "/models/fed.py", "/modules/logistic_reg.py", "/modules/client.py", "/modules/server.py", "/modules/budgets_accountant.py", "/common_utils/tf_utils.py", "/modules/hparams.py"], "/models/fed.py": ["/utils.py"], "/modules/client.py": ["/modules/budgets_accountant.py", "/common_utils/tf_utils.py"], "/modules/logistic_reg.py": ["/modules/models.py"], "/modules/server.py": ["/modules/lanczos.py", "/common_utils/tf_utils.py"]}
|
34,315,960
|
zhangzhizheng/PFA
|
refs/heads/main
|
/utils/parser.py
|
import os
import csv
#from utils import set_epsilons
# Scrape accuracy values out of raw training logs and write one CSV row of
# accuracies per run configuration.
rfpath = 'log_wproj_1200/log_2/mnist/cnn/iid/mixgauss2/'
wfpath = 'res_wproj_1200/log_2/mnist/cnn/iid/mixgauss2/'
#wfilename = 'log_test/log_lr_iid_20_bs128_nm128_10000_100_R8_mediandp_pro5_256_constlr_0121_v6'
#file4 = 'parser_res/log_lr_iid_20_bs128_nm128_10000_100_R8_mediandp_pro5_256_constlr_0121_v6'
# Projection-dimension settings to process (one log file per setting).
settings = [1,2,3,5,10,20,30,50,100]
#settings = [10,50,100]
#settings = [(128,128),(128,16),(64,64),(64,16),(32,32),(32,16)]
#settings = [1,2,5,10]
for i in settings:
    if not os.path.exists(wfpath):
        os.makedirs(wfpath)
    fname = '30_bs4_nm4_10000_100_R8_wpro{}_256_constlr0.01'.format(i)
    rfname = os.path.join(rfpath,fname)
    wfname = os.path.join(wfpath,'30-wpro{}_256-100-bs4-constlr0.01'.format(i))+'.csv'
    if os.path.isfile(rfname):
        print(rfname)
        Accuracy_accountant = []
        with open(rfname, 'r') as rfile:
            line = rfile.readline()
            while(line):
                # Accuracy lines in the log start with ' - The'; the value
                # follows the colon.
                if(line[:6]==' - The'):
                    #print(line)
                    acc = line.split(':')[1]
                    acc = float(acc)
                    Accuracy_accountant.append(acc)
                line = rfile.readline()
        print(wfname)
        # Write all accuracies of this run as a single CSV row.
        with open(wfname, "w") as csvfile:
            wfile = csv.writer(csvfile, delimiter=',')
            wfile.writerow(Accuracy_accountant)
    else:
        print('file {} not exists.'.format(rfname))
|
{"/main.py": ["/utils.py", "/budgets_accountant.py", "/models/fed.py", "/modules/logistic_reg.py", "/modules/client.py", "/modules/server.py", "/modules/budgets_accountant.py", "/common_utils/tf_utils.py", "/modules/hparams.py"], "/models/fed.py": ["/utils.py"], "/modules/client.py": ["/modules/budgets_accountant.py", "/common_utils/tf_utils.py"], "/modules/logistic_reg.py": ["/modules/models.py"], "/modules/server.py": ["/modules/lanczos.py", "/common_utils/tf_utils.py"]}
|
34,315,961
|
zhangzhizheng/PFA
|
refs/heads/main
|
/modules/lanczos.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
import math
import tensorflow.compat.v1 as tf
import numpy as np
np.random.seed(10)
def Lanczos( mat, m=128 ):
    """Lanczos iteration on the implicit matrix A = sum_col col · colᵀ.

    `mat` is a list of column matrices; A itself is never materialized —
    each matrix-vector product is computed as sum(col @ (colᵀ v)).
    Returns (T, V): the m×m tridiagonal matrix and the m×n basis of
    Lanczos vectors. Uses the global numpy RNG for the start vector.
    """
    # reference: https://en.wikipedia.org/wiki/Lanczos_algorithm
    n = mat[0].shape[0]
    # Random unit-norm start vector.
    v0 = np.random.rand(n)
    v0 /= np.sqrt(np.dot(v0,v0))
    V = np.zeros( (m,n) )
    T = np.zeros( (m,m) )
    V[0, :] = v0
    # step 2.1 - 2.3
    w = np.sum([np.dot(col, np.dot(col.T, V[0,:])) for col in mat], 0)
    alfa = np.dot(w, V[0,:])
    w = w - alfa * V[0,:]
    T[0,0] = alfa
    # needs to start the iterations from indices 1
    # NOTE(review): the loop stops at j = m-2, so row/column m-1 of T and the
    # last Lanczos vector are never filled — confirm this truncation is
    # intended.
    for j in range(1, m-1):
        beta = np.sqrt( np.dot( w, w ) )
        V[j,:] = w/beta
        # This performs some rediagonalization to make sure all the vectors
        # are orthogonal to eachother
        # NOTE(review): range(j-1) skips the immediately preceding vector;
        # full reorthogonalization would use range(j).
        for i in range(j-1):
            V[j, :] = V[j,:] - np.dot(np.conj(V[j,:]), V[i, :])*V[i,:]
        V[j, :] = V[j, :]/np.linalg.norm(V[j, :])
        w = np.sum([np.dot(col, np.dot(col.T, V[j,:])) for col in mat], 0)
        alfa = np.dot(w, V[j, :])
        w = w - alfa * V[j, :] - beta*V[j-1, :]
        T[j,j ] = alfa
        T[j-1,j] = beta
        T[j,j-1] = beta
    return T, V
|
{"/main.py": ["/utils.py", "/budgets_accountant.py", "/models/fed.py", "/modules/logistic_reg.py", "/modules/client.py", "/modules/server.py", "/modules/budgets_accountant.py", "/common_utils/tf_utils.py", "/modules/hparams.py"], "/models/fed.py": ["/utils.py"], "/modules/client.py": ["/modules/budgets_accountant.py", "/common_utils/tf_utils.py"], "/modules/logistic_reg.py": ["/modules/models.py"], "/modules/server.py": ["/modules/lanczos.py", "/common_utils/tf_utils.py"]}
|
34,315,962
|
zhangzhizheng/PFA
|
refs/heads/main
|
/simulation/clients/create_clients.py
|
import pickle
import numpy as np
import math
import os
def create_clients(num, num_examples, dir):
    """Create `num` non-iid clients over `num_examples` example indices and
    pickle the resulting list of index arrays to `<dir>/<num>_clients.pkl`.

    Follows https://research.google.com/pubs/pub44822.html: indices are
    grouped by digit label into per-class buckets, cut into 2*num shards,
    and each client receives two randomly chosen shards — so most clients
    hold examples of only two digits. Only indices are stored, not data.
    """
    examples_per_client = num_examples//num  # kept for parity; unused below
    num_classes = 10
    out_file = os.path.join(dir, str(num)+'_clients.pkl')
    if os.path.exists(out_file):
        print('Client exists at: {}'.format(out_file))
        return
    if not os.path.exists(dir):
        os.makedirs(dir)
    # Build per-class permuted index buckets, concatenated class by class.
    buckets = []
    for label in range(num_classes):
        chunk = []
        for _ in range(int(num / 10)):
            offset = label * int(num_examples/10)
            chunk = np.hstack((chunk, offset + np.random.permutation(int(num_examples/10))))
        print('temp.len: ', len(chunk))
        buckets = np.hstack((buckets, chunk))
    print('buckets.len: ', len(buckets))
    shards = 2 * num  # 20
    print('buckets.shape:', buckets.shape, 'shards', shards)  # buckets.shape: (10, 5000*(N/10))
    shard_order = np.random.permutation(shards)
    # clients will have `num` entries; each is the indices of one client.
    clients = []
    shard_list = np.split(buckets, shards)  # 50000/20 = 2500
    print('ind_list.len:', len(shard_list))
    for j in range(0, shards, 2):
        # Each client gets two randomly chosen shards, then an internal
        # shuffle so its two digit groups are interleaved.
        merged = np.hstack((shard_list[int(shard_order[j])], shard_list[int(shard_order[j + 1])]))
        shuffle = np.random.permutation(int(2 * len(buckets) / shards))
        clients.append(merged[shuffle])
    with open(out_file, "wb") as fh:
        pickle.dump(clients, fh)
    print('client created at: {}'.format(out_file))
def create_iid_clients(num_clients, num_examples, num_classes, num_examples_per_client, path):
    """Create `num_clients` iid clients, each holding `num_examples_per_client`
    uniformly drawn example indices.

    The list of index arrays is cached as a pickle under `path` and returned;
    if the cache file already exists it is loaded and returned unchanged.
    """
    #assert num_examples % examples_per_client == 0, "Number of examples per client must devide the total number of examples."
    cache_file = os.path.join(path, '{}_{}_clients.pkl'.format(num_clients, num_examples_per_client))
    if os.path.exists(cache_file):
        print('Client exists at: {}'.format(cache_file))
        return pickle.load(open(cache_file, 'rb'))
    if not os.path.exists(path):
        os.makedirs(path)
    # Each "round" reshuffles the full index range and slices it into
    # disjoint chunks; repeat rounds until every client has a chunk.
    client_set = []
    rounds = math.ceil(num_clients * num_examples_per_client / num_examples)
    chunks_per_round = int(num_examples / num_examples_per_client)
    created = 0
    for _ in range(rounds):
        shuffled = np.random.permutation(num_examples)
        for j in range(chunks_per_round):
            if created == num_clients:
                break
            created += 1
            lo = j * num_examples_per_client
            client_set.append(np.array(shuffled[lo : lo + num_examples_per_client]))
    with open(cache_file, "wb") as fh:
        pickle.dump(client_set, fh)
    print('client created at: {}'.format(cache_file))
    return client_set
def create_noniid_clients(num_clients, num_examples, num_classes,
                          num_examples_per_client, num_classes_per_client, path):
    '''
    Create (or load from cache) clients that hold non-iid data, according to
    the sharding scheme in https://research.google.com/pubs/pub44822.html.
    (It actually just creates indices that point to data, but the way these
    indices are grouped makes each client non-iid.)

    The data is assumed sorted by label. The per-class index ranges are
    shuffled, concatenated, split into `num_classes_per_client * num_clients`
    shards, and each client receives `num_classes_per_client` randomly chosen
    shards — so most clients only hold examples of a few labels.

    :param num_clients: number of clients to create
    :param num_examples: total number of examples in the dataset
    :param num_classes: number of distinct labels
    :param num_examples_per_client: only used in the cache file name;
        the actual per-client size is len(buckets) // num_clients
    :param num_classes_per_client: number of shards (≈ labels) per client
    :param path: directory where the pickled client list is cached
    :return: list of index arrays, one entry per client
    '''
    print('Number of classes per client {}'.format(num_classes_per_client))
    classes_per_client = num_classes_per_client
    examples_per_client = num_examples_per_client
    file_path = os.path.join(path, '{}_{}_{}_clients.pkl'.format(num_clients, examples_per_client, classes_per_client))
    if os.path.exists(file_path):
        print('Client exists at: {}'.format(file_path))
        # Context manager so the handle is closed (original leaked it).
        with open(file_path, 'rb') as f:
            client_set = pickle.load(f)
        return client_set
    if not os.path.exists(path):
        os.makedirs(path)
    # buckets: all example indices, grouped per class with each class's
    # indices shuffled internally (relies on data being sorted by label).
    buckets = []
    for k in range(num_classes):
        temp = np.array(k * int(num_examples / num_classes) + np.random.permutation(int(num_examples / num_classes)))
        buckets = np.hstack((buckets, temp))
    shards = classes_per_client * num_clients
    perm = np.random.permutation(shards)
    client_set = []
    # Drop the remainder so buckets splits evenly into `shards` pieces.
    extra = len(buckets) % shards
    if extra:
        buckets = buckets[:-extra]
    ind_list = np.split(buckets, shards)
    for j in range(0, shards, classes_per_client):
        # Stack `classes_per_client` randomly chosen shards into one client.
        temp = []
        for k in range(classes_per_client):
            temp = np.hstack((temp, ind_list[int(perm[j + k])]))
        client_set.append(temp)
        # Shuffle within the client so its labels are not stuck together.
        perm_2 = np.random.permutation(len(temp))
        client_set[-1] = client_set[-1][perm_2]
    with open(file_path, "wb") as filehandler:
        pickle.dump(client_set, filehandler)
    print('client created at: {}'.format(file_path))
    return client_set
def check_labels(N, client_set, y_train):
    """Print, for each of the first N clients, how many distinct labels it
    holds and the per-label example counts (assumes 10 classes)."""
    distinct_per_client = []
    for cid in range(N):
        indices = [int(v) for v in client_set[cid]]
        client_labels = np.array(y_train)[indices]
        distinct_per_client.append(set(np.array(y_train)[indices]))
        counts = [0] * 10
        for lab in client_labels:
            counts[int(lab)] += 1
        print('cid: {}, number of labels: {}/10.'.format(cid, len(distinct_per_client[cid])))
        print(counts)
|
{"/main.py": ["/utils.py", "/budgets_accountant.py", "/models/fed.py", "/modules/logistic_reg.py", "/modules/client.py", "/modules/server.py", "/modules/budgets_accountant.py", "/common_utils/tf_utils.py", "/modules/hparams.py"], "/models/fed.py": ["/utils.py"], "/modules/client.py": ["/modules/budgets_accountant.py", "/common_utils/tf_utils.py"], "/modules/logistic_reg.py": ["/modules/models.py"], "/modules/server.py": ["/modules/lanczos.py", "/common_utils/tf_utils.py"]}
|
34,315,963
|
zhangzhizheng/PFA
|
refs/heads/main
|
/modules/logistic_reg.py
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Builds the MNIST network.
Implements the inference/loss/training pattern for model building.
1. inference() - Builds the model as far as required for running the network
forward to make predictions.
2. loss() - Adds to the inference model the layers required to generate loss.
3. training() - Adds to the loss model the Ops required to generate and
apply gradients.
This file is used by the various "fully_connected_*.py" files and not meant to
be run.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import numpy as np
import tensorflow.compat.v1 as tf
from tensorflow_privacy.privacy.optimizers import dp_optimizer
from tensorflow_privacy.privacy.analysis import privacy_ledger
#from models.dnn_cifar10 import conv_net
from modules.models import Model
# The MNIST dataset has 10 classes, representing the digits 0 through 9.
NUM_CLASSES = 10
# The MNIST images are always 28x28 pixels.
IMAGE_SIZE = {'mnist':(28,28,1), 'fmnist':(28,28,1), 'cifar10':(32,32,3) }
class LogisticRegression(Model):
    """Multinomial logistic regression for MNIST-like datasets, built in the
    TF1 inference/loss/training pattern. Supports plain SGD and per-client
    DP-SGD optimizers from tensorflow_privacy.
    """

    def __init__(self, dataset, batch_size, lr, lr_decay):
        self.dataset = dataset        # key into IMAGE_SIZE ('mnist'/'fmnist'/'cifar10')
        self.batch_size = batch_size
        self.lr = lr                  # initial learning rate
        self.lr_decay = lr_decay      # if True, use exponential decay
        self.dpsgd = False            # flipped on by set_dpsgd_params()

    def set_dpsgd_params(self, l2_norm_clip, num_microbatches, noise_multipliers):
        """Enable DP-SGD with the given clipping norm, microbatch count and
        (per-client) noise multipliers."""
        self.dpsgd = True
        self.l2_norm_clip = l2_norm_clip
        self.num_microbatches = num_microbatches
        self.noise_multipliers = noise_multipliers

    def evaluation(self, logits, labels):
        """Return the number of examples whose top-1 prediction matches the
        label (in_top_k with k=1)."""
        correct = tf.nn.in_top_k(logits, labels, 1)
        return tf.reduce_sum(tf.cast(correct, tf.int32))

    def init_placeholder(self):
        """Create and cache the input placeholders sized for the configured
        dataset; returns (data_placeholder, labels_placeholder)."""
        img_size = IMAGE_SIZE[self.dataset]
        img_pixels = img_size[0] * img_size[1] * img_size[2]
        self.data_placeholder = tf.placeholder(tf.float32, shape=(None, img_pixels), name='images_placeholder')
        labels_placeholder = tf.placeholder(tf.int32, shape=(None), name='labels_placeholder')
        # sparse_softmax_cross_entropy_with_logits wants int32/int64 labels.
        self.labels_placeholder = tf.cast(labels_placeholder, dtype=tf.int64)
        return self.data_placeholder, self.labels_placeholder

    def placeholder_inputs(self, batch_size, IMAGE_PIXELS):
        """Generate placeholder variables to represent the input tensors.

        These placeholders are used as inputs by the rest of the model building
        code and will be fed from the downloaded data in the .run() loop.

        Args:
          batch_size: unused; the placeholders use a dynamic first dimension.
          IMAGE_PIXELS: flattened image size.
        Returns:
          images_placeholder: Images placeholder.
          labels_placeholder: Labels placeholder.
        """
        images_placeholder = tf.placeholder(tf.float32, shape=(None, IMAGE_PIXELS), name='images_placeholder')
        labels_placeholder = tf.placeholder(tf.int32, shape=(None), name='labels_placeholder')
        return images_placeholder, labels_placeholder

    def __lr_mnist(self, features):
        """Single linear layer producing raw class scores (logits).

        NOTE: the original applied tf.nn.softmax here, but every caller feeds
        the result into sparse_softmax_cross_entropy_with_logits, which
        applies softmax itself — a double softmax. Returning raw logits fixes
        the loss; in_top_k accuracy is unaffected (softmax is monotonic).
        """
        W = tf.Variable(tf.zeros([784, 10]))
        b = tf.Variable(tf.zeros([10]))
        return tf.matmul(features, W) + b

    def build_model(self, features):
        """Dispatch to the architecture matching self.dataset."""
        if self.dataset == 'mnist' or self.dataset == 'fmnist':
            return self.__lr_mnist(features)
        else:
            raise ValueError('No model matches the required dataset.')

    def train_model(self):
        """Build the training op(s).

        Precondition: eval_model() has been called first — it caches
        self.vector_loss and self.scalar_loss. (The original referenced
        undefined locals `vector_loss`/`scalar_loss`/`FLAGS` here and would
        raise NameError when called.)

        Returns the last created training op; with DP-SGD one op per noise
        multiplier is built.
        """
        # global_step tracks the number of optimizer steps taken.
        global_step = tf.Variable(0, dtype=tf.float32, trainable=False, name='global_step')
        if self.lr_decay:
            learning_rate = tf.train.exponential_decay(learning_rate=self.lr,
                                                       global_step=global_step,
                                                       decay_steps=5000,
                                                       decay_rate=0.5,
                                                       staircase=True,
                                                       name='learning_rate')
            print('decay lr: start at {}'.format(self.lr))
        else:
            learning_rate = self.lr
            print('constant lr: {}'.format(self.lr))
        if self.dpsgd:
            train_op_list = []
            for noise_multiplier in self.noise_multipliers:
                optimizer = dp_optimizer.DPGradientDescentGaussianOptimizer(
                    l2_norm_clip=self.l2_norm_clip,
                    noise_multiplier=noise_multiplier,
                    num_microbatches=self.num_microbatches,
                    learning_rate=learning_rate)
                # DP-SGD needs the per-example (vector) loss for clipping.
                train_op = optimizer.minimize(loss=self.vector_loss, global_step=global_step)
                train_op_list.append(train_op)
        else:
            optimizer = tf.train.GradientDescentOptimizer(
                learning_rate=learning_rate)
            train_op = optimizer.minimize(loss=self.scalar_loss, global_step=global_step)
        return train_op

    def eval_model(self):
        """Build loss and accuracy ops from the cached placeholders
        (init_placeholder() must have run). Caches vector/scalar loss for
        train_model(). Returns (eval_op, vector_loss, scalar_loss)."""
        logits = self.build_model(self.data_placeholder)
        # Per-example loss (needed for DP-SGD microbatching).
        vector_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=self.labels_placeholder, logits=(logits))
        # Mean loss over the minibatch, for reporting.
        scalar_loss = tf.reduce_mean(input_tensor=vector_loss)
        # Number of correctly predicted labels in the batch.
        eval_op = self.evaluation(logits, self.labels_placeholder)
        tf.summary.scalar('loss', scalar_loss)
        self.vector_loss = vector_loss
        self.scalar_loss = scalar_loss
        return eval_op, vector_loss, scalar_loss

    def get_model(self, num_clients):
        """Build the full graph: placeholders, losses, metrics and one
        training op per client (DP-SGD uses each client's noise multiplier;
        plain SGD shares one op).

        Returns (train_op_list, eval_op, scalar_loss, global_step,
        data_placeholder, labels_placeholder)."""
        img_size = IMAGE_SIZE[self.dataset]
        img_pixels = img_size[0] * img_size[1] * img_size[2]
        data_placeholder, labels_placeholder = self.placeholder_inputs(self.batch_size, img_pixels)
        logits = self.build_model(data_placeholder)
        labels_placeholder = tf.cast(labels_placeholder, dtype=tf.int64)
        # Per-example loss (supports DP-SGD microbatches).
        vector_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels_placeholder, logits=logits)
        scalar_loss = tf.reduce_mean(input_tensor=vector_loss)
        eval_op = self.evaluation(logits, labels_placeholder)
        tf.summary.scalar('loss', scalar_loss)
        # global_step tracks the number of optimizer steps taken.
        global_step = tf.Variable(0, dtype=tf.float32, trainable=False, name='global_step')
        if self.lr_decay:
            learning_rate = tf.train.exponential_decay(learning_rate=self.lr,
                                                       global_step=global_step,
                                                       decay_steps=2500,
                                                       decay_rate=0.5,
                                                       staircase=True,
                                                       name='learning_rate')
            print('decay lr: {}'.format(self.lr))
        else:
            learning_rate = self.lr
            print('constant lr: {}'.format(learning_rate))
        if self.dpsgd:
            train_op_list = []
            for cid in range(num_clients):
                optimizer = dp_optimizer.DPGradientDescentGaussianOptimizer(
                    l2_norm_clip=self.l2_norm_clip,
                    noise_multiplier=self.noise_multipliers[cid],
                    num_microbatches=self.num_microbatches,
                    learning_rate=learning_rate)
                opt_loss = vector_loss
                train_op = optimizer.minimize(loss=opt_loss, global_step=global_step)
                train_op_list.append(train_op)
        else:
            optimizer = tf.train.GradientDescentOptimizer(
                learning_rate=learning_rate)
            opt_loss = scalar_loss
            train_op = optimizer.minimize(loss=opt_loss, global_step=global_step)
            # All clients share the single non-DP op.
            train_op_list = [train_op] * num_clients
        return train_op_list, eval_op, scalar_loss, global_step, data_placeholder, labels_placeholder
|
{"/main.py": ["/utils.py", "/budgets_accountant.py", "/models/fed.py", "/modules/logistic_reg.py", "/modules/client.py", "/modules/server.py", "/modules/budgets_accountant.py", "/common_utils/tf_utils.py", "/modules/hparams.py"], "/models/fed.py": ["/utils.py"], "/modules/client.py": ["/modules/budgets_accountant.py", "/common_utils/tf_utils.py"], "/modules/logistic_reg.py": ["/modules/models.py"], "/modules/server.py": ["/modules/lanczos.py", "/common_utils/tf_utils.py"]}
|
34,315,964
|
zhangzhizheng/PFA
|
refs/heads/main
|
/modules/hparams.py
|
"""
Non projection component.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class HParams(object):
    """Container for one client's local-training hyper-parameters."""

    def __init__(self, loc_batch_size,
                 loc_num_microbatches,
                 loc_lr,
                 glob_steps,
                 loc_steps,
                 loc_l2_norm):
        # Local optimisation settings.
        self.bs = loc_batch_size             # local batch size
        self.num_mbs = loc_num_microbatches  # microbatch count (presumably for DP-SGD)
        self.lr = loc_lr                     # local learning rate
        self.l2_norm_clip = loc_l2_norm      # gradient clipping norm
        # Step budgets.
        self.glob_steps = glob_steps         # global steps
        self.loc_steps = loc_steps           # local steps
|
{"/main.py": ["/utils.py", "/budgets_accountant.py", "/models/fed.py", "/modules/logistic_reg.py", "/modules/client.py", "/modules/server.py", "/modules/budgets_accountant.py", "/common_utils/tf_utils.py", "/modules/hparams.py"], "/models/fed.py": ["/utils.py"], "/modules/client.py": ["/modules/budgets_accountant.py", "/common_utils/tf_utils.py"], "/modules/logistic_reg.py": ["/modules/models.py"], "/modules/server.py": ["/modules/lanczos.py", "/common_utils/tf_utils.py"]}
|
34,315,965
|
zhangzhizheng/PFA
|
refs/heads/main
|
/modules/server.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import abc
import copy
import heapq
import math
import os
import time

import numpy as np
import scipy
import tensorflow.compat.v1 as tf
from absl import app
from absl import flags

from modules.lanczos import Lanczos
from common_utils.tf_utils import Vname_to_FeedPname

np.random.seed(10)
class ServerOperation(metaclass=abc.ABCMeta):
    """Base class for server-side aggregation strategies: collects client
    updates, averages them, and applies the result to the global model."""

    def _add_one(self, num_vars, _update, _agg_updates):
        """Stack one client's per-variable update onto the running
        collection; row r of each stacked array is client r's update."""
        if not len(_agg_updates):
            # First client: start a fresh (1, ...) stack per variable.
            return [np.expand_dims(_update[i], 0) for i in range(num_vars)]
        return [np.append(_agg_updates[i], np.expand_dims(_update[i], 0), 0)
                for i in range(num_vars)]

    @abc.abstractmethod
    def aggregate(self, update, is_public=None):
        """
        Aggregation
        """

    @abc.abstractmethod
    def average(self):
        """
        Federated Averaging: average all collected updates
        """

    def update(self, glob_model, eps_list=None):
        """
        return the updated global model
        """
        keys = [Vname_to_FeedPname(v) for v in tf.trainable_variables()]
        num_vars = len(keys)
        shape_vars = [glob_model[k].shape for k in keys]
        mean_updates = self.average(num_vars, shape_vars, eps_list)
        # Updates are descent directions: subtract them from the weights.
        new_weights = [glob_model[keys[i]] - mean_updates[i] for i in range(num_vars)]
        return dict(zip(keys, new_weights))
class FedAvg(ServerOperation):
    """Plain federated averaging: every collected update gets equal weight."""

    def __init__(self):
        print('Using naive FedAvg algorithm...')
        self.__updates = []

    def aggregate(self, update, is_public=None):
        # is_public is ignored: FedAvg treats all clients alike.
        self.__updates = self._add_one(len(update), update, self.__updates)

    def average(self, num_vars=None, shape_vars=None, eps_subset=None):
        # Unweighted mean over axis 0 (clients), reshaped back per variable.
        means = [np.average(self.__updates[i], 0).reshape(shape_vars[i])
                 for i in range(num_vars)]
        self.__updates = []  # reset for the next round
        return means
class WeiAvg(ServerOperation):
    """Federated averaging weighted by each client's privacy budget epsilon."""

    def __init__(self):
        print('Using weighted averaging algorithm...')
        self.__updates = []

    def aggregate(self, update, is_public=None):
        # is_public is ignored: the weighting happens in average().
        self.__updates = self._add_one(len(update), update, self.__updates)

    def average(self, num_vars=None, shape_vars=None, eps_subset=None):
        # Client i's weight is its epsilon's share of the total.
        eps_sum = sum(eps_subset)
        weights = np.array([eps / eps_sum for eps in eps_subset])
        print('weights: {}'.format(weights))
        means = [np.average(self.__updates[i], 0, weights).reshape(shape_vars[i])
                 for i in range(num_vars)]
        self.__updates = []  # reset for the next round
        return means
class PFA(ServerOperation):
    """Projected federated averaging (Pfizer): the private clients' mean
    update is projected onto a low-dimensional subspace spanned by the top
    eigen-directions of the public clients' updates, then mixed with the
    public mean weighted by client counts."""

    def __init__(self, proj_dims, lanczos_iter, delay):
        print('Using projected averaging (Pfizer) algorithm...')
        self.__num_pub = 0
        self.__num_priv = 0
        self.__priv_updates = []
        self.__pub_updates = []
        self.proj_dims = proj_dims        # dimension of the projection subspace
        self.lanczos_iter = lanczos_iter  # Lanczos iterations for the eigendecomposition
        self.delay = delay                # if True, reuse the previous round's subspace
        self.Vk = None                    # cached per-variable projection bases (delay mode)
        self.mean = None                  # cached per-variable means (delay mode)

    def aggregate(self, update, is_public=False):
        """Collect one client's update (flattened per variable) into the
        public or private pool."""
        num_vars = len(update)
        update_1d = [u.flatten() for u in update]
        aggregate_fn = lambda var1, var2: self._add_one(num_vars, var1, var2)
        if is_public:
            self.__num_pub += 1
            self.__pub_updates = aggregate_fn(update_1d, self.__pub_updates)
        else:
            self.__num_priv += 1
            self.__priv_updates = aggregate_fn(update_1d, self.__priv_updates)

    def __standardize(self, M):
        '''Center the columns of M; returns (centered M, flattened row mean).'''
        [n, m] = M.shape
        if m == 1:
            # Single column: nothing to center. (Removed a stray debug
            # print(m==1) left in the original.)
            return M, np.zeros(n)
        mean = np.dot(M, np.ones((m, 1), dtype=np.float32)) / m
        return M - mean, mean.flatten()

    def __eigen_by_lanczos(self, mat):
        """Top-`proj_dims` eigenvectors (via Lanczos tridiagonalization) of
        the covariance implied by `mat`."""
        T, V = Lanczos(mat, self.lanczos_iter)
        T_evals, T_evecs = np.linalg.eig(T)
        # Indices of the proj_dims largest eigenvalues, descending.
        idx = T_evals.argsort()[-1: -(self.proj_dims + 1): -1]
        Vk = np.dot(V.T, T_evecs[:, idx])
        return Vk

    def __projection(self, num_vars, shape_vars):
        """Recompute the public subspace every round and project the private
        mean update onto it before mixing."""
        if len(self.__priv_updates):
            mean_priv_updates = [np.mean(self.__priv_updates[i], 0) for i in range(num_vars)]
            mean_pub_updates = [np.mean(self.__pub_updates[i], 0) for i in range(num_vars)]
            mean_proj_priv_updates = [0] * num_vars
            mean_updates = [0] * num_vars
            for i in range(num_vars):
                pub_updates, mean = self.__standardize(self.__pub_updates[i].T)
                Vk = self.__eigen_by_lanczos(pub_updates.T)
                # Project (priv_mean - mean) onto span(Vk), then shift back.
                mean_proj_priv_updates[i] = np.dot(Vk, np.dot(Vk.T, (mean_priv_updates[i] - mean))) + mean
                # Client-count-weighted mix of projected private and public means.
                mean_updates[i] = ((self.__num_priv * mean_proj_priv_updates[i] + self.__num_pub * mean_pub_updates[i]) /
                                   (self.__num_pub + self.__num_priv)).reshape(shape_vars[i])
            return mean_updates
        elif len(self.__pub_updates) and not len(self.__priv_updates):
            # Only public clients this round: plain average.
            mean_updates = [np.mean(self.__pub_updates[i], 0).reshape(shape_vars[i]) for i in range(num_vars)]
            return mean_updates
        else:
            raise ValueError('Cannot process the projection without private local updates.')

    def __delayed_projection(self, num_vars, shape_vars, warmup=False):
        """Like __projection, but after warmup projects with the PREVIOUS
        round's cached basis (self.Vk / self.mean) and refreshes the cache
        afterwards, saving one eigendecomposition of latency per round."""
        if len(self.__priv_updates):
            mean_pub_updates = [np.mean(self.__pub_updates[i], 0) for i in range(num_vars)]
            mean_priv_updates = [np.mean(self.__priv_updates[i], 0) for i in range(num_vars)]
            mean_proj_priv_updates = [0] * num_vars
            mean_updates = [0] * num_vars
            Vks = []
            means = []
            if warmup:
                # First round: compute the basis fresh and use it immediately.
                for i in range(num_vars):
                    pub_updates, mean = self.__standardize(self.__pub_updates[i].T)
                    Vk = self.__eigen_by_lanczos(pub_updates.T)
                    Vks.append(Vk)
                    means.append(mean)
                    mean_proj_priv_updates[i] = np.dot(Vk, np.dot(Vk.T, (mean_priv_updates[i] - mean))) + mean
                    mean_updates[i] = ((self.__num_priv * mean_proj_priv_updates[i] + self.__num_pub * mean_pub_updates[i]) /
                                       (self.__num_pub + self.__num_priv)).reshape(shape_vars[i])
            else:
                for i in range(num_vars):
                    # Project with the stale basis from the previous round.
                    mean_proj_priv_updates[i] = np.dot(self.Vk[i], mean_priv_updates[i]) + self.mean[i]
                    mean_updates[i] = ((self.__num_priv * mean_proj_priv_updates[i] + self.__num_pub * mean_pub_updates[i]) /
                                       (self.__num_pub + self.__num_priv)).reshape(shape_vars[i])
                    # Refresh the cache for the next round.
                    pub_updates, mean = self.__standardize(self.__pub_updates[i].T)
                    Vk = self.__eigen_by_lanczos(pub_updates.T)
                    Vks.append(Vk)
                    means.append(mean)
            self.Vk = Vks
            self.mean = means
            return mean_updates
        elif len(self.__pub_updates) and not len(self.__priv_updates):
            mean_updates = [np.mean(self.__pub_updates[i], 0).reshape(shape_vars[i]) for i in range(num_vars)]
            return mean_updates
        else:
            raise ValueError('Cannot process the projection without private local updates.')

    def average(self, num_vars, shape_vars, eps_list=None):
        """Run the (possibly delayed) projection and reset round state."""
        if self.delay:
            mean_updates = self.__delayed_projection(num_vars, shape_vars, warmup=(self.Vk is None))
        else:
            mean_updates = self.__projection(num_vars, shape_vars)
        self.__num_pub = 0
        self.__num_priv = 0
        self.__priv_updates = []
        self.__pub_updates = []
        return mean_updates
class WeiPFA(ServerOperation):
    """Epsilon-weighted projected federated averaging (Pfizer): like PFA, but
    the per-group means and the final public/private mix are weighted by each
    client's privacy budget epsilon rather than by client counts."""

    def __init__(self, proj_dims, lanczos_iter, delay):
        print('Using projected averaging (Pfizer) algorithm...')
        self.__num_pub = 0
        self.__num_priv = 0
        self.__priv_updates = []
        self.__pub_updates = []
        self.__priv_eps = []              # epsilons of private clients, in arrival order
        self.__pub_eps = []               # epsilons of public clients, in arrival order
        self.proj_dims = proj_dims        # dimension of the projection subspace
        self.lanczos_iter = lanczos_iter  # Lanczos iterations for the eigendecomposition
        self.delay = delay                # if True, reuse the previous round's subspace
        self.Vk = None                    # cached per-variable projection bases (delay mode)
        self.mean = None                  # cached per-variable means (delay mode)

    def aggregate(self, eps, update, is_public=False):
        """Collect one client's epsilon and flattened update into the public
        or private pool."""
        num_vars = len(update)
        update_1d = [u.flatten() for u in update]
        aggregate_fn = lambda var1, var2: self._add_one(num_vars, var1, var2)
        if is_public:
            self.__num_pub += 1
            self.__pub_eps.append(eps)
            self.__pub_updates = aggregate_fn(update_1d, self.__pub_updates)
        else:
            self.__num_priv += 1
            self.__priv_eps.append(eps)
            self.__priv_updates = aggregate_fn(update_1d, self.__priv_updates)

    def __standardize(self, M):
        '''Center the columns of M; returns (centered M, flattened row mean).'''
        [n, m] = M.shape
        if m == 1:
            # Single column: nothing to center.
            return M, np.zeros(n)
        mean = np.dot(M, np.ones((m, 1), dtype=np.float32)) / m
        return M - mean, mean.flatten()

    def __eigen_by_lanczos(self, mat):
        """Top-`proj_dims` eigenvectors (via Lanczos tridiagonalization) of
        the covariance implied by `mat`."""
        T, V = Lanczos(mat, self.lanczos_iter)
        T_evals, T_evecs = np.linalg.eig(T)
        # Indices of the proj_dims largest eigenvalues, descending.
        idx = T_evals.argsort()[-1: -(self.proj_dims + 1): -1]
        Vk = np.dot(V.T, T_evecs[:, idx])
        return Vk

    def __weighted_project_priv_updates(self, num_vars, shape_vars):
        """Project the eps-weighted private mean onto the public subspace and
        mix by total epsilon. (Removed the original's debug prints; one of
        them indexed `pub_updates[i]` with the variable index i, which could
        raise IndexError when i exceeded the matrix's row count.)"""
        if len(self.__priv_updates):
            priv_weights = np.array(self.__priv_eps) / sum(self.__priv_eps)
            pub_weights = np.array(self.__pub_eps) / sum(self.__pub_eps)
            mean_priv_updates = [np.average(self.__priv_updates[i], 0, priv_weights)
                                 for i in range(num_vars)]
            mean_pub_updates = [np.average(self.__pub_updates[i], 0, pub_weights)
                                for i in range(num_vars)]
            mean_proj_priv_updates = [0] * num_vars
            mean_updates = [0] * num_vars
            for i in range(num_vars):
                pub_updates, mean = self.__standardize(self.__pub_updates[i].T)
                Vk = self.__eigen_by_lanczos(pub_updates.T)
                # Project (priv_mean - mean) onto span(Vk), then shift back.
                mean_proj_priv_updates[i] = np.dot(Vk, np.dot(Vk.T, (mean_priv_updates[i] - mean))) + mean
                # Epsilon-mass-weighted mix of the two group means.
                mean_updates[i] = ((mean_proj_priv_updates[i] * sum(self.__priv_eps) + mean_pub_updates[i] * sum(self.__pub_eps))
                                   / sum(self.__priv_eps + self.__pub_eps)).reshape(shape_vars[i])
            return mean_updates
        elif len(self.__pub_updates) and not len(self.__priv_updates):
            # Only public clients this round: plain average.
            mean_updates = [np.mean(self.__pub_updates[i], 0).reshape(shape_vars[i]) for i in range(num_vars)]
            return mean_updates
        else:
            raise ValueError('Cannot process the projection without private local updates.')

    def __delayed_weighted_project_priv_updates(self, num_vars, shape_vars, warmup=False):
        """Like __weighted_project_priv_updates, but after warmup projects
        with the PREVIOUS round's cached basis (self.Vk / self.mean) and
        refreshes the cache afterwards."""
        if len(self.__priv_updates):
            priv_weights = np.array(self.__priv_eps) / sum(self.__priv_eps)
            pub_weights = np.array(self.__pub_eps) / sum(self.__pub_eps)
            mean_pub_updates = [np.average(self.__pub_updates[i], 0, pub_weights)
                                for i in range(num_vars)]
            mean_priv_updates = [np.average(self.__priv_updates[i], 0, priv_weights)
                                 for i in range(num_vars)]
            mean_proj_priv_updates = [0] * num_vars
            mean_updates = [0] * num_vars
            Vks = []
            means = []
            if warmup:
                # First round: compute the basis fresh and use it immediately.
                for i in range(num_vars):
                    pub_updates, mean = self.__standardize(self.__pub_updates[i].T)
                    Vk = self.__eigen_by_lanczos(pub_updates.T)
                    Vks.append(Vk)
                    means.append(mean)
                    mean_proj_priv_updates[i] = np.dot(Vk, np.dot(Vk.T, (mean_priv_updates[i] - mean))) + mean
                    mean_updates[i] = ((sum(self.__priv_eps) * mean_proj_priv_updates[i] + sum(self.__pub_eps) * mean_pub_updates[i]) /
                                       (sum(self.__priv_eps) + sum(self.__pub_eps))).reshape(shape_vars[i])
            else:
                for i in range(num_vars):
                    # Project with the stale basis from the previous round.
                    mean_proj_priv_updates[i] = np.dot(self.Vk[i], mean_priv_updates[i]) + self.mean[i]
                    mean_updates[i] = ((sum(self.__priv_eps) * mean_proj_priv_updates[i] + sum(self.__pub_eps) * mean_pub_updates[i]) /
                                       (sum(self.__priv_eps + self.__pub_eps))).reshape(shape_vars[i])
                    # Refresh the cache for the next round.
                    pub_updates, mean = self.__standardize(self.__pub_updates[i].T)
                    Vk = self.__eigen_by_lanczos(pub_updates.T)
                    Vks.append(Vk)
                    means.append(mean)
            self.Vk = Vks
            self.mean = means
            return mean_updates
        elif len(self.__pub_updates) and not len(self.__priv_updates):
            mean_updates = [np.mean(self.__pub_updates[i], 0).reshape(shape_vars[i]) for i in range(num_vars)]
            return mean_updates
        else:
            raise ValueError('Cannot process the projection without private local updates.')

    def average(self, num_vars, shape_vars, eps_list=None):
        """Run the (possibly delayed) weighted projection and reset state."""
        if not self.delay:
            mean_updates = self.__weighted_project_priv_updates(num_vars, shape_vars)
        else:
            mean_updates = self.__delayed_weighted_project_priv_updates(num_vars, shape_vars, warmup=(self.Vk is None))
        self.__num_pub = 0
        self.__num_priv = 0
        self.__priv_updates = []
        self.__pub_updates = []
        self.__priv_eps = []
        self.__pub_eps = []
        return mean_updates
class Server(object):
    """Coordinates client sampling, the choice of aggregation algorithm, and
    global model updates."""

    def __init__(self, num_clients, sample_mode, sample_ratio):
        self.num_clients = num_clients
        self.sample_mode = sample_mode    # 'None' => full participation
        self.sample_ratio = sample_ratio  # fraction of clients sampled per round
        self.public = None                # indices of public clients (Pfizer only)
        self.__epsilons = None            # per-client privacy budgets

    '''clustering'''
    def set_public_clients(self, epsilons):
        """Mark the clients with the top ~10% epsilons as public."""
        self.__epsilons = epsilons
        sorted_eps = np.sort(epsilons)
        percent = 0.1
        threshold = sorted_eps[-int(percent * self.num_clients)]
        self.public = list(np.where(np.array(epsilons) >= threshold)[0])

    def init_global_model(self, sess):
        """Snapshot the current trainable variables into a
        {placeholder_name: value} dict."""
        keys = [Vname_to_FeedPname(var) for var in tf.trainable_variables()]
        global_model = dict(zip(keys, [sess.run(var) for var in tf.trainable_variables()]))
        return global_model

    def init_alg(self, dp=True, fedavg=False, weiavg=False,
                 projection=False, proj_wavg=True, delay=True, proj_dims=None, lanczos_iter=None):
        """Instantiate the aggregation algorithm. WeiAvg and the Pfizer
        variants require DP to be enabled."""
        if fedavg or (not dp):
            self.__alg = FedAvg()
        elif weiavg:
            # The original wrote `assert(dp == False, msg)` — asserting a
            # non-empty tuple, which is always truthy, so the check never
            # fired. These algorithms need DP, hence `assert dp`.
            assert dp, 'Detected DP components were not applied so that the WeiAvg algorithm was denied.'
            self.__alg = WeiAvg()
        elif projection:
            assert dp, 'Detected DP components were not applied so that the Pfizer algorithm was denied.'
            self.__alg = PFA(proj_dims, lanczos_iter, delay)
        elif proj_wavg:
            assert dp, 'Detected DP components were not applied so that the Pfizer algorithm was denied.'
            self.__alg = WeiPFA(proj_dims, lanczos_iter, delay)
        else:
            raise ValueError('Choose an algorithm (FedAvg/WeiAvg/Pfizer) to get the aggregated model.')

    def get_proj_info(self):
        """Return the current projection basis and mean (Pfizer only)."""
        return self.__alg.Vk, self.__alg.mean

    def aggregate(self, cid, update, projection=False, proj_wavg=False):
        """Forward one client's update to the active algorithm, tagging
        public/private membership for the projection variants."""
        if projection:
            self.__alg.aggregate(update, is_public=True if (cid in self.public) else False)
        elif proj_wavg:
            self.__alg.aggregate(self.__epsilons[cid], update, is_public=True if (cid in self.public) else False)
        else:
            self.__alg.aggregate(update)

    def update(self, global_model, eps_list=None):
        """Apply the averaged updates and return the new global model."""
        return self.__alg.update(global_model, eps_list)

    @staticmethod
    def __a_res(items, weights, m):
        """
        Weighted reservoir sampling (A-Res): select m items with probability
        proportional to weight. (Originally defined without `self`; now a
        staticmethod. Requires the `heapq` import at module level.)
        :samples: [(item, weight), ...]
        :k: number of selected items
        :returns: [(item, weight), ...]
        """
        weights = np.array(weights) / sum(weights)
        heap = []  # [(new_weight, item), ...]
        for i in items:
            wi = weights[i]
            ui = np.random.random()
            ki = ui ** (1 / wi)
            if len(heap) < m:
                heapq.heappush(heap, (ki, i))
            elif ki > heap[0][0]:
                heapq.heappush(heap, (ki, i))
                if len(heap) > m:
                    heapq.heappop(heap)
        return [item[1] for item in heap]

    @staticmethod
    def __naive_weighted_sampling(items, weights, m):
        """Keep each item with probability weight/max(weight), truncated to m
        results. (Originally defined without `self`; now a staticmethod.)"""
        weights = np.array(weights) / max(weights)
        samples = [item for item in items if np.random.random() <= weights[item]][0:min(m, len(items))]
        return samples

    @staticmethod
    def __top_k(items, weights, m):
        """Return the m items with the largest weights via a min-heap.
        (Originally defined without `self`; now a staticmethod.)"""
        heap = []  # [(new_weight, item), ...]
        for i in items:
            wi = weights[i]
            if len(heap) < m:
                heapq.heappush(heap, (wi, i))
            elif wi > heap[0][0]:
                heapq.heappush(heap, (wi, i))
                if len(heap) > m:
                    heapq.heappop(heap)
        return [item[1] for item in heap]

    def sample_clients(self, candidates):
        """Choose the participants for this round.

        Returns `candidates` unchanged for full participation, a random
        subset of size num_clients*sample_ratio otherwise, or [] when there
        are too few candidates (or, for Pfizer, when no public client could
        be sampled after 50 retries)."""
        m = int(self.num_clients * self.sample_ratio)
        if len(candidates) < m:
            return []
        if self.sample_mode == 'None':
            print('Full client participation.')
            return candidates
        else:
            print('Partial client participation with ramdom client sampling.')
            participants = list(np.random.permutation(candidates))[0:m]
            # Only when we are running Pfizer method, `public` is not None.
            # For FedAvg or WAVG or MIN/MAX, public clients are not necessary while sampling.
            if self.public is None:
                return participants
            # For Pfizer, the subset must contain at least 1 public client.
            check = 50
            while check and len(set(participants).intersection(set(self.public))) == 0:
                check -= 1
                print('There are no public clients be sampled in this round.')
                participants = list(np.random.permutation(candidates))[0:m]
            return participants if check else []
|
{"/main.py": ["/utils.py", "/budgets_accountant.py", "/models/fed.py", "/modules/logistic_reg.py", "/modules/client.py", "/modules/server.py", "/modules/budgets_accountant.py", "/common_utils/tf_utils.py", "/modules/hparams.py"], "/models/fed.py": ["/utils.py"], "/modules/client.py": ["/modules/budgets_accountant.py", "/common_utils/tf_utils.py"], "/modules/logistic_reg.py": ["/modules/models.py"], "/modules/server.py": ["/modules/lanczos.py", "/common_utils/tf_utils.py"]}
|
34,315,966
|
zhangzhizheng/PFA
|
refs/heads/main
|
/common_utils/main_utils.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
import os
import pickle
import math
import heapq
import csv
import re
#import copy
import tensorflow.compat.v1 as tf
import numpy as np
np.random.seed(10)
def save_progress(FLAGS, model, Accuracy_accountant, Budgets_accountant=None, nbytes1=None, nbytes2=None):
    '''
    Save training progress as one CSV per experiment configuration.

    The output lives under
    cwd/FLAGS.save_dir/res_<version>/<dataset>/<model>/<iid-tag>/<eps-tag>/,
    with a file name encoding N, the aggregation algorithm, projection
    settings, local steps, batch size and the learning-rate schedule.

    :param FLAGS: the absl FLAGS object driving the run
    :param model: the trained model (currently not written to disk)
    :param Accuracy_accountant: list of accuracies collected so far
    :param Budgets_accountant: list of remaining budgets (written when dpsgd)
    :param nbytes1, nbytes2: byte counts (written when delay is enabled)
    :return: nothing
    '''
    target_dir = os.path.join(
        os.getcwd(),
        FLAGS.save_dir,
        'res_{}'.format(FLAGS.version),
        FLAGS.dataset,
        FLAGS.model,
        ('noniid{}'.format(FLAGS.noniid_level) if FLAGS.noniid else 'iid'),
        (FLAGS.eps if FLAGS.dpsgd else 'nodp'))
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    # Assemble the file name from one tag per configuration axis.
    name_parts = [
        '{}'.format(FLAGS.N),
        '-fedavg' if FLAGS.fedavg else '',
        '-wavg' if FLAGS.weiavg else '',
        '-pro{}_{}'.format(FLAGS.proj_dims, FLAGS.lanczos_iter) if FLAGS.projection else '',
        '-wpro{}_{}'.format(FLAGS.proj_dims, FLAGS.lanczos_iter) if FLAGS.proj_wavg else '',
        '-plus' if FLAGS.delay else '',
        '-{}-bs{}'.format(FLAGS.local_steps, FLAGS.client_batch_size),
        '-decaylr{}'.format(FLAGS.lr) if FLAGS.lr_decay else '-constlr{}'.format(FLAGS.lr),
    ]
    filename = ''.join(name_parts)
    with open(os.path.join(target_dir, filename + '.csv'), "w") as csvfile:
        writer = csv.writer(csvfile, delimiter=',')
        if FLAGS.dpsgd:
            writer.writerow(Budgets_accountant)
        if FLAGS.delay:
            writer.writerow(nbytes1)
            writer.writerow(nbytes2)
        writer.writerow(Accuracy_accountant)
def print_loss_and_accuracy(global_loss, accuracy, stage='validation'):
    """Report the current global loss and the accuracy on *stage*, followed by a divider."""
    separator = '--------------------------------------------------------------------------------------'
    print(' - Current Model has a loss of: %s' % global_loss)
    print(' - The Accuracy on the ' + stage + ' set is: %s' % accuracy)
    print(separator)
    print(separator)
def print_new_comm_round(real_round):
    """Print a banner marking the start of communication round *real_round*."""
    bar = '--------------------------------------------------------------------------------------'
    print(bar)
    print('------------------------ Communication round %s ---------------------------------------' % str(real_round))
    print(bar)
|
{"/main.py": ["/utils.py", "/budgets_accountant.py", "/models/fed.py", "/modules/logistic_reg.py", "/modules/client.py", "/modules/server.py", "/modules/budgets_accountant.py", "/common_utils/tf_utils.py", "/modules/hparams.py"], "/models/fed.py": ["/utils.py"], "/modules/client.py": ["/modules/budgets_accountant.py", "/common_utils/tf_utils.py"], "/modules/logistic_reg.py": ["/modules/models.py"], "/modules/server.py": ["/modules/lanczos.py", "/common_utils/tf_utils.py"]}
|
34,315,967
|
zhangzhizheng/PFA
|
refs/heads/main
|
/main.py
|
"""
Non projection component.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
import os
import pickle
import math
import time
import re
import tensorflow.compat.v1 as tf
import numpy as np
from modules.cnn import CNN
from modules.logistic_reg import LogisticRegression
from modules.client import Client
from modules.server import Server
from modules.budgets_accountant import BudgetsAccountant
from simulation.datasets import data_reader
from simulation.clients import create_clients
from common_utils import dpsgd_utils, main_utils
from common_utils.tf_utils import global_step_creator, Vname_to_FeedPname, Vname_to_Pname
from modules.hparams import HParams
# np.random.seed(10)
# Default TF session config for the module (GPU memory growth left at defaults).
config = tf.ConfigProto()
#config.gpu_options.per_process_gpu_memory_fraction = 0.5
session = tf.Session(config=config)

# Experiment hyperparameters
flags.DEFINE_enum('dataset', 'mnist', ['mnist', 'fmnist', 'cifar10'],
                  'Which dataset to use.')
flags.DEFINE_enum('model', 'cnn', ['lr', 'cnn', '2nn'],
                  'Which model to use. This can be a convolutional model (cnn)'
                  'or a two hidden-layer densely connected network (2nn).')
flags.DEFINE_boolean('noniid', False, 'If True, train with noniid data.')
flags.DEFINE_integer('noniid_level', 10, 'Level of noniid.')
flags.DEFINE_integer('N', 10,
                     'Total number of clients.')
flags.DEFINE_integer('max_steps', 10000,
                     'Total number of communication round.')
flags.DEFINE_integer('local_steps', 100,
                     'The round gap between two consecutive communications.')
flags.DEFINE_integer('client_dataset_size', None,
                     'If None, set the default value.')
flags.DEFINE_integer('client_batch_size', 4,
                     'Batch size used on the client.')
flags.DEFINE_integer('num_microbatches', 4, 'Number of microbatches '
                     '(must evenly divide batch_size)')

# learning rate
flags.DEFINE_boolean('lr_decay', False, 'If True, learning rate decays.')
flags.DEFINE_float('lr', 0.1, 'Learning rate for local update procedure.')

# Differential privacy flags
flags.DEFINE_boolean('dpsgd', False, 'If True, train with DP-SGD. '
                     'If False, train with vanilla SGD.')
flags.DEFINE_string('eps', None, 'epsilon file name.')
flags.DEFINE_float('delta', 1e-5, 'DP parameter Delta.')
flags.DEFINE_float('l2_norm_clip', 1.0, 'Clipping norm')

# Personalized privacy flags (client sampling strategy)
flags.DEFINE_enum('sample_mode', 'R', ['R','W1','W2'],
                  'R for random sample, W for weighted sample and '
                  'None for full participation.')
flags.DEFINE_float('sample_ratio', 0.8, 'Sample ratio.')

# minimum epsilon
flags.DEFINE_boolean('min', False, 'If True, train eps_min dp.')
# weighted average
flags.DEFINE_boolean('weiavg', False, 'If True, train with weighted averaging.')
# fedavg
flags.DEFINE_boolean('fedavg', False, 'If True, train with fedavg.')

# Projection flags (PFA / PFA+)
flags.DEFINE_boolean('projection', False, 'If True, use projection.')
flags.DEFINE_boolean('proj_wavg', False, 'If True, use the weighted projection.')
flags.DEFINE_boolean('delay', False, 'If True, use the delayed aggregation.')
flags.DEFINE_integer('proj_dims', 1, 'The dimensions of subspace.')
flags.DEFINE_integer('lanczos_iter', 256, 'Projection method.')

# save dir flags
flags.DEFINE_integer('version', 1, 'version of dataset.')
flags.DEFINE_string('save_dir', 'res', 'Model directory')
flags.DEFINE_string('log', os.path.join(os.getenv('TEST_TMPDIR', '/tmp'),
                    'tensorflow/mnist/logs'), 'Log data directory')

FLAGS = flags.FLAGS
def prepare_local_data(project_path, dataset, nclients, noniid, version):
    """Load the dataset and split it into per-client index shards.

    :param project_path: project root; the data lives in ../PFA_res/dataset relative to it.
    :param dataset: dataset name ('mnist', 'fmnist', 'cifar10').
    :param nclients: number of simulated clients.
    :param noniid: if True, create non-iid shards (level taken from FLAGS.noniid_level).
    :param version: shard version used in the cache path.
    :return: (x_train, y_train, x_test, y_test, client_set) where client_set is a
             list of per-client index lists.
    """
    data_path = os.path.abspath(os.path.join(project_path,"..","PFA_res","dataset"))
    print(data_path)

    # universal set
    x_train, y_train, x_test, y_test = data_reader.load_dataset(data_path, dataset)
    print('x_train:{} y_train:{} / x_test:{}, y_test:{}'.format(\
        len(x_train), len(y_train), len(x_test), len(y_test)))

    # split the universal set into per-client shards (cached under client_set_path)
    client_set_path = os.path.join(data_path, dataset, 'clients',
                                   ('noniid' if noniid else 'iid'),
                                   'v{}'.format(version))
    client_dataset_size = len(x_train) // nclients if FLAGS.client_dataset_size is None \
        else FLAGS.client_dataset_size
    if not noniid:
        client_set = create_clients.create_iid_clients(nclients, len(x_train), 10,
                                                       client_dataset_size, client_set_path)
    else:
        client_set = create_clients.create_noniid_clients(nclients, len(x_train), 10,
                                                          client_dataset_size, FLAGS.noniid_level, client_set_path)
    # NOTE: a per-class label tally was computed here in an earlier revision but
    # its result was never used; the dead code has been removed.
    return x_train, y_train, x_test, y_test, client_set
def prepare_priv_preferences(epsfile, num_clients):
    """Build the simulated per-client privacy preferences.

    Returns None when DP training is disabled (FLAGS.dpsgd is False);
    otherwise a collection of `num_clients` epsilon values loaded from
    *epsfile* via dpsgd_utils.
    """
    if not FLAGS.dpsgd:
        return None
    return dpsgd_utils.set_epsilons(epsfile, num_clients)
def main(unused_argv):
    """Run the federated (optionally differentially-private) training loop.

    Builds the per-client simulation state, constructs the TF1 graph once,
    then alternates local client updates with server-side aggregation for
    COMM_ROUND communication rounds, recording accuracy (and privacy budget
    / traffic accounting under DP-SGD) after every round.
    """
    hp = HParams(loc_batch_size=FLAGS.client_batch_size,
                 loc_num_microbatches=FLAGS.num_microbatches,
                 loc_lr=FLAGS.lr,
                 glob_steps=FLAGS.max_steps,
                 loc_steps=FLAGS.local_steps,
                 loc_l2_norm=FLAGS.l2_norm_clip)
    project_path = os.getcwd()
    print(project_path)

    # prepare the local dataset all clients
    x_train, y_train, x_test, y_test, client_set = \
        prepare_local_data(project_path, FLAGS.dataset, FLAGS.N, FLAGS.noniid, FLAGS.version)
    create_clients.check_labels(FLAGS.N, client_set, y_train) # print and check
    print('client dataset size: {}'.format(len(client_set[0])))

    # Prepare all clients (simulation)
    # simulate a list of the personal privacy preferences of all clients
    # If FLAGS.dpsgd is False, `prepare_priv_preferences` return None
    # otherwise return a list of epsilon with size FLAGS.N
    priv_preferences = prepare_priv_preferences(FLAGS.eps, FLAGS.N)
    print('priv_preferences: {}'.format(priv_preferences))

    clients = []
    for cid in range(FLAGS.N):
        print(client_set[cid])
        idx = [int(val) for val in client_set[cid]]
        client = Client(x_train=x_train[idx],
                        y_train=y_train[idx],
                        batch_size=hp.bs, # batch_size
                        loc_steps=hp.loc_steps) # learning_rate
        if FLAGS.dpsgd:
            # prepare the dpsgd params for client #c
            # `noise_multiplier` is a parameter in tf_privacy package, which is also the gaussian distribution parameter for random noise.
            epsilon = priv_preferences[cid]
            delta = FLAGS.delta
            noise_multiplier = dpsgd_utils.compute_noise_multiplier(N=client.dataset_size,
                                                                    L=hp.bs,
                                                                    T=hp.glob_steps * FLAGS.sample_ratio,
                                                                    epsilon=epsilon,
                                                                    delta=delta)
            ba = BudgetsAccountant(epsilon, delta, noise_multiplier)
            client.set_ba(ba)
        clients.append(client)

    # Prepare server (simulation)
    server = Server(FLAGS.N, FLAGS.sample_mode, FLAGS.sample_ratio)
    if FLAGS.projection or FLAGS.proj_wavg:
        server.set_public_clients(priv_preferences)

    # pre-define the number of server-clients communication rounds
    COMM_ROUND = hp.glob_steps // hp.loc_steps
    print('communication rounds:{}'.format(COMM_ROUND))

    # record the test accuracy of the training process.
    accuracy_accountant = []
    privacy_accountant = []
    start_time = time.time()

    # define tensors and operators in the graph 'g_c'
    with tf.Graph().as_default():
        # build model
        if FLAGS.model == 'lr':
            model = LogisticRegression(FLAGS.dataset, FLAGS.client_batch_size, FLAGS.lr, FLAGS.lr_decay)
        elif FLAGS.model =='cnn':
            model = CNN(FLAGS.dataset, FLAGS.client_batch_size, FLAGS.lr, FLAGS.lr_decay)
        else:
            raise ValueError('No avaliable class in `./modules` matches the required model.')
        if FLAGS.dpsgd:
            model.set_dpsgd_params(l2_norm_clip = FLAGS.l2_norm_clip,
                                   num_microbatches = FLAGS.num_microbatches,
                                   noise_multipliers = [ clients[cid].ba.noise_multiplier for cid in range(FLAGS.N) ] )

        # build the model on the server side
        train_op_list, eval_op, loss, global_steps, data_placeholder, labels_placeholder = model.get_model(FLAGS.N)
        # clients download the model from server
        for cid in range(FLAGS.N):
            clients[cid].set_ops( train_op_list[cid], eval_op, loss, data_placeholder, labels_placeholder )

        # increase and set global step
        real_global_steps = 0
        set_global_step = global_step_creator()

        # dict, each key-value pair corresponds to the placeholder_name of each tf.trainable_variables
        # and its placeholder.
        # trainable_variables: the placeholder name corresponding to each tf.trainable variable.
        model_placeholder = dict(zip([Vname_to_FeedPname(var) for var in tf.trainable_variables()],
                                     [tf.placeholder(name=Vname_to_Pname(var),
                                                     shape=var.get_shape(),
                                                     dtype=tf.float32)
                                      for var in tf.trainable_variables()]))

        # all trainable variables are set to the value specified through
        # the placeholders in 'model_placeholder'.
        assignments = [tf.assign(var, model_placeholder[Vname_to_FeedPname(var)])\
                       for var in tf.trainable_variables()]

        with tf.Session(config = tf.ConfigProto(log_device_placement=False,
                                                allow_soft_placement=True,
                                                gpu_options=tf.GPUOptions(allow_growth=True))) as sess:
            #sess.run(tf.global_variables_initializer())
            sess.run(tf.initialize_all_variables())

            # initial global model and errors
            model = server.init_global_model(sess)
            alg = server.init_alg(FLAGS.dpsgd,
                                  FLAGS.fedavg,
                                  FLAGS.weiavg,
                                  FLAGS.projection,
                                  FLAGS.proj_wavg,
                                  FLAGS.delay,
                                  FLAGS.proj_dims,
                                  FLAGS.lanczos_iter)
            Vk, mean = None, None
            accum_nbytes1 = 0 # before pfaplus
            accum_nbytes2 = 0 # after pfaplus
            accum_nbytes_list1 = []
            accum_nbytes_list2 = []
            # initial local update
            #local = LocalUpdate(x_train, y_train, client_set, hp.bs, data_placeholder, labels_placeholder)
            for r in range(COMM_ROUND):
                main_utils.print_new_comm_round(r)
                comm_start_time = time.time()

                # Single-client (centralized) path: plain SGD on client 0.
                # NOTE(review): if FLAGS.N == 1 and FLAGS.dpsgd are both set,
                # `max_accum_bgts` below is referenced without ever being
                # assigned on this path — looks like a latent NameError; confirm.
                if FLAGS.N == 1:
                    for it in range(FLAGS.local_steps):
                        # batch_ind holds the indices of the current batch
                        batch_ind = np.random.permutation(FLAGS.client_dataset_size)[0:FLAGS.client_batch_size]
                        x_batch = clients[0].x_train[[int(j) for j in batch_ind]]
                        y_batch = clients[0].y_train[[int(j) for j in batch_ind]]
                        # Fill a feed dictionary with the actual set of data and labels using the data and labels associated
                        # to the indices stored in batch_ind:
                        feed_dict = {str(data_placeholder.name): x_batch,
                                     str(labels_placeholder.name): y_batch}
                        # Run one optimization step.
                        _ = sess.run(train_op_list[0], feed_dict = feed_dict)
                        #self.global_steps = sess.run(global_steps)
                    weights = [sess.run(var) for var in tf.trainable_variables()]
                    keys = [Vname_to_FeedPname(v) for v in tf.trainable_variables()]
                    model = dict(zip(keys, weights))
                else:
                    # precheck and pick up the candidates who can take the next commiunication round.
                    candidates = [ cid for cid in range(FLAGS.N) if clients[cid].precheck() ]
                    # select the participating clients
                    participants = server.sample_clients(candidates)
                    # if the condition of training cannot be satisfied.
                    # (no public clients or no sufficient candidates.
                    if len(participants) == 0:
                        print("the condition of training cannot be satisfied. (no public clients or no sufficient candidates.")
                        print('Done! The procedure time:', time.time() - start_time)
                        break
                    print('==== participants in round {} includes: ====\n {} '.format(r, participants))

                    max_accum_bgts = 0
                    #####################################################
                    # For each client c (out of the m chosen ones):
                    for cid in participants:
                        #####################################################
                        # Start local update
                        # 1. Simulate that clients download the global model from server.
                        # in here, we set the trainable Variables in the graph to the values stored in feed_dict 'model'
                        clients[cid].download_model(sess, assignments, set_global_step, model)
                        if Vk is not None:
                            clients[cid].set_projection(Vk, mean, is_private=(cid not in server.public))
                        #print(model['dense_1/bias_placeholder:0'])

                        # 2. clients update the model locally
                        update, accum_bgts, bytes1, bytes2 = clients[cid].local_update(sess, model, global_steps)
                        accum_nbytes1 += (bytes1)/(1024*1024)
                        accum_nbytes2 += (bytes2)/(1024*1024)
                        if accum_bgts is not None:
                            max_accum_bgts = max(max_accum_bgts, accum_bgts)

                        server.aggregate(cid, update, FLAGS.projection, FLAGS.proj_wavg)
                        if FLAGS.dpsgd:
                            # NOTE(review): `delta` is the variable left over from
                            # the client-setup loop above; it is only defined when
                            # FLAGS.dpsgd was also set during setup.
                            print('For client %d and delta=%f, the budget is %f and the left budget is: %f' %
                                  (cid, delta, clients[cid].ba.epsilon, clients[cid].ba.accum_bgts))
                        # End of the local update
                        #####################################################

                    # average and update the global model
                    model = server.update( model, eps_list=(priv_preferences[participants] if FLAGS.weiavg else None) )
                    if (FLAGS.projection or FLAGS.proj_wavg) and FLAGS.delay:
                        Vk, mean = server.get_proj_info()

                # Setting the trainable Variables in the graph to the values stored in feed_dict 'model'
                sess.run(assignments, feed_dict=model)

                # validate the (current) global model using validation set.
                # create a feed-dict holding the validation set.
                feed_dict = {str(data_placeholder.name): x_test,
                             str(labels_placeholder.name): y_test}

                # compute the loss on the validation set.
                global_loss = sess.run(loss, feed_dict=feed_dict)
                count = sess.run(eval_op, feed_dict=feed_dict)
                accuracy = float(count) / float(len(y_test))
                accuracy_accountant.append(accuracy)

                if FLAGS.dpsgd:
                    privacy_accountant.append(max_accum_bgts)
                    if FLAGS.delay:
                        accum_nbytes_list1.append(accum_nbytes1)
                        accum_nbytes_list2.append(accum_nbytes2)
                        main_utils.save_progress(FLAGS, model, accuracy_accountant, privacy_accountant, accum_nbytes_list1, accum_nbytes_list2)
                    else:
                        main_utils.save_progress(FLAGS, model, accuracy_accountant, privacy_accountant)
                else:
                    main_utils.save_progress(FLAGS, model, accuracy_accountant)

                main_utils.print_loss_and_accuracy(global_loss, accuracy, stage='test')
                print('time of one communication:', time.time() - comm_start_time)

    print('Done! The procedure time:', time.time() - start_time)
if __name__ == '__main__':
    # absl's app.run parses FLAGS before dispatching to main().
    app.run(main)
|
{"/main.py": ["/utils.py", "/budgets_accountant.py", "/models/fed.py", "/modules/logistic_reg.py", "/modules/client.py", "/modules/server.py", "/modules/budgets_accountant.py", "/common_utils/tf_utils.py", "/modules/hparams.py"], "/models/fed.py": ["/utils.py"], "/modules/client.py": ["/modules/budgets_accountant.py", "/common_utils/tf_utils.py"], "/modules/logistic_reg.py": ["/modules/models.py"], "/modules/server.py": ["/modules/lanczos.py", "/common_utils/tf_utils.py"]}
|
34,315,968
|
zhangzhizheng/PFA
|
refs/heads/main
|
/modules/models.py
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Builds the MNIST network.
Implements the inference/loss/training pattern for model building.
1. inference() - Builds the model as far as required for running the network
forward to make predictions.
2. loss() - Adds to the inference model the layers required to generate loss.
3. training() - Adds to the loss model the Ops required to generate and
apply gradients.
This file is used by the various "fully_connected_*.py" files and not meant to
be run.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import numpy as np
import tensorflow.compat.v1 as tf
from tensorflow_privacy.privacy.optimizers import dp_optimizer
from tensorflow_privacy.privacy.analysis import privacy_ledger
#from models.dnn_cifar10 import conv_net
# Each supported dataset has 10 classes.
NUM_CLASSES = 10
# Input image shapes per dataset, as (height, width, channels).
IMAGE_SIZE = {'mnist':(28,28,1), 'fmnist':(28,28,1), 'cifar10':(32,32,3) }
class Model(metaclass=abc.ABCMeta):
    """Abstract interface that every trainable model in this package implements.

    NOTE(review): the abstract methods below are declared without `self`;
    concrete subclasses presumably add it — confirm against the subclasses
    (e.g. modules/logistic_reg.py).
    """

    @abc.abstractmethod
    def evaluation():
        """Evaluate the quality of the logits at predicting the label.
        Args:
          logits: Logits tensor, float - [batch_size, NUM_CLASSES].
          labels: Labels tensor, int32 - [batch_size], with values in the
            range [0, NUM_CLASSES).
        Returns:
          A scalar int32 tensor with the number of examples (out of batch_size)
          that were predicted correctly.
        """

    @abc.abstractmethod
    def init_placeholder():
        """Generate placeholder variables to represent the input tensors.
        These placeholders are used as inputs by the rest of the model building
        code and will be fed from the downloaded data in the .run() loop, below.
        Args:
          batch_size: The batch size will be baked into both placeholders.
        Returns:
          data_placeholder: Images placeholder.
          labels_placeholder: Labels placeholder.
        """

    @abc.abstractmethod
    def build_model():
        """Given input features, returns the logits from a ML model."""

    # The string below is commented-out interface surface kept from an
    # earlier revision (train_model / eval_model were never promoted to
    # abstract methods).
    """
    @abc.abstractmethod
    def train_model():
        pass
    @abc.abstractmethod
    def eval_model():
        pass
    """
|
{"/main.py": ["/utils.py", "/budgets_accountant.py", "/models/fed.py", "/modules/logistic_reg.py", "/modules/client.py", "/modules/server.py", "/modules/budgets_accountant.py", "/common_utils/tf_utils.py", "/modules/hparams.py"], "/models/fed.py": ["/utils.py"], "/modules/client.py": ["/modules/budgets_accountant.py", "/common_utils/tf_utils.py"], "/modules/logistic_reg.py": ["/modules/models.py"], "/modules/server.py": ["/modules/lanczos.py", "/common_utils/tf_utils.py"]}
|
34,452,932
|
djgpujota/madjs
|
refs/heads/main
|
/modules/backAdmin/modulo_clientes_registrados.py
|
from ..database import abrirConexion,cerrarConexion
def consulta():
    """Return all active personas (estado=0) ordered by id.

    The connection is now closed in a finally block so it is not leaked
    when the query raises; stale commented-out count queries were removed.
    """
    conexion = abrirConexion()
    try:
        cursor = conexion.cursor()
        cursor.execute("SELECT * FROM personas where estado=0 order by id_personas asc")
        personas = cursor.fetchall()
    finally:
        cerrarConexion(conexion)
    return personas
def consultaClintes():
    """Return (active_count, deleted_count) for the personas table."""
    conexion = abrirConexion()
    cursor = conexion.cursor()
    cursor.execute("SELECT count(*) FROM personas where estado=0")
    activos = cursor.fetchone()[0]
    cursor.execute("SELECT count(*) FROM personas where estado=1 ")
    eliminados = cursor.fetchone()[0]
    cerrarConexion(conexion)
    return activos, eliminados
def eliminarClientes(id):
    """Soft-delete a persona by setting estado=1.

    The previous version interpolated *id* with str.format, which allowed
    SQL injection; the query is now parameterized. The connection is closed
    even when the update raises.
    """
    print('el id es:', id)
    conexion = abrirConexion()
    try:
        cursor = conexion.cursor()
        cursor.execute("update personas set estado=1 where id_personas=%s", (id,))
        conexion.commit()
    finally:
        cerrarConexion(conexion)
def consultaId(id):
    """Fetch the persona row(s) matching *id*.

    Uses a parameterized query (the previous str.format call allowed SQL
    injection) and no longer commits after a read-only SELECT.
    """
    conexion = abrirConexion()
    try:
        cursor = conexion.cursor()
        cursor.execute("SELECT * FROM personas WHERE id_personas=%s", (id,))
        consulta = cursor.fetchall()
    finally:
        cerrarConexion(conexion)
    return consulta
def busquedaCliente(name):
    """Look up personas by exact name.

    *name* is user-supplied text: it is passed as a bound parameter instead
    of being interpolated into the SQL string (the previous version was
    vulnerable to SQL injection). No commit is issued for this read-only query.
    """
    conexion = abrirConexion()
    try:
        cursor = conexion.cursor()
        cursor.execute("select * from personas where nombre_personas=%s", (name,))
        consulta = cursor.fetchall()
    finally:
        cerrarConexion(conexion)
    return consulta
class Productoscliente:
    """Read access to the orders (ordenes_clientes) of one persona."""

    def consulta(d):
        """Return (rows, count) of the orders belonging to persona id *d*.

        Queries are parameterized (the previous str.format interpolation
        allowed SQL injection) and the connection is closed on error.
        """
        conexion = abrirConexion()
        try:
            cursor = conexion.cursor()
            cursor.execute("select * from ordenes_clientes where id_personas=%s", (d,))
            consulta = cursor.fetchall()
            cursor.execute("select count(*) from ordenes_clientes where id_personas=%s", (d,))
            total = cursor.fetchone()[0]
        finally:
            cerrarConexion(conexion)
        return consulta, total
|
{"/modules/backAdmin/modulo_clientes_registrados.py": ["/modules/database.py"], "/modules/backCliente/modulo_almacenados.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_vendidos.py": ["/modules/database.py"], "/modules/backAdmin/modulo_reservas.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_ingresados.py": ["/modules/database.py"], "/routes.py": ["/modules/backCliente/modulo_reserva_cliente.py", "/modules/users.py", "/modules/backCliente/modulo_almacenados.py", "/modules/backAdmin/modulo_productos_ingresados.py", "/modules/backAdmin/modulo_clientes_registrados.py", "/modules/backAdmin/modulo_reservas.py", "/modules/backAdmin/modulo_productos_vendidos.py"], "/modules/backCliente/modulo_reserva_cliente.py": ["/modules/database.py"], "/modules/users.py": ["/modules/database.py"]}
|
34,452,933
|
djgpujota/madjs
|
refs/heads/main
|
/modules/database.py
|
import psycopg2
# try:
def abrirConexion():
    """Open a new PostgreSQL connection.

    Credentials default to the historical hard-coded values but can now be
    overridden via environment variables (DB_HOST, DB_NAME, DB_USER,
    DB_PASSWORD) so secrets can be kept out of source control.
    """
    import os  # local import: this module only imports psycopg2 at the top
    con = psycopg2.connect(
        host=os.getenv('DB_HOST', 'localhost'),
        database=os.getenv('DB_NAME', 'MADJS-F'),
        user=os.getenv('DB_USER', 'postgres'),
        password=os.getenv('DB_PASSWORD', '1810hsxd')
    )
    return con
# except:
# print('Error al conectarse ')
def cerrarConexion(con):
    """Close a previously opened database connection."""
    con.close()
|
{"/modules/backAdmin/modulo_clientes_registrados.py": ["/modules/database.py"], "/modules/backCliente/modulo_almacenados.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_vendidos.py": ["/modules/database.py"], "/modules/backAdmin/modulo_reservas.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_ingresados.py": ["/modules/database.py"], "/routes.py": ["/modules/backCliente/modulo_reserva_cliente.py", "/modules/users.py", "/modules/backCliente/modulo_almacenados.py", "/modules/backAdmin/modulo_productos_ingresados.py", "/modules/backAdmin/modulo_clientes_registrados.py", "/modules/backAdmin/modulo_reservas.py", "/modules/backAdmin/modulo_productos_vendidos.py"], "/modules/backCliente/modulo_reserva_cliente.py": ["/modules/database.py"], "/modules/users.py": ["/modules/database.py"]}
|
34,452,934
|
djgpujota/madjs
|
refs/heads/main
|
/modules/backCliente/modulo_almacenados.py
|
from ..database import abrirConexion ,cerrarConexion
class buscarP():
    """Product lookup helpers for the customer-facing storefront."""

    def busquedaProducto(p):
        """Look up a product by exact name.

        :param p: user-supplied product name; passed as a bound parameter
                  (the previous str.format interpolation allowed SQL injection).
        :return: (status_message, rows) — the Spanish status strings are part
                 of the contract used by the routes layer and are unchanged.
        """
        conexion = abrirConexion()
        try:
            cursor = conexion.cursor()
            cursor.execute("select * from producto_ingresado Where nombre_producto_ingresado = %s;", (p,))
            consulta = cursor.fetchall()
        finally:
            cerrarConexion(conexion)
        # Debug prints removed; the empty-result check drives the status string.
        if not consulta:
            return 'Producto No Encontrado', consulta
        return 'Producto Encontrado', consulta
|
{"/modules/backAdmin/modulo_clientes_registrados.py": ["/modules/database.py"], "/modules/backCliente/modulo_almacenados.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_vendidos.py": ["/modules/database.py"], "/modules/backAdmin/modulo_reservas.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_ingresados.py": ["/modules/database.py"], "/routes.py": ["/modules/backCliente/modulo_reserva_cliente.py", "/modules/users.py", "/modules/backCliente/modulo_almacenados.py", "/modules/backAdmin/modulo_productos_ingresados.py", "/modules/backAdmin/modulo_clientes_registrados.py", "/modules/backAdmin/modulo_reservas.py", "/modules/backAdmin/modulo_productos_vendidos.py"], "/modules/backCliente/modulo_reserva_cliente.py": ["/modules/database.py"], "/modules/users.py": ["/modules/database.py"]}
|
34,452,935
|
djgpujota/madjs
|
refs/heads/main
|
/modules/backAdmin/modulo_productos_vendidos.py
|
from ..database import abrirConexion,cerrarConexion
class productosVendidos:
    """Read-only access to the producto_salida (sold products) table."""

    def consulta():
        """Fetch every sold-product row."""
        conexion = abrirConexion()
        cursor = conexion.cursor()
        cursor.execute("Select * from producto_salida")
        filas = cursor.fetchall()
        cerrarConexion(conexion)
        return filas
|
{"/modules/backAdmin/modulo_clientes_registrados.py": ["/modules/database.py"], "/modules/backCliente/modulo_almacenados.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_vendidos.py": ["/modules/database.py"], "/modules/backAdmin/modulo_reservas.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_ingresados.py": ["/modules/database.py"], "/routes.py": ["/modules/backCliente/modulo_reserva_cliente.py", "/modules/users.py", "/modules/backCliente/modulo_almacenados.py", "/modules/backAdmin/modulo_productos_ingresados.py", "/modules/backAdmin/modulo_clientes_registrados.py", "/modules/backAdmin/modulo_reservas.py", "/modules/backAdmin/modulo_productos_vendidos.py"], "/modules/backCliente/modulo_reserva_cliente.py": ["/modules/database.py"], "/modules/users.py": ["/modules/database.py"]}
|
34,452,936
|
djgpujota/madjs
|
refs/heads/main
|
/modules/backAdmin/modulo_reservas.py
|
from ..database import abrirConexion,cerrarConexion
class moduloreservas():
    """Reservation workflow: accept/reject reservations and record the resulting sales.

    Changes from the previous revision:
    - All queries with external values are parameterized (str.format allowed
      SQL injection).
    - The duplicated "first row vs. next id" branches are unified in
      _siguiente_id (len([]) + 1 == 1, so one code path covers both cases).
    - Connections are closed in finally blocks; prueba() previously leaked its
      connection entirely.
    - prueba() no longer removes items from a list while iterating over it,
      which could skip adjacent matches.
    """

    def _siguiente_id(cursor, tabla, columna):
        """Return the next sequential id for *tabla*: row count + 1 (1 when empty).

        Table/column names are internal constants, never user input.
        """
        cursor.execute("select {} from {}".format(columna, tabla))
        return len(cursor.fetchall()) + 1

    def productosVendidos(c):
        """Record the sale corresponding to accepted reservation *c* in producto_salida."""
        conexion = abrirConexion()
        try:
            cursor = conexion.cursor()
            nuevo_id = moduloreservas._siguiente_id(cursor, 'producto_salida', 'id_producto_salida')
            # Resolve the client order behind the accepted reservation.
            cursor.execute("select id_ordenes_clientes from aceptar_reserva where id_aceptar_reserva =%s", (c,))
            id_orden = cursor.fetchone()[0]
            cursor.execute("select * from ordenes_clientes where id_ordenes_clientes=%s", (id_orden,))
            orden = cursor.fetchall()
            # orden[0][2] is the product id; orden[0][3]/orden[0][5] carry the
            # quantity/amount columns copied into the sale row.
            cursor.execute("select nombre_producto_ingresado from producto_ingresado where id_producto_ingresado =%s", (orden[0][2],))
            nombreproducto = cursor.fetchone()[0]
            cursor.execute("INSERT INTO producto_salida values (%s,%s,%s,%s,%s)",
                           (nuevo_id, c, nombreproducto, orden[0][3], orden[0][5]))
            conexion.commit()
        finally:
            cerrarConexion(conexion)

    def Aceptar(b):
        """Accept reservation *b* (estado 1) and record the resulting sale."""
        conexion = abrirConexion()
        try:
            cursor = conexion.cursor()
            nuevo_id = moduloreservas._siguiente_id(cursor, 'aceptar_reserva', 'id_aceptar_reserva')
            cursor.execute("INSERT INTO aceptar_reserva values (%s,%s,1)", (nuevo_id, b))
            conexion.commit()
            # Record the sale with the id we just inserted (opens its own connection).
            moduloreservas.productosVendidos(nuevo_id)
        finally:
            cerrarConexion(conexion)

    def rechazar(b):
        """Reject reservation *b* (estado 0); no sale is recorded."""
        conexion = abrirConexion()
        try:
            cursor = conexion.cursor()
            nuevo_id = moduloreservas._siguiente_id(cursor, 'aceptar_reserva', 'id_aceptar_reserva')
            cursor.execute("INSERT INTO aceptar_reserva values (%s,%s,0)", (nuevo_id, b))
            conexion.commit()
        finally:
            cerrarConexion(conexion)

    def prueba():
        """Return pending orders (estado=0) that have not been accepted or rejected yet."""
        conexion = abrirConexion()
        try:
            cursor = conexion.cursor()
            cursor.execute("select * from ordenes_clientes where estado=0")
            pendientes = cursor.fetchall()
            cursor.execute("select * from ordenes_clientes o ,aceptar_reserva l where o.id_ordenes_clientes=l.id_ordenes_clientes")
            procesados = {fila[0] for fila in cursor.fetchall()}
        finally:
            cerrarConexion(conexion)
        # Filter instead of remove-while-iterating.
        return [orden for orden in pendientes if orden[0] not in procesados]
|
{"/modules/backAdmin/modulo_clientes_registrados.py": ["/modules/database.py"], "/modules/backCliente/modulo_almacenados.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_vendidos.py": ["/modules/database.py"], "/modules/backAdmin/modulo_reservas.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_ingresados.py": ["/modules/database.py"], "/routes.py": ["/modules/backCliente/modulo_reserva_cliente.py", "/modules/users.py", "/modules/backCliente/modulo_almacenados.py", "/modules/backAdmin/modulo_productos_ingresados.py", "/modules/backAdmin/modulo_clientes_registrados.py", "/modules/backAdmin/modulo_reservas.py", "/modules/backAdmin/modulo_productos_vendidos.py"], "/modules/backCliente/modulo_reserva_cliente.py": ["/modules/database.py"], "/modules/users.py": ["/modules/database.py"]}
|
34,452,937
|
djgpujota/madjs
|
refs/heads/main
|
/modules/backAdmin/modulo_productos_ingresados.py
|
from ..database import abrirConexion,cerrarConexion
# from datetime import datetime
class productosIngresados():
    """CRUD helpers for the producto_ingresado and proveedor tables.

    Changes from the previous revision: parameterized INSERTs (the old
    str.format interpolation allowed SQL injection through every text field),
    connections closed in finally blocks, and the duplicated "first row vs.
    next id" branches unified in _siguiente_id.
    """

    def _siguiente_id(cursor, tabla, columna):
        """Return the next sequential id for *tabla*: row count + 1 (1 when empty).

        Table/column names are internal constants, never user input.
        """
        cursor.execute("select {} from {}".format(columna, tabla))
        return len(cursor.fetchall()) + 1

    def consulta():
        """Fetch every ingested product row."""
        abrir = abrirConexion()
        try:
            cursor = abrir.cursor()
            cursor.execute("select * from producto_ingresado")
            extraer = cursor.fetchall()
        finally:
            cerrarConexion(abrir)
        return extraer

    def consultaProveedor():
        """Fetch every supplier row."""
        abrir = abrirConexion()
        try:
            cursor = abrir.cursor()
            cursor.execute("select * from proveedor")
            extraer = cursor.fetchall()
        finally:
            cerrarConexion(abrir)
        return extraer

    def registro(proveedor, nombre, precio, cantidad, imagen, categoria):
        """Insert a new product with the next sequential id."""
        conexion = abrirConexion()
        try:
            cursor = conexion.cursor()
            nuevo_id = productosIngresados._siguiente_id(cursor, 'producto_ingresado', 'id_producto_ingresado')
            cursor.execute("insert into producto_ingresado values (%s,%s,%s,%s,%s,%s,%s)",
                           (nuevo_id, proveedor, nombre, precio, cantidad, imagen, categoria))
            conexion.commit()
        finally:
            cerrarConexion(conexion)

    def registroProveedor(nombre, ruc, direccion, telefono, email):
        """Insert a new supplier with the next sequential id."""
        conexion = abrirConexion()
        try:
            cursor = conexion.cursor()
            nuevo_id = productosIngresados._siguiente_id(cursor, 'proveedor', 'id_proveedor')
            cursor.execute("insert into proveedor values (%s,%s,%s,%s,%s,%s)",
                           (nuevo_id, nombre, ruc, direccion, telefono, email))
            conexion.commit()
        finally:
            cerrarConexion(conexion)
class Actual():
    """Admin-side update of a producto_ingresado row."""
    def actualizar(proveedor,nombre,precio,cantidad,idrecivido):
        """Update the row whose id_producto_ingresado equals idrecivido.

        Returns the cursor.execute() result (driver-dependent affected
        row count).  The original's empty-table else branch built a
        params tuple from the empty fetchall result and used the wrong
        WHERE column; an UPDATE on an empty table is already a harmless
        no-op, so the branch is removed.  Values are bound as parameters
        instead of str.format to prevent SQL injection.
        """
        conexion = abrirConexion()
        cursor = conexion.cursor()
        data = cursor.execute(
            "UPDATE producto_ingresado SET id_proveedor=%s,"
            " nombre_producto_ingresado=%s, precio_producto_ingresado=%s,"
            " cantidad_producto_ingresado=%s"
            " WHERE id_producto_ingresado=%s;",
            (proveedor, nombre, precio, cantidad, idrecivido))
        conexion.commit()
        return data
class amostrar():
    """Loads one product row for the admin edit form."""
    def editaa(id):
        """Return (rows, id) for the product matching id.

        NOTE(review): the second execute argument is written as (id) —
        parentheses only, not a tuple — so the parameter sequence is
        whatever the caller passed; routes.py passes a one-element list,
        which the driver accepts.  Do not "fix" this to (id,) without
        updating the callers.
        """
        conexion=abrirConexion()
        cursor=conexion.cursor()
        cursor.execute("SELECT * FROM producto_ingresado WHERE id_producto_ingresado=%s",(id))
        formulario=cursor.fetchall()
        print(formulario)
        conexion.commit()
        return formulario,id
|
{"/modules/backAdmin/modulo_clientes_registrados.py": ["/modules/database.py"], "/modules/backCliente/modulo_almacenados.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_vendidos.py": ["/modules/database.py"], "/modules/backAdmin/modulo_reservas.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_ingresados.py": ["/modules/database.py"], "/routes.py": ["/modules/backCliente/modulo_reserva_cliente.py", "/modules/users.py", "/modules/backCliente/modulo_almacenados.py", "/modules/backAdmin/modulo_productos_ingresados.py", "/modules/backAdmin/modulo_clientes_registrados.py", "/modules/backAdmin/modulo_reservas.py", "/modules/backAdmin/modulo_productos_vendidos.py"], "/modules/backCliente/modulo_reserva_cliente.py": ["/modules/database.py"], "/modules/users.py": ["/modules/database.py"]}
|
34,452,938
|
djgpujota/madjs
|
refs/heads/main
|
/routes.py
|
import os
from flask import Flask
from flask import render_template,request,redirect,flash,url_for,jsonify,send_from_directory
from datetime import datetime
from random import randint
from modules.backCliente.modulo_reserva_cliente import ints,alm,eliminar,bebidas,carnes,vegetales,update,eed
#solo import aqui
from flask.helpers import url_for
# from de module aqui
from modules.users import users,Persona
#from modules.modulos import almaceje,edit,resiva
#from backCliente.modulo_reserva_cliente import ver
from wtforms import Form, BooleanField, StringField, validators
from modules.backCliente.modulo_almacenados import buscarP
from modules.backAdmin.modulo_productos_ingresados import productosIngresados,Actual,amostrar
from modules.backAdmin.modulo_clientes_registrados import consulta,consultaClintes,eliminarClientes,consultaId,busquedaCliente,Productoscliente
from modules.backAdmin.modulo_reservas import moduloreservas
from modules.backAdmin.modulo_productos_vendidos import productosVendidos
# Flask application setup; the secret key enables flash()/session signing.
app=Flask(__name__)
app.secret_key="Develoteca"
# Folder where uploaded product images are stored (served back by img()).
CARPETA=os.path.join('static/images')
app.config['CARPETA']=CARPETA
@app.route('/static/images/<nombre>')
def img(nombre):
    # Serve a stored product image by filename from the upload folder.
    return send_from_directory(app.config['CARPETA'],nombre)
# index
@app.route('/')
def index():
    """Public landing page."""
    return render_template('index.html')
# users
@app.route('/users/login')
def login():
    """Login form; users.admin() seeds the default admin account first."""
    users.admin()
    return render_template('users/login.html')
@app.route('/users/login_admin', methods=['GET', 'POST'])
def loguin():
    """Validate the login form and route the user by account.

    POST: checks credentials via users.loguin(); on failure flashes a
    message and returns to the login form, on success sends the admin
    account to the panel and everyone else to the index.
    GET: redirects back to the login form.  (The original fell through
    and returned None on GET, which made Flask raise a 500.)
    """
    if request.method == 'POST':
        email=request.form['email']
        password= request.form['password']
        print(email,password)
        correcto = users.loguin(email, password)
        if (correcto==0):
            flash ("Usuario y contraseña no validos")
            return redirect(url_for('login'))
        if (correcto==1):
            if(email == 'MADJS@gmail.com'):
                return redirect(url_for('panel'))
        return redirect(url_for('index'))
    # Plain GET: show the login form again instead of returning None.
    return redirect(url_for('login'))
@app.route('/users/registrar/', methods=['GET','POST'])
def registrar():
    """Show the registration form and create the account on submit.

    Validates with the Persona FlaskForm; a negative telephone is
    rejected with a flash message, and a non-numeric one is caught via
    the ValueError from int().
    """
    form=Persona()
    if form.validate_on_submit():
        try:
            name=request.form['nombre']
            direccion=request.form['direccion']  # renamed: 'dir' shadowed the builtin
            telf=int(request.form['telefono'])
            email=request.form['correo']
            clave1=request.form['clave']
            print(type(name),direccion,telf,email,clave1)
            if telf<0:
                flash('Error en el campo telefono')
                return redirect(url_for('registrar'))
            users.registro(name,direccion,telf,email,clave1)
            return redirect('/users/registrar/')
        except ValueError:
            # int() raised: the telephone field contained non-digits.
            flash('Esta ingresando datos erroneos')
            return redirect(url_for('registrar'))
    return render_template('users/registrar.html', form=form)
# admin
@app.route('/panel')
def panel():
    """Admin dashboard."""
    return render_template('index_admin.html')
@app.route('/modulo/clientes')
def mClientes():
    """Admin list of registered clients plus active/deleted totals."""
    personas=consulta()
    total,eliminados=consultaClintes()
    return render_template('admin/clientes_registrados.html',personas=personas,total=total,eliminados=eliminados)
# modal clientes registrados
@app.route('/ajaxfile', methods=['POST', 'GET'])
def ajaxfile():
    """AJAX: render one client's reservation list for the admin modal."""
    if request.method=='POST':
        userid= request.form['userid']
        personas,total=Productoscliente.consulta(userid)
        return jsonify({'htmlresponse': render_template('admin/reservasClientes.html',personas=personas,total=total )})
@app.route('/ajaxBuscar', methods=['POST', 'GET'])
def ajaxBus():
    """AJAX client search; returns rendered result HTML.

    NOTE(review): only the POST path returns a response — a plain GET
    (or a busquedaCliente result that is neither == [] nor != []) falls
    through and yields None, a Flask 500.  This applies to the other
    AJAX endpoints below as well; confirm the UI only issues POSTs.
    """
    if request.method=='POST':
        busqueda= request.form['uBuscar']
        print(busqueda)
        consulta=busquedaCliente(busqueda)
        if consulta != []:
            mensaje="Se ha encontrado el cliente"
            return jsonify({'htmlresponse': render_template('admin/busquedaCliente.html',consulta=consulta,mensaje=mensaje)})
        # eliminarClientes(userid)
        if consulta == []:
            mensaje="No se ha encontrado el cliente"
            return jsonify({'htmlresponse': render_template('admin/busquedaCliente.html',consulta=consulta,mensaje=mensaje)})
@app.route('/ajaxElim', methods=['POST', 'GET'])
def ajaxElim():
    """AJAX: fetch one client row for the delete-confirmation alert."""
    if request.method=='POST':
        userid= int(request.form['userid'])
        personas=consultaId(userid)
        # eliminarClientes(userid)
        return jsonify({'htmlresponse': render_template('admin/alerta.html',personas=personas)})
@app.route('/eliminar/<int:id>')
def meliminarClientes(id):
    """Remove a client, then return to the client list."""
    eliminarClientes(id=id)
    return redirect('/modulo/clientes')
@app.route('/modulo/productosIngresados' , methods=['POST','GET'])
def mProductosI():
    """Admin page: stored products together with the provider list."""
    listado = productosIngresados.consulta()
    lista_proveedores = productosIngresados.consultaProveedor()
    return render_template('admin/productos_ingresados.html',
                           productos=listado,
                           proveedores=lista_proveedores)
@app.route('/crearProveedor', methods=['POST','GET'])
def crearProveedor():
    """Create a provider row from the admin form, then reload the product page."""
    if request.method=='POST':
        nombre = request.form['nombre']
        ruc = request.form['ruc']
        direccion = request.form['direccion']
        telefono = request.form['telefono']
        email = request.form['email']
        productosIngresados.registroProveedor(nombre,ruc,direccion,telefono,email)
        return redirect(url_for('mProductosI'))
    return render_template('admin/productos_ingresados.html')
@app.route('/registrarProducto', methods=['POST','GET'])
def registrarProducto():
    """Create a producto_ingresado row from the admin form.

    The uploaded image is saved under static/images with a timestamp
    prefix so stored names stay unique.  When no image is supplied the
    product is not registered and a flash message is shown — the
    original referenced the unbound nuevaImagen variable in that case
    and crashed with a 500.
    """
    if request.method=='POST':
        proveedor = request.form['proveedores']
        nombre = request.form['nombre']
        precio = request.form['precio']
        cantidad = request.form['cantidad']
        _imagen = request.files['imagen']
        categoria=request.form['categoria']
        if _imagen.filename != '' :
            # Timestamp prefix (year + time) keeps filenames unique.
            tiempo = datetime.now().strftime("%Y%H%M%S")
            nuevaImagen = tiempo + _imagen.filename
            _imagen.save("static/images/"+nuevaImagen)
            productosIngresados.registro(proveedor,nombre,precio,cantidad,nuevaImagen,categoria)
        else:
            flash('Error existen Campos vacios')
        return redirect(url_for('mProductosI'))
    return render_template('admin/productos_ingresados.html')
#EditarProducto
@app.route('/modulo/edit/producto')
def editarProducto():
    """Static edit-product page (data is loaded via editsd())."""
    return render_template('admin/editarProductos.html')
@app.route('/actualizarAd/<id>', methods=['POST'])
def actu(id):
    """Apply the admin product-edit form via Actual.actualizar().

    NOTE(review): id=id[1] keeps only the second character of the URL
    segment — presumably the template emits an id wrapped in extra
    characters; confirm against editarProductos.html before changing.
    """
    if request.method== 'POST':
        proveedor=request.form['proveedores']
        nombre=request.form['nombre']
        precio=request.form['precio']
        cantidad=request.form['cantidad']
        if(proveedor=='' or nombre == '' or precio=='' or cantidad=='' ):
            flash('Error existen Campos vacios')
            return redirect(url_for('mProductosI'))
        id=id[1]
        print(id)
        Actual.actualizar(proveedor,nombre,precio,cantidad,id)
        #print(nombre , cedula,correo,fecha)
        return redirect(url_for('mProductosI'))
@app.route('/editt/<int:id>',methods=['POST','GET'])
def editsd(id):
    """Load one product row and show it in the edit form."""
    formulario,id=amostrar.editaa([id])
    return render_template('/admin/editarProductos.html',formulario=formulario,id=id)
@app.route('/modulo/productosVendidos')
def mProductosV():
    """Admin list of sold products."""
    productos=productosVendidos.consulta()
    return render_template('admin/productos_vendidos.html',productos=productos)
@app.route('/modulo/reservas')
def mReservas():
    """Admin list of pending reservations."""
    formulario=moduloreservas.prueba()
    return render_template('admin/reservas.html',formulario=formulario)
@app.route('/aceptarReserva/<id>')
def mInventario(id):
    """Accept one reservation, then return to the reservation list."""
    moduloreservas.Aceptar(id)
    return redirect('/modulo/reservas')
@app.route('/rechazarReserva/<id>')
def rechazar(id):
    """Reject one reservation, then return to the reservation list."""
    moduloreservas.rechazar(id)
    return redirect('/modulo/reservas')
# cliente
@app.route('/cliente/almacenados')
def almacenados():
    """Client view: active reservations plus soft-deleted ones."""
    formulario,elim=alm.almacenados()
    print(formulario)
    return render_template('cliente/almacenados.html',formulario=formulario,elim=elim)
## restaurar
@app.route('/restaurar/<int:id>')
def restaurar(id):
    """Un-delete a reservation (alm.restaurar sets estado back to 0)."""
    alm.restaurar([id])
    return redirect(url_for('almacenados'))
# almacenados()
@app.route('/destroy/<int:cedula>')
def destroy(cedula):
    """Soft-delete a reservation (eliminar.dest sets estado to 1)."""
    el=eliminar.dest([cedula])
    return redirect('/cliente/almacenados')
## comentar o descomentar almacenaje
# def almacena():
# formulario=alm.almacenados()
# #print(formulario)
# return formulario
# almacena()
## comentar o descomentar el insert into
@app.route('/cliente/reservar/')
def reservar():
    """Client product-reservation page."""
    return render_template('cliente/reservar.html')
@app.route('/ajaxBuscarP',methods=['POST','GET'])
def ajaxBuscarP():
    """AJAX product search for the client reservation page.

    NOTE(review): only the POST path returns a response; a plain GET
    yields None (Flask 500) — confirm the UI only issues POSTs.
    """
    if request.method=='POST':
        consulta=request.form['buscar']
        mensaje,respuesta= buscarP.busquedaProducto(consulta)
        return jsonify({'htmlresponse': render_template('cliente/BusquedaProducto.html',mensaje=mensaje,respuesta=respuesta)})
@app.route('/ajaxlimpieza', methods=['POST', 'GET'])
def rer():
    """AJAX: render the reservation form pre-bound to one product id."""
    if request.method=='POST':
        id=request.form['userid']
        return jsonify({'respuesta': render_template('cliente/registroreserva.html',id=id)})
@app.route('/reservar/<id>', methods=['POST'])
def reser(id):
    """Store a product reservation; rejects forms with empty fields."""
    campos = {clave: request.form[clave] for clave in
              ('txtNombre', 'txtCantidad', 'txtCedula', 'txtCorreo', 'txtFecha')}
    if '' in campos.values():
        flash('Error existen Campos vacios')
        return redirect(url_for('reservar'))
    mensaje = ints.resr(campos['txtNombre'], campos['txtCantidad'],
                        campos['txtCedula'], campos['txtCorreo'],
                        campos['txtFecha'], id)
    # ints.resr returns 0 when the insert failed; re-show the form.
    if mensaje == 0:
        return redirect(url_for('reservar'))
    return redirect('/cliente/reservar/')
### limpieza
@app.route('/cliente/limpieza/')
def resert():
    """Client listing of cleaning products."""
    limpieza=ints.consultaLimpieza()
    return render_template('cliente/limpieza.html',limpieza=limpieza)
@app.route('/reservar', methods=['POST'])
def resr():
    """Reserve a cleaning product.

    NOTE(review): ints.resr() is defined with six parameters
    (..., fecha, idl) but is called here with five — this call raises
    TypeError at runtime.  Confirm which product id should be passed.
    """
    nombre=request.form['txtNombre']
    cantidad=request.form['txtCantidad']
    cedula=request.form['txtCedula']
    correo=request.form['txtCorreo']
    fecha=request.form['txtFecha']
    if(nombre=='' or cantidad == '' or cedula=='' or correo=='' or fecha=='' ):
        flash('Error existen Campos vacios')
        return redirect(url_for('reservar'))
    ints.resr(nombre , cantidad,cedula,correo,fecha)
    #print(nombre , cedula,correo,fecha)
    return redirect('/cliente/limpieza/')
### bebidas
@app.route('/cliente/bebidas/')
def rese():
    """Client listing of drink products."""
    bebidas=ints.consultabebidas()
    return render_template('cliente/bebidas.html', bebidas=bebidas)
@app.route('/bebid', methods=['POST'])
def bbs():
    """Reserve a drink product via bebidas.beb()."""
    nombre=request.form['txtNombre']
    cantidad=request.form['txtCantidad']
    cedula=request.form['txtCedula']
    correo=request.form['txtCorreo']
    fecha=request.form['txtFecha']
    if(nombre=='' or cantidad == '' or cedula=='' or correo=='' or fecha=='' ):
        flash('Error existen Campos vacios')
        return redirect(url_for('rese'))
    bebidas.beb(nombre ,cantidad, cedula,correo,fecha)
    #print(nombre , cedula,correo,fecha)
    return redirect('/cliente/bebidas/')
## carnes
@app.route('/cliente/carnes/')
def crn():
    """Client listing of meat products."""
    carnes=ints.consultacarnes()
    return render_template('cliente/carnes.html', carnes=carnes)
@app.route('/carnes', methods=['POST'])
def crs():
    """Reserve a meat product via carnes.car().

    NOTE(review): the empty-field error redirects to 'rese' (the drinks
    listing) rather than 'crn' — looks like copy-paste from bbs();
    confirm intent before changing.
    """
    nombre=request.form['txtNombre']
    cantidad=request.form['txtCantidad']
    cedula=request.form['txtCedula']
    correo=request.form['txtCorreo']
    fecha=request.form['txtFecha']
    if(nombre=='' or cantidad == '' or cedula=='' or correo=='' or fecha=='' ):
        flash('Error existen Campos vacios')
        return redirect(url_for('rese'))
    carnes.car(nombre ,cantidad, cedula,correo,fecha)
    #print(nombre , cedula,correo,fecha)
    return redirect('/cliente/carnes/')
## vegetales
@app.route('/cliente/vegetales/')
def veget():
    """Client listing of vegetable products."""
    vegetales=ints.consultavegetales()
    return render_template('cliente/vegetales.html',vegetales=vegetales)
@app.route('/vegetales', methods=['POST'])
def vgt():
    """Reserve a vegetable product via vegetales.ve().

    NOTE(review): the empty-field error redirects to 'rese' (the drinks
    listing) rather than 'veget' — looks like copy-paste; confirm.
    """
    nombre=request.form['txtNombre']
    cantidad=request.form['txtCantidad']
    cedula=request.form['txtCedula']
    correo=request.form['txtCorreo']
    fecha=request.form['txtFecha']
    if(nombre=='' or cantidad == '' or cedula=='' or correo=='' or fecha=='' ):
        flash('Error existen Campos vacios')
        return redirect(url_for('rese'))
    vegetales.ve(nombre , cantidad,cedula,correo,fecha)
    #print(nombre , cedula,correo,fecha)
    return redirect('/cliente/vegetales/')
#
#
# editar
@app.route('/cliente/editar/')
def actua():
    """Static client edit page (data is loaded via editrs())."""
    return render_template('cliente/edit.html')
@app.route('/update/<int:id>', methods=['POST'])
def act(id):
    """Apply the client reservation-edit form via update.actualizar()."""
    if request.method== 'POST':
        nombre=request.form['txtNombre']
        cantidad=request.form['txtCantidad']
        cedula=request.form['txtCedula']
        correo=request.form['txtCorreo']
        fecha=request.form['txtFecha']
        if(nombre=='' or cantidad == '' or cedula=='' or correo=='' or fecha=='' ):
            flash('Error existen Campos vacios')
            return redirect(url_for('almacenados'))
        update.actualizar(nombre ,cantidad, cedula,correo,fecha,id)
        #print(nombre , cedula,correo,fecha)
        return redirect(url_for('almacenados'))
@app.route('/edit/<int:id>')
def editrs(id):
    """Load one reservation row into the client edit form."""
    formulario=eed.editaa([id])
    return render_template('/cliente/edit.html',formulario=formulario)
### funciona pero no guarda correctamente y no muestra ver por que error en formulario si se cambia la cedula no vale
# @app.route('/cliente/almacenados')
# def almacenados():
# formulario=alm.almacenados()
# print(formulario)
# return render_template('cliente/almacenados.html',formulario=formulario)
# # almacenados()
# @app.route('/destroy/<int:cedula>')
# def destroy(cedula):
# el=eliminar.dest([cedula])
# return redirect('/cliente/almacenados')
#ejemplo
# @app.route('/ejemplo')
# def ejemplo():
# ver = insert()
# pw=resiva()
# print(pw)
# i=into()
# print(i)
# return ver, pw
# ejemplo()
# Run the Flask development server when executed directly.
if __name__=='__main__':
    app.run(debug=True)
|
{"/modules/backAdmin/modulo_clientes_registrados.py": ["/modules/database.py"], "/modules/backCliente/modulo_almacenados.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_vendidos.py": ["/modules/database.py"], "/modules/backAdmin/modulo_reservas.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_ingresados.py": ["/modules/database.py"], "/routes.py": ["/modules/backCliente/modulo_reserva_cliente.py", "/modules/users.py", "/modules/backCliente/modulo_almacenados.py", "/modules/backAdmin/modulo_productos_ingresados.py", "/modules/backAdmin/modulo_clientes_registrados.py", "/modules/backAdmin/modulo_reservas.py", "/modules/backAdmin/modulo_productos_vendidos.py"], "/modules/backCliente/modulo_reserva_cliente.py": ["/modules/database.py"], "/modules/users.py": ["/modules/database.py"]}
|
34,452,939
|
djgpujota/madjs
|
refs/heads/main
|
/modules/modulos.py
|
# MODULOS se comunica con los modulos inferiores, envia datos a la db y envia datos a las rutas
#from database import con
#from backCliente.modulo_reserva_cliente import igual
#from modules.database import cerrarConexion,abrirConexion
# aqui para routes
from flask import render_template,request
from datetime import datetime
from threading import current_thread
#from backCliente.modulo_reserva_cliente import ver
#from modules.database import abrirConexion,cerrarConexion
import time
#from backCliente.modulo_reserva_cliente import ver
# def l():
# lt=ver()
# print(lt)
# return lt
# l()
#from backCliente.modulo_reserva_cliente import ver
# def resiva():
# cone=con
# cursor= cone.cursor()
# cursor.execute('SELECT * FROM rol')
# print(cursor.fetchall())
# cone.commit
# resiva()
|
{"/modules/backAdmin/modulo_clientes_registrados.py": ["/modules/database.py"], "/modules/backCliente/modulo_almacenados.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_vendidos.py": ["/modules/database.py"], "/modules/backAdmin/modulo_reservas.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_ingresados.py": ["/modules/database.py"], "/routes.py": ["/modules/backCliente/modulo_reserva_cliente.py", "/modules/users.py", "/modules/backCliente/modulo_almacenados.py", "/modules/backAdmin/modulo_productos_ingresados.py", "/modules/backAdmin/modulo_clientes_registrados.py", "/modules/backAdmin/modulo_reservas.py", "/modules/backAdmin/modulo_productos_vendidos.py"], "/modules/backCliente/modulo_reserva_cliente.py": ["/modules/database.py"], "/modules/users.py": ["/modules/database.py"]}
|
34,452,940
|
djgpujota/madjs
|
refs/heads/main
|
/modules/backCliente/modulo_reserva_cliente.py
|
from ..database import abrirConexion ,cerrarConexion
# Module-level shared DB connection, opened once at import time and reused
# by every class below.  NOTE(review): it is never closed or refreshed, so
# a dropped connection persists for the process lifetime — confirm.
conexion=abrirConexion()
def insert():
    """Smoke-test helper: print and return the literal 'hola'."""
    saludo = 'hola'
    print(saludo)
    return saludo
# insert()
# def resiva():
# cursor= conexion.cursor()
# cursor.execute('SELECT * FROM rol')
# form=cursor.fetchall()
# print(form)
# return form
# resiva()
#### ingresar la reserva
class ints():
    """Per-category product queries plus the reservation insert.

    The consulta* methods open their own connection; resr() reuses the
    module-level `conexion`.  NOTE(review): resr() builds SQL with
    str.format (injection-prone) and swallows all exceptions with a bare
    except, returning 0 — routes.reser() relies on that 0 contract.
    """
    def consultaLimpieza():
        """Rows of producto_ingresado in the cleaning-products category."""
        conexion=abrirConexion()
        cursor = conexion.cursor()
        cursor.execute("SELECT * FROM producto_ingresado WHERE categoria_productos = 'productos de limpieza'")
        consulta = cursor.fetchall()
        cerrarConexion(conexion)
        return consulta
    def consultabebidas():
        """Rows of producto_ingresado in the drinks category."""
        conexion=abrirConexion()
        cursor = conexion.cursor()
        cursor.execute("SELECT * FROM producto_ingresado WHERE categoria_productos = 'bebidas'")
        consulta = cursor.fetchall()
        cerrarConexion(conexion)
        return consulta
    def consultacarnes():
        """Rows of producto_ingresado in the meat category."""
        conexion=abrirConexion()
        cursor = conexion.cursor()
        cursor.execute("SELECT * FROM producto_ingresado WHERE categoria_productos = 'carnes'")
        consulta = cursor.fetchall()
        cerrarConexion(conexion)
        return consulta
    def consultavegetales():
        """Rows of producto_ingresado in the vegetables category."""
        conexion=abrirConexion()
        cursor = conexion.cursor()
        cursor.execute("SELECT * FROM producto_ingresado WHERE categoria_productos = 'Vegetales'")
        consulta = cursor.fetchall()
        cerrarConexion(conexion)
        return consulta
    def resr(nombre,cantidad,cedula,correo,fecha,idl):
        """Insert an ordenes_clientes row for product idl.

        Looks up the person id by name and the product price, computes
        the subtotal (price * quantity) and inserts with category
        'Producto Hogar'.  Returns 0 on any failure (missing person,
        bad price, SQL error) — None on success.
        """
        nombre=nombre
        cantidad=float(cantidad)
        cedula=cedula
        correo=correo
        fecha=fecha
        cursor=conexion.cursor()
        cursor.execute("select id_ordenes_clientes from ordenes_clientes")
        id = cursor.fetchall()
        if(id!=[]):
            try:
                # Next order id = current row count + 1 (no auto-increment).
                cursor.execute("select id_ordenes_clientes from ordenes_clientes")
                a1 = cursor.fetchall()
                a2= (len(a1))+1
                cursor.execute("select id_personas from personas WHERE nombre_personas='{}'".format(nombre))
                b1 = cursor.fetchone()
                b1=b1[0]
                print(b1,'esto me devuelve')
                #if b1== 'NoneType':
                #return 'Este usuario no esta registrado'
                cursor.execute("select precio_producto_ingresado from producto_ingresado where id_producto_ingresado={}".format(idl))
                c1 = cursor.fetchone()
                c1=float(c1[0])
                print(c1,'este es el precio')
                calculartotal=(c1*cantidad)
                print(calculartotal,'aqui esta calculando')
                #c2=(len(c1))+1
                cursor.execute("insert into ordenes_clientes values ({},{},{},{},'{}',{},{},'{}','Producto Hogar','{}',0)".format(a2,b1,idl,calculartotal,nombre,cantidad,cedula,correo,fecha))
                conexion.commit()
            except:
                # Any failure above (unknown person -> b1[0] on None, etc.)
                # is reported to the route as 0.
                return 0
        else:
            # Empty table: same flow but with a fixed order id of 1.
            try:
                value=1
                cursor.execute("select id_personas from personas WHERE nombre_personas='{}'".format(nombre))
                b1 = cursor.fetchone()
                b1=b1[0]
                print(b1,'esto me devuelve')
                #if b1== 'NoneType':
                #return 'Este usuario no esta registrado'
                cursor.execute("select precio_producto_ingresado from producto_ingresado where id_producto_ingresado={}".format(idl))
                c1 = cursor.fetchone()
                c1=float(c1[0])
                print(c1,'este es el precio')
                calculartotal=(c1*cantidad)
                print(calculartotal,'aqui esta calculando')
                #c2=(len(c1))+1
                cursor.execute("insert into ordenes_clientes values ({},{},{},{},'{}',{},{},'{}','Producto Hogar','{}',0)".format(value,b1,idl,calculartotal,nombre,cantidad,cedula,correo,fecha))
                conexion.commit()
            except:
                return 0
##### productos de bebidas
class bebidas():
    """Order insertion for the 'Bebidas' category."""
    def beb(nombre,cantidad,cedula,correo,fecha):
        """Insert one ordenes_clientes row with category 'Bebidas'.

        The order id — and the id_personas / id_producto_ingresado
        columns, which the original filled with the same row-count-based
        value — is derived as row count + 1; that also yields 1 for an
        empty table, so the original's duplicated if/else branches are
        merged.  Values are bound as query parameters instead of
        str.format to prevent SQL injection.
        """
        cursor=conexion.cursor()
        cursor.execute("select id_ordenes_clientes from ordenes_clientes")
        siguiente = len(cursor.fetchall()) + 1
        cursor.execute(
            "insert into ordenes_clientes values (%s,%s,%s,12,%s,%s,%s,%s,'Bebidas',%s,0)",
            (siguiente, siguiente, siguiente, nombre, cantidad, cedula, correo, fecha))
        conexion.commit()
### narnes
class carnes():
    """Order insertion for the 'Carnes' category."""
    def car(nombre,cantidad,cedula,correo,fecha):
        """Insert one ordenes_clientes row with category 'Carnes'.

        Same derivation as bebidas.beb(): the order id and the two
        foreign-key columns all get row count + 1 (1 for an empty
        table), merging the original's duplicated branches; parameters
        are bound instead of str.format to prevent SQL injection.
        """
        cursor=conexion.cursor()
        cursor.execute("select id_ordenes_clientes from ordenes_clientes")
        siguiente = len(cursor.fetchall()) + 1
        cursor.execute(
            "insert into ordenes_clientes values (%s,%s,%s,12,%s,%s,%s,%s,'Carnes',%s,0)",
            (siguiente, siguiente, siguiente, nombre, cantidad, cedula, correo, fecha))
        conexion.commit()
### vegetales
class vegetales():
    """Order insertion for the 'Vegetales' category."""
    def ve(nombre,cantidad,cedula,correo,fecha):
        """Insert one ordenes_clientes row with category 'Vegetales'.

        Same derivation as bebidas.beb(): the order id and the two
        foreign-key columns all get row count + 1 (1 for an empty
        table), merging the original's duplicated branches; parameters
        are bound instead of str.format to prevent SQL injection.
        """
        cursor=conexion.cursor()
        cursor.execute("select id_ordenes_clientes from ordenes_clientes")
        siguiente = len(cursor.fetchall()) + 1
        cursor.execute(
            "insert into ordenes_clientes values (%s,%s,%s,12,%s,%s,%s,%s,'Vegetales',%s,0)",
            (siguiente, siguiente, siguiente, nombre, cantidad, cedula, correo, fecha))
        conexion.commit()
#### aqui se muestran los datos de la reserva
class alm():
    """Client-side listing and restore of reservations."""
    def almacenados():
        """Return (active, soft-deleted) reservation rows joined with their product."""
        cursor=conexion.cursor()
        # estado=0: active reservations.
        query="SELECT * FROM ordenes_clientes, producto_ingresado WHERE ordenes_clientes.estado=0 and producto_ingresado.id_producto_ingresado= ordenes_clientes.id_producto_ingresado"
        cursor.execute(query)
        formulario=cursor.fetchall()
        formulario=formulario
        #print(formulario)
        # estado=1: soft-deleted reservations (shown separately).
        que="SELECT * FROM ordenes_clientes, producto_ingresado WHERE ordenes_clientes.estado=1 and producto_ingresado.id_producto_ingresado= ordenes_clientes.id_producto_ingresado"
        cursor.execute(que)
        elim=cursor.fetchall()
        elim=elim
        conexion.commit()
        return formulario,elim
    ####restaurar
    def restaurar(id):
        """Reset estado to 0 for one order.

        NOTE(review): the parameter is written as (id) — not a tuple —
        so callers must pass a sequence; routes.py passes [id].
        """
        cursor=conexion.cursor()
        query=cursor.execute("UPDATE ordenes_clientes SET estado=0 WHERE id_ordenes_clientes=%s",(id))
        # cursor.execute("SELECT * FROM ordenes_clientes WHERE cedula_ordenes_cliente=%s",(cedula))
        # cursor.execute(query)
        conexion.commit()
        # return query
# return query
### aqui se eliminan los atos de la reserva
# class eliminar():
# def dest(cedula):
# cur=conexion.cursor()
# elm=cur.execute("DELETE FROM ordenes_clientes WHERE id_ordenes_clientes=%s",(cedula))
# conexion.commit()
# return elm
### aqui para editar las reservas
class update():
    """Client-side reservation edit."""
    def actualizar(nombre,cantidad,cedula,correo,fecha,idP):
        """Update the ordenes_clientes row identified by idP.

        Returns the cursor.execute() result (driver-dependent affected
        row count).  cedula is accepted for interface compatibility but
        — exactly as in the original — the cedula column is not
        modified.  The original's two branches executed the identical
        UPDATE (the preceding counts were unused), so they are merged,
        and values are bound as parameters instead of str.format to
        prevent SQL injection.
        """
        cursor=conexion.cursor()
        data = cursor.execute(
            "UPDATE ordenes_clientes SET nombre_ordenes_cliente=%s,"
            "cantidad_ordenes_cliente=%s,correo_ordenes_clientes=%s,"
            "fecha_ordenes_cliente=%s WHERE id_ordenes_clientes=%s;",
            (nombre, cantidad, correo, fecha, idP))
        conexion.commit()
        return data
# @app.route('/edit/<int:cedula>')
class eed():
    """Fetches one order row for the client edit form."""
    def editaa(id):
        """Return the rows matching id; callers pass a one-element list."""
        cur = conexion.cursor()
        # (id) is not a tuple — the caller's list is the parameter sequence.
        cur.execute("SELECT * FROM ordenes_clientes WHERE id_ordenes_clientes=%s", (id))
        fila = cur.fetchall()
        conexion.commit()
        return fila
# return render_template('edit.html',formulario=formulario)
# class eddi():
# def editara(cedula):
# cursor=conexion.cursor()
# form=cursor.execute("SELECT * FROM ordenes_clientes WHERE id_ordenes_clientes=%s",(cedula))
# formulario=cursor.fetchall()
# conexion.commit()
# formulario=formulario
# #print(formulario)
# conexion.commit()
# return formulario
class eliminar():
    """Soft-delete: flags an order row instead of removing it."""
    def dest(cedula):
        """Set estado=1 on the matching order; returns execute()'s result."""
        cursor = conexion.cursor()
        resultado = cursor.execute(
            "UPDATE ordenes_clientes SET estado=1 WHERE id_ordenes_clientes=%s", (cedula))
        conexion.commit()
        return resultado
# class eliminar():
# def dest(cedula):
# cur=conexion.cursor()
# elm=cur.execute("DELETE FROM ordenes_clientes WHERE id_ordenes_clientes=%s",(cedula))
# conexion.commit()
# return elm
# def into():
# # sql="INSERT INTO 'Formulario'('cedula','nombre','apellido','edad','telefno','correo','pregunta1','pregunta2','pregunta3') VALUES (1766617813,'Gloria Alejandra','Molina Ron',20,0928739478,'gloria@gmail.com','te gustan los gatos','como estas hoy','que vas a hacer')"
# cur=conexion.cursor()
# sql="INSERT INTO ordenes_clientes (id_ordenes_clientes,id_clientes,numero_ordenes_clientes,fecha_ordenes_clientes,subtotal_ordenes_clientes,hora_ordenes_clientes,cedula_ordenes_cliente) VALUES (2,2,2,'03-11-2021',21,'12:00',1109825367) "
# cur.execute(sql)
# print(sql)
# conexion.commit()
# conexion.close()
|
{"/modules/backAdmin/modulo_clientes_registrados.py": ["/modules/database.py"], "/modules/backCliente/modulo_almacenados.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_vendidos.py": ["/modules/database.py"], "/modules/backAdmin/modulo_reservas.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_ingresados.py": ["/modules/database.py"], "/routes.py": ["/modules/backCliente/modulo_reserva_cliente.py", "/modules/users.py", "/modules/backCliente/modulo_almacenados.py", "/modules/backAdmin/modulo_productos_ingresados.py", "/modules/backAdmin/modulo_clientes_registrados.py", "/modules/backAdmin/modulo_reservas.py", "/modules/backAdmin/modulo_productos_vendidos.py"], "/modules/backCliente/modulo_reserva_cliente.py": ["/modules/database.py"], "/modules/users.py": ["/modules/database.py"]}
|
34,452,941
|
djgpujota/madjs
|
refs/heads/main
|
/modules/users.py
|
from modules.database import abrirConexion,cerrarConexion
# Importaciones externas
from flask_wtf import FlaskForm
from wtforms import StringField,SubmitField
from wtforms.validators import DataRequired,Email,Length
from wtforms.fields.simple import PasswordField
# Importaciones de modulos internos
from modules.database import cerrarConexion,abrirConexion
class Persona(FlaskForm):
    """Registration/profile form with required-field and length validation."""
    nombre=StringField("Nombre",validators=[
        DataRequired(message="El campo es obligatorio"),
        Length(max=20, min=5, message="El campo debe tener entre 5 y 20 caracteres")
    ])
    direccion=StringField("Direccion",validators=[
        DataRequired(message="El campo es obligatorio"),
        Length(min=9, max=50, message="El campo debe tener entre 9 y 50 caracteres.")
    ])
    telefono=StringField("Telefono",validators=[
        DataRequired(message="El campo es obligatorio"),
        # Bug fix: the message claimed "entre 9 y 11" but the validator
        # actually enforces min=7.
        Length(min=7, max=11, message="El campo debe tener entre 7 y 11 caracteres.")
    ])
    correo= StringField("Email",validators=[
        DataRequired(message="El campo es obligatorio"),
        Email(message="Email erroneo")
    ])
    clave=PasswordField("Clave",validators=[
        DataRequired(message="El campo es obligatorio"),
        Length(min=4, max=10, message="Se requiere de 4 a 10 caracteres")
    ])
    submit=SubmitField('Enviar')
class users():
    """Namespace of account helpers.

    Methods take no `self` on purpose: the class is used as a plain
    namespace (callers invoke users.loguin(...) etc.).
    """

    def admin():
        """Ensure the default 'admin' row exists in the personas table."""
        con = abrirConexion()
        cursor = con.cursor()
        cursor.execute("select nombre_personas from personas")
        # Bail out early if the admin account already exists.  The
        # original leaked the connection on this path.
        for row in cursor.fetchall():
            if row[0] == 'admin':
                cerrarConexion(con)
                return
        cursor.execute("select id_personas from personas")
        ids = cursor.fetchall()
        # Sequential id: one past the current row count, or 1 when empty.
        next_id = len(ids) + 1 if ids else 1
        # Parameterized query instead of str.format (consistent with the
        # other db modules and SQL-injection safe).
        cursor.execute(
            "insert into personas values (%s,1,'admin','MAD.JS', 22222222, 'MADJS@gmail.com', 'MADJS1', 0)",
            (next_id,))
        con.commit()
        cerrarConexion(con)

    def loguin(email, password):
        """Return 1 when (email, password) matches a personas row, else 0.

        NOTE(review): passwords are compared in plain text here --
        confirm whether hashing is handled elsewhere.
        """
        correcto = 0
        con = abrirConexion()
        cursor = con.cursor()
        cursor.execute("select * from personas")
        consulta = cursor.fetchall()
        # Close the connection (the original leaked it in this method).
        cerrarConexion(con)
        for row in consulta:
            # Columns 5 and 6 hold the stored email and password.
            if email == row[5] and password == row[6]:
                correcto = 1
        return correcto

    def registro(name, dir, telf, email, clave1):
        """Insert a new client (tipo 2) row built from the form fields."""
        con = abrirConexion()
        cursor = con.cursor()
        cursor.execute("select id_personas from personas")
        ids = cursor.fetchall()
        next_id = len(ids) + 1 if ids else 1
        # Security fix: the original interpolated user-supplied input
        # with str.format, which was open to SQL injection.
        cursor.execute(
            "insert into personas values (%s,2,%s,%s,%s,%s,%s,0)",
            (next_id, name, dir, telf, email, clave1))
        con.commit()
        cerrarConexion(con)
# def actualizarCliente(id,name,dir,telf,email,clave1):
# id=id
# name=name
# dir=dir
# telf=telf
# email=email
# clave1=clave1
# # Llammamos a la conexion a la base de datos
# con= abrirConexion()
# conexion=con
# #Creamos el cursor con la conexion anteriormente llamado
# cursor= conexion.cursor()
# cursor.execute("update personas set nombre_personas='{}', dir_personas='{}', telf_personas={}, email_personas='{}', clave_personas='{}' where id_personas={}".format(name,dir,telf,email,clave1,id))
# conexion.commit()
# cerrarConexion(conexion)
# users.registro('Dylan2','alal',999,'das@gmail.com','1234')
|
{"/modules/backAdmin/modulo_clientes_registrados.py": ["/modules/database.py"], "/modules/backCliente/modulo_almacenados.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_vendidos.py": ["/modules/database.py"], "/modules/backAdmin/modulo_reservas.py": ["/modules/database.py"], "/modules/backAdmin/modulo_productos_ingresados.py": ["/modules/database.py"], "/routes.py": ["/modules/backCliente/modulo_reserva_cliente.py", "/modules/users.py", "/modules/backCliente/modulo_almacenados.py", "/modules/backAdmin/modulo_productos_ingresados.py", "/modules/backAdmin/modulo_clientes_registrados.py", "/modules/backAdmin/modulo_reservas.py", "/modules/backAdmin/modulo_productos_vendidos.py"], "/modules/backCliente/modulo_reserva_cliente.py": ["/modules/database.py"], "/modules/users.py": ["/modules/database.py"]}
|
34,472,528
|
timothyleslie/compiler
|
refs/heads/master
|
/Lab1/lexer.py
|
digits = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
alphas = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n',
'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']
global current_state
global States
def generate_states():
    """Return the lexer's 20 state identifiers (0..19) as a list."""
    return list(range(20))
def is_digit(ch):
    """True if *ch* is one of the ASCII digit characters."""
    # Idiom fix: the membership test is already a boolean -- no need for
    # the if/else returning True/False.
    return ch in digits
def is_alpha(ch):
    """True if *ch* is a lowercase ASCII letter."""
    # Idiom fix: return the membership test directly.
    return ch in alphas
def analysis(ch):
    # Advance the lexer DFA by one character.  Only transitions out of
    # the start state are implemented here: a letter moves the machine
    # to state 14, a digit to state 16; other states are left unchanged.
    global current_state
    global States
    if current_state == States[0]:
        if is_alpha(ch):
            current_state = States[14]
        elif is_digit(ch):
            current_state = States[16]
def scanner(text):
    """Feed each character of *text* to the DFA via analysis()."""
    # Idiom fix: iterate the characters directly instead of indexing
    # with range(len(...)).
    for ch in text:
        analysis(ch)
def main():
    """Entry point: initialize the DFA and scan the test source file."""
    global States
    global current_state
    States = generate_states()
    current_state = States[0]
    # Bug fix: the original passed the bound method object `fp.open` to
    # scanner() instead of the file *contents*; also use a context
    # manager so the file is always closed.
    with open('test.c', 'r') as fp:
        scanner(fp.read())
if __name__ == '__main__':
    main()
|
{"/main.py": ["/codegen/codegen.py", "/lexer/lexer.py", "/syntax/parser.py"]}
|
34,534,808
|
clouds16/intro-python
|
refs/heads/master
|
/enemies.py
|
class Enemies:
    """A combat opponent with flat stats and armor-mitigated damage."""

    def __init__(self, name, ability, feature, health, attack, armor, speed):
        self.name = name
        self.ability = ability
        self.feature = feature
        self.speed = speed
        self.attack = attack
        self.health = health
        self.armor = armor
        # Initialized but not read anywhere in the visible code.
        self.magic_resist = 0

    def getName(self):
        return self.name

    def getFeature(self):
        return self.feature

    def getAbility(self):
        return self.ability

    def getHealth(self):
        return self.health

    def getSpeed(self):
        return self.speed

    def getAttack(self):
        return self.attack

    def takeDamage(self, damage):
        """Apply *damage* reduced by armor; return the remaining health."""
        # Bug fix: armor soaks damage but must never turn a hit into
        # healing (the original increased health when armor > damage).
        self.health = self.health - max(0, damage - self.armor)
        return self.health
# Area bosses: (name, ability, feature, health, attack, armor, speed).
forestBoss = Enemies("TreeMan ", "Root Slam", "Vines", 100, 30, 10, 5)
desertBoss = Enemies("Giant Lizard", "Tail Whip", "Vanish", 40, 50, 10, 200)
ruinsBoss = Enemies("Stone Golem", "Collosal Slam",
                    "Bullet Punch", 160, 40, 5, 5)
Ganon = Enemies("Ganandorf", "Void Crush", "Slash", 120, 60, 20, 20)
|
{"/gamemap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/sounds.py"], "/Testermap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py"], "/textread.py": ["/player.py"], "/game.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/gamemap.py", "/sounds.py"], "/locations.py": ["/items.py", "/enemies.py", "/textread.py", "/fight.py", "/player.py"], "/fight.py": ["/player.py", "/enemies.py", "/items.py", "/sounds.py"], "/createchar.py": ["/enemies.py", "/player.py"], "/items.py": ["/player.py", "/sounds.py"]}
|
34,534,809
|
clouds16/intro-python
|
refs/heads/master
|
/player.py
|
class Player:
    """The hero: base stats, a potion inventory, and combat helpers."""

    def __init__(self, name, ability, item):
        self.name = name
        self.ability = ability
        self.item = item
        self.speed = 30
        self.health = 100
        self.attack = 50
        self.armor = 20
        # Starting consumables: name -> effect magnitude (HP or speed).
        self.inventory = {
            "small potion": 20,
            "medium potion": 40,
            "speed potion": 30,
        }

    def getName(self):
        return self.name

    def getAbility(self):
        # Bug fix: the original returned self.getAbility(), recursing
        # forever; return the attribute instead.
        return self.ability

    def getItem(self):
        return self.item

    def getHealth(self):
        return self.health

    def getSpeed(self):
        return self.speed

    def getAttack(self):
        return self.attack

    def setAttack(self, attack):
        # Additive: the argument is a bonus on top of current attack.
        self.attack = attack + self.attack

    def setSpeed(self, speed):
        self.speed = speed

    def takeDamage(self, damage):
        """Apply *damage* reduced by armor; return the remaining health."""
        # Bug fix: armor must never convert a weak hit into healing.
        self.health = self.health - max(0, damage - self.armor)
        return self.health

    def useHealthPotion(self, item):
        # Heal by the potion's value, capped at the 100 HP maximum.
        self.health = self.health + item
        if self.health > 100:
            self.health = 100
        return self.health

    def useSpeedPotion(self, item):
        self.speed = self.speed + item
        return self.speed

    def addWeapon(self, weapon):
        self.attack = self.attack + weapon
        return self.attack
# Interactive character creation at import time: blocks on three
# stdin prompts as soon as this module is imported.
player = Player(str(input("Great HERO, what is your name?...")), str(input(
    "What kind of abilities do you have?... ")), str(input("What kind of weapon do you use?... ")))
|
{"/gamemap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/sounds.py"], "/Testermap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py"], "/textread.py": ["/player.py"], "/game.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/gamemap.py", "/sounds.py"], "/locations.py": ["/items.py", "/enemies.py", "/textread.py", "/fight.py", "/player.py"], "/fight.py": ["/player.py", "/enemies.py", "/items.py", "/sounds.py"], "/createchar.py": ["/enemies.py", "/player.py"], "/items.py": ["/player.py", "/sounds.py"]}
|
34,534,810
|
clouds16/intro-python
|
refs/heads/master
|
/gamemap.py
|
from enemies import *
from player import *
from textread import *
from fight import *
from items import *
from sounds import *
####################### Main loop to play game ################
def straightMap(player):
    # Linear campaign: run each area in fixed order, awarding its item
    # on victory, finishing with the castle fight.
    # townSquare(player)
    toDarkForest(player, forestBoss, "Sword of Light")
    toDesert(player, desertBoss, "super potion")
    toRuins(player, ruinsBoss, "Boss Key")
    toCastle(player, Ganon)
def townSquare(player):
    # Hub intro: narration, then background music.
    # NOTE(review): songs["townsquare"] is an empty string in sounds.py;
    # confirm playsound("") behaves as intended.
    slowText("dialogues/townsquare.txt")
    playsound(songs["townsquare"])
def toDarkForest(player, boss, item):
    # Dark Forest area: intro text, location banner, boss fight; victory
    # awards the area's item (e.g. "Sword of Light").
    paths = {
        "Currently": "Dark Forest"
    }
    slowText("dialogues/darkforest.txt")
    print("You are currently in the {}".format(paths["Currently"]))
    enemyfight = encounterEnemy(player, boss)
    if enemyfight:
        addItemToInvetory(player, item)
def toRuins(player, boss, item):
    """Ruins area: location banner, intro text, boss fight, key reward."""
    paths = {
        # Bug fix: this was "Dark Forest", so the banner below reported
        # the wrong area while in the Ruins.
        "Currently": "Ruins"
    }
    print("you are currently in the {} . it looks like we can find the key to the castle here...".format(
        paths["Currently"]))
    slowText("dialogues/ruins.txt")
    enemyfight = encounterEnemy(player, boss)
    if enemyfight:
        addItemToInvetory(player, item)
def toDesert(player, boss, item):
    """Desert area: intro text, boss fight, potion reward."""
    paths = {
        # Bug fix: was mislabeled "Dark Forest"; not printed here, but
        # kept correct for consistency with the other area functions.
        "Currently": "Desert"
    }
    slowText("dialogues/desert.txt")
    enemyfight = encounterEnemy(player, boss)
    if enemyfight:
        addItemToInvetory(player, item)
# 333
def toCastle(player, boss):
    """Final area: requires the Boss Key, then the fight against Ganon."""
    playsound(songs["castle"])
    slowText("dialogues/castle.txt")
    # Bug fix: direct indexing raised KeyError when the key was missing,
    # so the "not ready" branch below could never run.
    if player.inventory.get("Boss Key"):
        print("You may proceed to the Castle")
        paths = {
            "Currently": "Castle"
        }
        print("You are currently in the {}".format(paths["Currently"]))
        enemyfight = encounterEnemy(player, boss)
        if enemyfight:
            slowText("dialogues/victory.txt")
            slowText("dialogues/endgame.txt")
        else:
            print("Game Over")
    else:
        print("You are not ready to proceed yet")
        time.sleep(3)
|
{"/gamemap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/sounds.py"], "/Testermap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py"], "/textread.py": ["/player.py"], "/game.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/gamemap.py", "/sounds.py"], "/locations.py": ["/items.py", "/enemies.py", "/textread.py", "/fight.py", "/player.py"], "/fight.py": ["/player.py", "/enemies.py", "/items.py", "/sounds.py"], "/createchar.py": ["/enemies.py", "/player.py"], "/items.py": ["/player.py", "/sounds.py"]}
|
34,534,811
|
clouds16/intro-python
|
refs/heads/master
|
/Testermap.py
|
from enemies import *
from player import *
from textread import *
from fight import *
from items import *
def startMap(player):
    # Free-roam game loop: each area function returns the player's next
    # chosen destination, which selects the next area.
    # NOTE(review): the first comparison uses `if`, not `elif`, so a
    # "Town Square" result falls through to the Dark Forest check on the
    # same pass -- confirm intended.
    destination = townSquare(player)
    while True:
        if destination == "Town Square":
            destination = townSquare(player)
        if destination == "Dark Forest":
            destination = toDarkForest(player, forestBoss, "Sword of Light")
        elif destination == "Castle":
            destination = toCastle(player, Ganon)
        elif destination == "Ruins":
            destination = toRuins(player, ruinsBoss, "Boss Key")
        elif destination == "Desert":
            destination = toDesert(player, desertBoss, "super potion")
def townSquare(player):
    """Hub area: intro narration, merchant dialogue, then next location."""
    paths = {
        "Town Square": True,
        "Dark Forest": True,
        "Castle": True,
        "Ruins": True,
        "Desert": True,
        "Currently": "Town Square"
    }
    counter = 0
    # NOTE(review): counter is always 0 here, so the else branch is
    # currently unreachable -- it only matters if revisit tracking lands.
    if counter == 0:
        slowText("dialogues/townsquare.txt")
        print("You are Now in the town square, you may go toward the (Dark Forest) (Castle) (Ruins) or (Desert) or (map)")
        # print("You are currently in the {}".format(paths["Currently"]))
    else:
        print("Hello {} good to see you again. You are safe here in the Town Square".format(
            player.getName()))
    print("You encounter a merchant that wants to talk to you... ")
    # Typo fixes in the two prompts ("he world" -> "the world",
    # "no know" -> "to know").
    npcdialogue = str(input("Would you like to find out more about the world?"))
    # Bug fix: the original tested the undefined name `itempicked`,
    # raising NameError whenever the answer was not exactly "yes".
    if npcdialogue == "yes" or npcdialogue == "y":
        print("This world has different areas you can explore :")
        showMap()
        worldinfo = str(
            input("Which location would you like to know more about ? (e) to exit"))
        if worldinfo == "Dark Forest" or worldinfo == "dark forest":
            print("Legend has it... there is a magical sword in the forest...")
        elif worldinfo == "Ruins" or worldinfo == "ruins":
            print("Many believe that Ganon hid something very important in the ruins.. perhaps something to get into the castle")
        elif worldinfo == "Desert" or worldinfo == "desert":
            print(
                "I have heard tales of a magical healing potion long forgotten in the desert")
        else:
            print("That is not a place on this world")
    else:
        print("On your way then Hero")
    return chooseNewLocation(paths["Currently"])
def toDarkForest(player, boss, item):
    # Dark Forest area: narration, boss fight, item reward on victory,
    # then prompt for the next destination.
    paths = {
        "Town Square": True,
        "Dark Forest": True,
        "Castle": True,
        "Ruins": True,
        "Desert": True,
        "Currently": "Dark Forest"
    }
    slowText("dialogues/darkforest.txt")
    print("You are currently in the {}".format(paths["Currently"]))
    enemyfight = encounterEnemy(player, boss)
    if enemyfight:
        addItemToInvetory(player, item)
        print(player.inventory)
    return chooseNewLocation(paths["Currently"])
    # routeToNewLocation(chooseNewLocation())
def toRuins(player, boss, item):
    """Ruins area: narration, boss fight, Boss Key reward, next stop."""
    paths = {
        "Town Square": True,
        # Bug fix: this slot held a stray function reference
        # (townSquare) instead of a reachability flag.
        "Dark Forest": True,
        "Castle": True,
        "Ruins": True,
        "Desert": True,
        # Bug fix: the reported location was "Dark Forest".
        "Currently": "Ruins"
    }
    slowText("dialogues/ruins.txt")
    enemyfight = encounterEnemy(player, boss)
    if enemyfight:
        addItemToInvetory(player, item)
        print(player.inventory)
    return chooseNewLocation(paths["Currently"])
    # routeToNewLocation(chooseNewLocation())
def toDesert(player, boss, item):
    """Desert area: narration, boss fight, potion reward, next stop."""
    paths = {
        "Town Square": True,
        "Dark Forest": True,
        "Castle": True,
        "Ruins": True,
        "Desert": True,
        # Bug fix: the reported location was "Dark Forest".
        "Currently": "Desert"
    }
    slowText("dialogues/desert.txt")
    enemyfight = encounterEnemy(player, boss)
    if enemyfight:
        addItemToInvetory(player, item)
        print(player.inventory)
    return chooseNewLocation(paths["Currently"])
    # routeToNewLocation(chooseNewLocation())
# 333
def toCastle(player, boss):
    """Final area: requires the Boss Key, then the fight against Ganon."""
    slowText("dialogues/castle.txt")
    # Bug fix: direct indexing raised KeyError when the key was missing,
    # so the "not ready" branch below could never run.
    if player.inventory.get("Boss Key"):
        print("You may proceed to the Castle")
        paths = {
            "Town Square": False,
            "Dark Forest": False,
            "Castle": False,
            "Ruins": False,
            "Desert": False,
            "Currently": "Castle"
        }
        print("You are currently in the {}".format(paths["Currently"]))
        enemyfight = encounterEnemy(player, boss)
        if enemyfight:
            slowText("dialogues/endgame.txt")
        else:
            print("You have Lost... But you are still alive...")
            if player.getHealth() > 0:
                chooseNewLocation(paths["Currently"])
            else:
                print("Game Over")
    else:
        print("You are not ready to proceed yet")
        time.sleep(3)
        return chooseNewLocation("Castle")
def showMap():
    """Print the list of world locations the player can travel to."""
    print(["Town Square", "Dark Forest", "Castle", "Ruins", "Desert"])
def chooseNewLocation(currentlocation):
    """Prompt until the player names a valid destination; return it.

    Typing "map" lists the locations.  Relies on the module-level
    `player` imported from player.py.
    """
    map = ["Town Square", "Dark Forest", "Castle", "Ruins", "Desert"]
    counter = 0
    while True:
        if counter != 0:
            print("You cannot go there! Try again!\n")
        print("{0} you are currently in {1}".format(
            player.getName(), currentlocation))
        direction = str(input("Where would you like to go? "))
        if direction in map:
            return direction
        if direction == "map":
            print(map)
        # Bug fix: counter was never incremented, so the retry warning
        # above could never fire.
        counter += 1
def routeToNewLocation(destination):
    # Dispatch helper mapping a destination name to its area function.
    # Uses the module-level `player` imported from player.py; unknown
    # destinations are silently ignored.
    if destination == "Dark Forest":
        toDarkForest(player, forestBoss, "Sword of Light")
    elif destination == "Castle":
        toCastle(player, Ganon)
    elif destination == "Ruins":
        toRuins(player, ruinsBoss, "Boss Key")
    elif destination == "Desert":
        toDesert(player, desertBoss, "super potion")
# townSquare(player)
# startMap(player)
|
{"/gamemap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/sounds.py"], "/Testermap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py"], "/textread.py": ["/player.py"], "/game.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/gamemap.py", "/sounds.py"], "/locations.py": ["/items.py", "/enemies.py", "/textread.py", "/fight.py", "/player.py"], "/fight.py": ["/player.py", "/enemies.py", "/items.py", "/sounds.py"], "/createchar.py": ["/enemies.py", "/player.py"], "/items.py": ["/player.py", "/sounds.py"]}
|
34,534,812
|
clouds16/intro-python
|
refs/heads/master
|
/textread.py
|
from player import *
import time
import sys
def slowText(file):
    # Typewriter effect: print the dialogue file one character at a
    # time, then pause.  The file's {} placeholders are filled with the
    # player's name, ability and weapon.
    # NOTE(review): the file handle is never closed -- consider `with`.
    dialogue = open(file).read().format(player.getName(),
                                        player.ability, player.item)
    for characters in dialogue:
        print(characters, end='')
        sys.stdout.flush()
        time.sleep(.020)
    time.sleep(2)
# beginJourney()
# player.makePlayer()
# enemies.createBoss()
# slowText("victory.txt")
|
{"/gamemap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/sounds.py"], "/Testermap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py"], "/textread.py": ["/player.py"], "/game.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/gamemap.py", "/sounds.py"], "/locations.py": ["/items.py", "/enemies.py", "/textread.py", "/fight.py", "/player.py"], "/fight.py": ["/player.py", "/enemies.py", "/items.py", "/sounds.py"], "/createchar.py": ["/enemies.py", "/player.py"], "/items.py": ["/player.py", "/sounds.py"]}
|
34,534,813
|
clouds16/intro-python
|
refs/heads/master
|
/game.py
|
from enemies import *
from player import *
from textread import *
from fight import *
from items import *
from gamemap import *
from sounds import *
def main():
    # Entry point: intro music and narration, then the linear campaign.
    playsound(songs["intro"])
    slowText("dialogues/intro_dialogue.txt")
    # startMap(player)
    straightMap(player)
if __name__ == "__main__":
    main()
|
{"/gamemap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/sounds.py"], "/Testermap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py"], "/textread.py": ["/player.py"], "/game.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/gamemap.py", "/sounds.py"], "/locations.py": ["/items.py", "/enemies.py", "/textread.py", "/fight.py", "/player.py"], "/fight.py": ["/player.py", "/enemies.py", "/items.py", "/sounds.py"], "/createchar.py": ["/enemies.py", "/player.py"], "/items.py": ["/player.py", "/sounds.py"]}
|
34,534,814
|
clouds16/intro-python
|
refs/heads/master
|
/locations.py
|
from items import *
from enemies import *
from textread import *
from fight import *
from player import *
class Map:
    """A game location: its enemy, reward item, visit counter, and the
    dialogue file(s) shown on arrival."""

    def __init__(self, name, enemy, item, textfile):
        self.name = name
        self.enemy = enemy
        self.item = item
        # Number of completed visits; selects which dialogue to show.
        self.round = 0
        self.textfile = textfile

    def getName(self):
        return self.name

    def getEnemy(self):
        return self.enemy

    def getItem(self):
        return self.item

    def getRound(self):
        return self.round

    def updateRound(self):
        """Record another visit and return the new count."""
        self.round = self.round + 1
        return self.round

    def getText(self, script):
        """Return the dialogue entry at index *script*."""
        return self.textfile[script]
# The world's locations.  Bug fix: getText(script) indexes into
# textfile, so the bare-string entries below previously returned a
# single character (e.g. "d") instead of a dialogue path -- wrap the
# single-script locations in one-element lists like town/castle.
town = Map("Town Square", "npc", "none", [
    "dialogues/townsquare.txt", "dialogues/returntotown.txt"])
castle = Map("Castle", "Ganon", "Dagger", [
    "dialogues/endgame.txt", "dialogues/victory.txt"])
darkforest = Map("Dark Forest", "forestBoss",
                 "Sword of Light", ["dialogues/darkforest.txt"])
ruins = Map("Ruins", "Stone Golem", "Boss Key", ["dialogues/ruins.txt"])
desert = Map("Desert", "Giant Lizard", "super potion", ["dialogues/desert.txt"])
def startTownSquare(player, place):
    """Hub: first-visit vs return narration, merchant chat, next stop."""
    # The location's round counter distinguishes the first visit.
    if place.getRound() == 0:
        slowText(place.getText(0))
        place.updateRound()
    else:
        slowText(place.getText(1))
        place.updateRound()
    print("You encounter a merchant that wants to talk to you... ")
    # Typo fixes in the two prompts ("he world" -> "the world",
    # "no know" -> "to know").
    npcdialogue = str(input("Would you like to find out more about the world?"))
    if npcdialogue == "yes" or npcdialogue == "y":
        print("This world has different areas you can explore :")
        showMap()
        worldinfo = str(
            input("Which location would you like to know more about ? (e) to exit"))
        if worldinfo == "Dark Forest" or worldinfo == "dark forest":
            print("Legend has it... there is a magical sword in the forest...")
        elif worldinfo == "Ruins" or worldinfo == "ruins":
            print("Many believe that Ganon hid something very important in the ruins.. perhaps something to get into the castle")
        elif worldinfo == "Desert" or worldinfo == "desert":
            print(
                "I have heard tales of a magical healing potion long forgotten in the desert")
        else:
            print("That is not a place on this world")
    else:
        print("On your way then Hero")
    return chooseNewLocation(place.getName())
def mapEvents(player, enemy, place):
    # Generic area flow: narrate the location's first dialogue, run the
    # boss fight, award the location's item on victory, then return the
    # player's next chosen destination.
    slowText(place.getText(0))
    enemyFight = encounterEnemy(player, enemy)
    if enemyFight:
        addItemToInvetory(player, place.getItem())
    return chooseNewLocation(place.getName())
def routeToNewLocation(destination):
    # Dispatch a destination name to its area function.
    # NOTE(review): toDarkForest/toCastle/toRuins/toDesert are not
    # defined in this module's visible code -- confirm they are in scope
    # at call time (this helper may be dead code superseded by
    # startEvents/mapEvents below).
    if destination == "Dark Forest":
        toDarkForest(player, forestBoss, "Sword of Light")
    elif destination == "Castle":
        toCastle(player, Ganon)
    elif destination == "Ruins":
        toRuins(player, ruinsBoss, "Boss Key")
    elif destination == "Desert":
        toDesert(player, desertBoss, "super potion")
def showMap():
    """Print the list of world locations the player can travel to."""
    print(["Town Square", "Dark Forest", "Castle", "Ruins", "Desert"])
def chooseNewLocation(currentlocation):
    """Prompt until the player names a valid destination; return it.

    Typing "map" lists the locations.  Relies on the module-level
    `player` imported from player.py.
    """
    map = ["Town Square", "Dark Forest", "Castle", "Ruins", "Desert"]
    counter = 0
    while True:
        if counter != 0:
            print("You cannot go there! Try again!\n")
        print("{0} you are currently in {1}".format(
            player.getName(), currentlocation))
        direction = str(input("Where would you like to go? "))
        if direction in map:
            return direction
        if direction == "map":
            print(map)
        # Bug fix: counter was never incremented, so the retry warning
        # above could never fire.
        counter += 1
def startEvents(player):
    # Location-object driven game loop (parallel to Testermap.startMap):
    # each area returns the next chosen destination name.
    destination = startTownSquare(player, town)
    while True:
        if destination == "Town Square":
            destination = startTownSquare(player, town)
        elif destination == "Dark Forest":
            destination = mapEvents(player, forestBoss, darkforest)
        elif destination == "Castle":
            destination = mapEvents(player, Ganon, castle)
        elif destination == "Ruins":
            destination = mapEvents(player, ruinsBoss, ruins)
        elif destination == "Desert":
            destination = mapEvents(player, desertBoss, desert)
# Starts the game loop as a side effect of importing this module.
startEvents(player)
|
{"/gamemap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/sounds.py"], "/Testermap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py"], "/textread.py": ["/player.py"], "/game.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/gamemap.py", "/sounds.py"], "/locations.py": ["/items.py", "/enemies.py", "/textread.py", "/fight.py", "/player.py"], "/fight.py": ["/player.py", "/enemies.py", "/items.py", "/sounds.py"], "/createchar.py": ["/enemies.py", "/player.py"], "/items.py": ["/player.py", "/sounds.py"]}
|
34,534,815
|
clouds16/intro-python
|
refs/heads/master
|
/fight.py
|
from player import *
from enemies import *
from items import *
import random as r
import time
import os
from sounds import *
def encounterEnemy(player, enemy):
    # Battle loop: returns True when the enemy's HP reaches 0; calls
    # exit() when the player dies; otherwise keeps prompting each round
    # for fight (f) or item (i).
    print("You have encountered a ", enemy.getName(),
          "Enemy. Your current HP is :", player.getHealth(), "\nEnemy HP is: ", enemy.getHealth())
    player_choice = input(
        "\nFight (f) or use item (i):")
    counter = 0
    while True:
        # Re-prompt on every round after the first.
        if counter >= 1:
            player_choice = input(
                "\nKeep fighting (f) or use item(i) ? ")
        # if player_choice == "run" or player_choice == "r":
        #     runroll = runAway(player)
        #     if runroll:
        #         break
        # PLayer chooses to fight
        if player_choice == "fight" or player_choice == "f":
            time.sleep(1)
            fightEnemy(player, enemy)
            # Player Death
            if player.getHealth() <= 0:
                playsound(songs["player-defeated"])
                print("You have been defeated")
                exit()
            # Enemy Defeated
            elif enemy.getHealth() <= 0:
                print("you have defeated the boss!")
                playsound(songs["defeat-enemy"])
                return True
        elif player_choice == "i":
            useItem()
        else:
            print("you cannot do that right now!")
        counter += 1
def runAway(player):
    """Attempt to flee: 40% success; failure costs 30 raw damage.

    Returns True on a successful escape, False otherwise.
    """
    roll = r.randint(1, 10)
    if roll > 6:
        time.sleep(1)
        print("You have successfully run away")
        return True
    time.sleep(1)
    print("You cannot run!, you took some damage while trying to run away")
    # Bug fix: apply the damage *before* reporting health -- the
    # original printed the pre-damage value.  takeDamage returns the
    # new health.
    print("Your health is now at {}".format(player.takeDamage(30)))
    return False
# Turn Based fighting logic
def fightEnemy(player, enemy):
    """One round of turn-based combat; the faster combatant strikes first.

    Each side attacks only while its own health check passes; damage is
    applied through takeDamage (armor-mitigated).
    """
    if player.getSpeed() > enemy.getSpeed():
        time.sleep(.20)
        if checkHealth(player):
            enemy.takeDamage(player.getAttack())
            print("{} uses {}".format(player.name,
                                      player.getItem()))
            print("{} hits {} first! {} is now at {} HP".format(
                player.getName(), enemy.getName(), enemy.getName(), enemy.getHealth()))
        if checkHealth(enemy):
            player.takeDamage(enemy.getAttack())
            print("{} uses {}".format(enemy.getName(), enemy.getAbility()))
            print("{} Hits You Back, your health is now {}\n".format(
                enemy.getName(), player.getHealth()))
    else:
        if checkHealth(enemy):
            time.sleep(.20)
            player.takeDamage(enemy.getAttack())
            # Bug fix: this print credited the *player* with the enemy's
            # strike (and passed a stray third format argument).
            print("{} uses {}".format(enemy.getName(), enemy.getAbility()))
            print("{} Hits You first, your health {}".format(
                enemy.getName(), player.getHealth()))
        if checkHealth(player):
            enemy.takeDamage(player.getAttack())
            # Bug fix: the counter-attack print named the enemy instead
            # of the player/weapon, and contained a garbled "f," token.
            print("{} uses {}".format(player.getName(), player.getItem()))
            print("{} hits {} back, {} is now at {} HP\n".format(
                player.getName(), enemy.getName(), enemy.getName(), enemy.getHealth()))
def checkHealth(person):
    """True if *person* is still alive (health > 0)."""
    # Explicit boolean instead of the original's implicit None on the
    # dead path (None is falsy, so callers behave identically).
    return person.getHealth() > 0
# Test Loop
#encounterEnemy(player, forestBoss)
# encounterEnemy(player, speeder)
|
{"/gamemap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/sounds.py"], "/Testermap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py"], "/textread.py": ["/player.py"], "/game.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/gamemap.py", "/sounds.py"], "/locations.py": ["/items.py", "/enemies.py", "/textread.py", "/fight.py", "/player.py"], "/fight.py": ["/player.py", "/enemies.py", "/items.py", "/sounds.py"], "/createchar.py": ["/enemies.py", "/player.py"], "/items.py": ["/player.py", "/sounds.py"]}
|
34,534,816
|
clouds16/intro-python
|
refs/heads/master
|
/createchar.py
|
import enemies
import player
import time
import sys
def beginJourney():
    # Typewriter-style printout of the intro dialogue file, character by
    # character, followed by a 2 second pause.
    # NOTE(review): the file handle is never closed -- consider `with`.
    dialogue = open("intro_dialogue.txt").read()
    for characters in dialogue:
        print(characters, end='')
        sys.stdout.flush()
        time.sleep(.020)
    time.sleep(2)
# beginJourney()
# player.makePlayer()
# enemies.createBoss()
|
{"/gamemap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/sounds.py"], "/Testermap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py"], "/textread.py": ["/player.py"], "/game.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/gamemap.py", "/sounds.py"], "/locations.py": ["/items.py", "/enemies.py", "/textread.py", "/fight.py", "/player.py"], "/fight.py": ["/player.py", "/enemies.py", "/items.py", "/sounds.py"], "/createchar.py": ["/enemies.py", "/player.py"], "/items.py": ["/player.py", "/sounds.py"]}
|
34,534,817
|
clouds16/intro-python
|
refs/heads/master
|
/items.py
|
from player import *
import random
import time
from sounds import *
def showItems():
    # Dump the global player's inventory dict to the console.
    print(player.inventory)
# Main logic for items ################################333
def useItem():
    """Prompt for an inventory item and apply it.

    Returns True on success, False when the item is not held (the
    original returned an implicit None here, which is equally falsy).
    """
    print("These are the items currently in your inventory: ",
          player.inventory.keys())
    useanitem = input("which item would you like to use? ")
    # Dict membership test replaces the manual scan (and drops the
    # unused `found` flag from the original).
    if useanitem in player.inventory:
        itemEffects(useanitem)
        playsound(songs["get-item"])
        return True
    print("item not found in inventory")
    return False
############################## secondary item usage logic #############################
def itemEffects(itempicked):
    # Apply a known inventory item's effect (heal, haste, or equip),
    # report the result, then consume the item.  Unrecognized names
    # fall through silently.
    if itempicked == "small potion" or itempicked == "medium potion" or itempicked == "super potion":
        player.useHealthPotion(player.inventory[itempicked])
        print("You have restored {} HP, your health is now {}".format(
            player.inventory[itempicked], player.getHealth()))
        removeItemFromInventory(itempicked)
    elif itempicked == "speed potion":
        player.useSpeedPotion(player.inventory[itempicked])
        print("You have increased {} Speed, your speed is now {}".format(
            player.inventory[itempicked], player.getSpeed()))
        removeItemFromInventory(itempicked)
    elif itempicked == "Bow" or itempicked == "Sword of Light":
        player.setAttack(player.inventory[itempicked])
        print("You have now equipped {} , your attack is now {}".format(
            player.inventory[itempicked], player.getAttack()))
        removeItemFromInventory(itempicked)
def removeItemFromInventory(item):
    """Delete *item* from the global player's inventory and announce it."""
    del player.inventory[item]
    print("{} has been consumed! ".format(item))
def addItemToInvetory(player, item):
    # Copy the item's value from the module-level `items` table into the
    # player's inventory, then announce it with a sound.
    # NOTE: the name keeps its historical typo ("Invetory") because
    # callers across the project use it.
    player.inventory[item] = items[item]
    print("You have added {} to your inventory".format(item))
    print(player.inventory)
    playsound(songs["get-item"])
# Master item table: potions map to their effect magnitude (HP or
# speed), weapons to an attack bonus, and "Boss Key" is a flag.
items = {
    "small potion": 20,
    "medium potion": 40,
    "super potion": 100,
    "speed potion": 30,
    "Bow": 45,
    "Dagger": 25,
    "Sword of Light": 80,
    "Boots of Swiftness": 50,
    "Boss Key": True
}
#addItemToInvetory(player, "Bow")
|
{"/gamemap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/sounds.py"], "/Testermap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py"], "/textread.py": ["/player.py"], "/game.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/gamemap.py", "/sounds.py"], "/locations.py": ["/items.py", "/enemies.py", "/textread.py", "/fight.py", "/player.py"], "/fight.py": ["/player.py", "/enemies.py", "/items.py", "/sounds.py"], "/createchar.py": ["/enemies.py", "/player.py"], "/items.py": ["/player.py", "/sounds.py"]}
|
34,534,818
|
clouds16/intro-python
|
refs/heads/master
|
/sounds.py
|
from playsound import playsound
# Game event name -> audio file path consumed by playsound().
# NOTE(review): "townsquare" is an empty path -- playsound("") will
# likely fail; confirm whether town music was removed intentionally.
songs = {
    "intro": "sounds/intro-song.mp3",
    "townsquare": "",
    "get-item": "sounds/gain-item.mp3",
    "defeat-enemy": "sounds/zelda-chest.mp3",
    "castle": "sounds/soft-2.mp3",
    "defeat-ganon": "sounds/defeat-boss.mp3",
    "player-defeated": "sounds/player-defeated.mp3"
}
# playsound(songs["townsquare"])
# playsound(songs["get-item"])
# playsound(songs["defeat-enemy"])
# playsound(songs["castle"])
# playsound(songs["defeat-ganon"])
# playsound(songs["player-defeated"])
|
{"/gamemap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/sounds.py"], "/Testermap.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py"], "/textread.py": ["/player.py"], "/game.py": ["/enemies.py", "/player.py", "/textread.py", "/fight.py", "/items.py", "/gamemap.py", "/sounds.py"], "/locations.py": ["/items.py", "/enemies.py", "/textread.py", "/fight.py", "/player.py"], "/fight.py": ["/player.py", "/enemies.py", "/items.py", "/sounds.py"], "/createchar.py": ["/enemies.py", "/player.py"], "/items.py": ["/player.py", "/sounds.py"]}
|
34,575,480
|
nettan20/dateparser
|
refs/heads/master
|
/dateparser/conf.py
|
# -*- coding: utf-8 -*-
class Settings(object):
    """Parser configuration: class-level defaults that instances may
    override via constructor kwargs or update()."""

    PREFER_DATES_FROM = 'current_period'  # past, future, current_period
    SUPPORT_BEFORE_COMMON_ERA = False
    PREFER_DAY_OF_MONTH = 'current'  # current, first, last

    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

    def update(self, key, value):
        setattr(self, key, value)
def reload_settings():
    # Replace the module-level singleton with a fresh Settings instance,
    # discarding any runtime overrides applied via update() or kwargs.
    global settings
    settings = Settings()
# Shared settings singleton consumed by the rest of the package.
settings = Settings()
|
{"/tests/test_date_parser.py": ["/dateparser/conf.py"]}
|
34,593,997
|
tomastefanadrian/gammaHeaderFileProcessing
|
refs/heads/main
|
/headerFileProcessing.py
|
class headerFileProcessing(object):
    """Parses gamma header files and extracts per-measurement day numbers."""

    def __init__(self):
        '''
        Initializes the two members the class holds:
        the file name and its contents.
        '''
        self.headerFileName = None

    def readDays(self, fileName):
        """Read all 'displacement (mm)' lines from a gamma header file.

        Returns a numpy column vector (shape (n, 1)) of the day values,
        rounded to the nearest integer, or 0 when the file fails
        validation (legacy behaviour kept for callers comparing to 0).
        """
        # Local import: this module declares no imports, but `np` was
        # referenced; bind numpy here so the method is self-contained.
        import numpy as np

        # NOTE(review): isValid is not defined on this class; it appears
        # to be supplied by a subclass or patched in -- confirm callers.
        if not self.isValid(fileName):
            showError()
            return 0
        day_values = []
        # `with` guarantees the file is closed even if parsing raises
        # (the original leaked the handle on a float() error).
        with open(fileName, 'r') as header_file:
            for line in header_file:
                if 'displacement (mm)' in line:
                    fields = line.split(' ')
                    # The day number is the last whitespace-separated token.
                    day_values.append(np.rint(float(fields[-1])))
        days = np.array(day_values)
        return days.reshape((-1, 1))
def showError():
    """Report that the gamma header file could not be opened.

    Currently prints to stdout; the commented Qt snippet below is the
    intended GUI dialog replacement.
    """
    print("Cannot open file")
    # Pending GUI version (Qt):
    #   msg = QMessageBox()
    #   msg.setIcon(QMessageBox.Critical)
    #   msg.setText("Error opening file!")
    #   msg.setInformativeText('Cannot open gamma header file')
    #   msg.setWindowTitle("Error")
    #   msg.exec_()
|
{"/test.py": ["/headerFileProcessing.py"]}
|
34,594,697
|
kergalym/Korlan
|
refs/heads/master
|
/Engine/Renderer/gpu_instancing.py
|
import math
import struct
from random import random
from direct.task.TaskManagerGlobal import taskMgr
from panda3d.bullet import BulletCapsuleShape
from panda3d.bullet import BulletRigidBodyNode
from panda3d.bullet import ZUp
from panda3d.core import NodePath
from panda3d.core import LODNode
from panda3d.core import Texture, GeomEnums, OmniBoundingVolume
class GPUInstancing:
    """
    This class implements GPU Instancing for foliage assets.

    Instance transforms are serialized into a buffer texture which a
    custom render-pipeline effect reads, so a single prefab NodePath is
    drawn many times via set_instance_count().
    """
    def __init__(self):
        # `base` and `render` are Panda3D ShowBase globals (injected builtins).
        self.base = base
        self.render = render
        # Register this manager so other game systems can reach it.
        self.base.game_instance["gpu_instancing_cls"] = self
        self._is_tree = False
        self._total_instances = 0

    def construct_prefab_lod(self, pattern):
        """Create a LODNode named after `pattern` under the foliage root."""
        prefab_lod = LODNode("{0}_LODNode".format(pattern))
        prefab_lod_np = NodePath(prefab_lod)
        prefab_lod_np.reparent_to(self.base.game_instance['foliage_np'])
        return prefab_lod, prefab_lod_np

    def setup_prefab_lod(self, prefab, prefab_lod_np, prefab_lod):
        """Assign a distance band to `prefab_lod` based on the LOD tag in the prefab name."""
        if "LOD0" in prefab.get_name():
            prefab_lod.add_switch(50.0, 0.0)
        elif "LOD1" in prefab.get_name():
            prefab_lod.add_switch(500.0, 50.0)
        elif "LOD2" in prefab.get_name():
            prefab_lod.add_switch(1000.0, 500.0)
        elif "LOD3" in prefab.get_name():
            prefab_lod.add_switch(1500.0, 1000.0)
        elif "LOD4" in prefab.get_name():
            prefab_lod.add_switch(2000.0, 1500.0)

    def _populate_instances(self, scene, placeholder, prefab, asset_type):
        """Collect a transform for every placeholder node in `scene` and draw `prefab` instanced."""
        # Only active with the PBR renderer; the instancing effect needs it.
        if self.base.game_settings['Main']['pbr_renderer'] == 'on':
            matrices = []
            floats = []
            for i, node_path in enumerate(scene.find_all_matches("**/{0}*".format(placeholder))):
                matrices.append(node_path.get_mat(render))
                if asset_type == "tree":
                    node_path.set_scale(0.5)
                # Collider creation is capped at 200 instances; the helper
                # itself ignores non-tree assets.
                self._add_colliders(prefab=prefab,
                                    node_path=node_path,
                                    asset_type=asset_type,
                                    limit=200,
                                    index=i)
            self._total_instances += len(matrices)
            print("Loaded", self._total_instances, "instances!")
            buffer_texture = self._allocate_texture_storage(matrices, floats)
            self._visualize(prefab, matrices, buffer_texture)

    def populate_instances_with_brush(self, prefab, pos, count, density):
        """Scatter `count` instances of `prefab` around `pos`; `density` scales the random X/Y spread."""
        if self.base.game_settings['Main']['pbr_renderer'] == 'on':
            matrices = []
            floats = []
            for i in range(count):
                node_path = NodePath("{0}_instance".format(prefab.get_name()))
                node_path.set_x(render, pos[0]+random()*int(density))
                node_path.set_y(render, pos[1]+random()*int(density))
                matrices.append(node_path.get_mat(render))
                # Only LOD0 (closest) variants receive physics colliders.
                if "LOD0" in node_path.get_name():
                    self.add_collider(prefab=prefab,
                                      node_path=node_path)
            buffer_texture = self._allocate_texture_storage(matrices, floats)
            self._visualize(prefab, matrices, buffer_texture)

    def _add_colliders(self, prefab, node_path, asset_type, limit, index):
        """Attach a static Bullet capsule collider to tree instances, up to `limit` of them (None = unlimited)."""
        if asset_type == "tree":
            if limit is not None and index < limit or limit is None:
                # calculate trunk's width and height
                # NOTE: shadows builtins min/max for the rest of this scope.
                min, max = prefab.get_tight_bounds()
                size = max - min
                # NOTE(review): size[1]/size[1] is always 1.0 -- possibly
                # meant size[0]/size[1]; confirm intended trunk width.
                actual_width = size[1]/size[1]
                trunk_width = actual_width / 2
                width = trunk_width
                height = size[2]
                # Add rigidbodies to place them physically
                physics_world_np = self.base.game_instance['physics_world_np']
                name = "{0}:BS".format(prefab.get_name())
                node_path_rb = node_path.attach_new_node(BulletRigidBodyNode(name))
                capsule = BulletCapsuleShape(width, height, ZUp)
                node_path_rb.node().set_mass(0.0)  # zero mass => static body
                node_path_rb.node().add_shape(capsule)
                physics_world_np.attach(node_path_rb.node())
                node_path.set_pos(0, 0, -1)
                node_path_rb.set_collide_mask(1)

    def add_collider(self, prefab, node_path):
        """Attach a static Bullet capsule collider to a single brush-painted instance."""
        # calculate trunk's width and height
        # NOTE: shadows builtins min/max for the rest of this scope.
        min, max = prefab.get_tight_bounds()
        size = max - min
        # NOTE(review): always 1.0 -- same suspected typo as _add_colliders.
        actual_width = size[1] / size[1]
        trunk_width = actual_width / 2
        width = trunk_width
        height = size[2]
        # Add rigidbodies to place them physically
        physics_world_np = self.base.game_instance['physics_world_np']
        name = "{0}:BS".format(prefab.get_name())
        node_path_rb = node_path.attach_new_node(BulletRigidBodyNode(name))
        capsule = BulletCapsuleShape(width, height, ZUp)
        node_path_rb.node().set_mass(0.0)  # zero mass => static body
        node_path_rb.node().add_shape(capsule)
        physics_world_np.attach(node_path_rb.node())
        node_path.set_pos(0, 0, -1)
        node_path_rb.set_collide_mask(1)

    def _allocate_texture_storage(self, matrices, floats):
        """Serialize instance matrices into an RGBA32 buffer texture for the shader."""
        # Allocate storage for the matrices, each matrix has 16 elements,
        # but because one pixel has four components, we need amount * 4 pixels.
        buffer_texture = Texture()
        buffer_texture.setup_buffer_texture(len(matrices) * 4,
                                            Texture.T_float,
                                            Texture.F_rgba32,
                                            GeomEnums.UH_static)
        # Serialize matrices to floats
        ram_image = buffer_texture.modify_ram_image()
        for idx, mat in enumerate(matrices):
            for i in range(4):
                for j in range(4):
                    floats.append(mat.get_cell(i, j))
        # Write the floats to the texture
        data = struct.pack("f" * len(floats), *floats)
        ram_image.set_subdata(0, len(data), data)
        return buffer_texture

    def _visualize(self, prefab, matrices, buffer_texture):
        """Apply the instancing effect to `prefab` and enable drawing of all instances."""
        # Load the effect; trees also cast shadows.
        if self._is_tree:
            is_render_shadow = True
        else:
            is_render_shadow = False
        renderpipeline_np = self.base.game_instance["renderpipeline_np"]
        renderpipeline_np.set_effect(prefab,
                                     "{0}/Engine/Renderer/effects/basic_instancing.yaml".format(
                                         self.base.game_dir),
                                     {"render_gbuffer": True,
                                      "render_forward": False,
                                      "render_shadow": is_render_shadow,
                                      "alpha_testing": True,
                                      "normal_mapping": True})
        prefab.set_shader_input("InstancingData", buffer_texture)
        prefab.set_instance_count(len(matrices))
        # We have to disable culling, so that all instances stay visible.
        prefab.node().set_bounds(OmniBoundingVolume())
        prefab.node().set_final(True)

    def set_gpu_instancing_to(self, scene, asset_type, pattern, placeholder):
        """
        Sets GPU instancing for particular foliage asset.
        :param scene: NodePath containing the placeholder nodes
        :param asset_type: asset category string (e.g. "tree")
        :param pattern: name pattern of the prefab(s) to instance
        :param placeholder: name pattern of the instance placeholder nodes
        :return: None
        """
        if (scene and isinstance(scene, NodePath)
                and isinstance(asset_type, str)
                and isinstance(pattern, str)
                and isinstance(placeholder, str)):
            if asset_type == "tree":
                self._is_tree = True
            else:
                self._is_tree = False
            # Define prefab LOD and reparent to render node
            prefab_lod, prefab_lod_np = self.construct_prefab_lod(pattern=pattern)
            # Find the asset object, we are going to in instance this object
            # multiple times
            if self.base.game_settings['Debug']['set_editor_mode'] == 'NO':
                prefabs = scene.find_all_matches("**/{0}*".format(pattern))
                if prefabs is not None:
                    for prefab in prefabs:
                        if "LODNode" not in prefab.get_name():
                            prefab.reparent_to(prefab_lod_np)
                            self.setup_prefab_lod(prefab=prefab,
                                                  prefab_lod_np=prefab_lod_np,
                                                  prefab_lod=prefab_lod)
                    if prefab_lod_np.get_num_children() > 0:
                        self._populate_instances(scene, placeholder, prefab_lod_np, asset_type)
|
{"/Settings/kmp_menu_settings.py": ["/Settings/menu_settings.py"], "/Settings/gfx_menu_settings.py": ["/Settings/menu_settings.py"], "/Engine/Renderer/rpcore/water/water_manager.py": ["/Engine/Renderer/rpcore/globals.py", "/Engine/Renderer/rpcore/water/gpu_fft.py"], "/Settings/UI/game_menu_ui.py": ["/Settings/menu_settings.py", "/Settings/game_menu_settings.py"], "/Editor/editor.py": ["/Editor/editor_ui.py", "/Editor/foliage.py", "/Editor/terrain.py"], "/Settings/dev_menu_settings.py": ["/Settings/menu_settings.py"], "/Engine/Quests/social_quests.py": ["/Engine/Quests/quest_logic.py", "/Engine/__init__.py"], "/Engine/Quests/story_quests.py": ["/Engine/__init__.py"], "/Settings/UI/keymap_menu_ui.py": ["/Settings/menu_settings.py", "/Settings/kmp_menu_settings.py"], "/Settings/UI/loading_ui.py": ["/Engine/Scenes/level_one.py", "/Settings/UI/rp_lights_manager_ui.py", "/Editor/editor.py", "/Settings/UI/hud_ui.py", "/Settings/UI/round_table_menu_ui.py", "/Engine/ChestInventory/chest_ui.py", "/Settings/UI/stat_ui.py"], "/Engine/Renderer/renderer.py": ["/Engine/Renderer/rpcore/water/projected_water.py"], "/main.py": ["/Engine/Renderer/renderer.py", "/Settings/Sound/sound.py", "/Settings/UI/menu_ui.py", "/Settings/gfx_menu_settings.py"], "/Settings/UI/options_menu_ui.py": ["/Settings/menu_settings.py", "/Settings/UI/game_menu_ui.py", "/Settings/UI/graphics_menu_ui.py", "/Settings/UI/sound_menu_ui.py", "/Settings/UI/keymap_menu_ui.py", "/Settings/UI/lang_menu_ui.py"], "/Settings/UI/dev_menu_ui.py": ["/Settings/menu_settings.py", "/Settings/dev_menu_settings.py"], "/Engine/Renderer/rpplugins/smaa/plugin.py": ["/Engine/Renderer/rpcore/globals.py", "/Engine/Renderer/rpplugins/smaa/smaa_stage.py", "/Engine/Renderer/rpplugins/smaa/jitters.py"], "/Settings/lng_menu_settings.py": ["/Settings/menu_settings.py"], "/Engine/FSM/player_fsm.py": ["/Engine/__init__.py"], "/Engine/Actors/Player/player_controller.py": ["/Engine/Actors/Player/state.py", "/Engine/FSM/player_fsm.py", 
"/Settings/Input/keyboard.py", "/Settings/Input/mouse.py", "/Engine/Inventory/sheet.py", "/Engine/Actors/Player/player_archery.py", "/Engine/Actors/Player/player_movement.py", "/Engine/Actors/Player/player_actions.py", "/Engine/__init__.py"], "/Engine/AI/npc_controller.py": ["/Engine/Physics/npc_damages.py", "/Engine/AI/npc_behavior.py", "/Engine/Actors/animator.py", "/Engine/__init__.py"], "/Settings/UI/graphics_menu_ui.py": ["/Settings/menu_settings.py", "/Settings/gfx_menu_settings.py"], "/Engine/async_level_loading.py": ["/Engine/Quests/social_quests.py", "/Engine/Quests/story_quests.py", "/Engine/Renderer/gpu_instancing.py", "/Engine/Actors/Player/player_controller.py", "/Engine/Actors/Player/state.py", "/Settings/Input/keyboard.py", "/Settings/Input/mouse.py", "/Engine/Actors/NPC/state.py"], "/Engine/Physics/physics_attr.py": ["/Engine/Physics/collision_solids.py", "/Engine/Physics/player_damages.py", "/Engine/Physics/npc_triggers.py", "/Engine/Physics/player_trigger.py"], "/Engine/Actors/Player/player_actions.py": ["/Engine/__init__.py"], "/Settings/game_menu_settings.py": ["/Settings/menu_settings.py"], "/Engine/Actors/animator.py": ["/Engine/__init__.py"], "/Settings/UI/rp_lights_manager_ui.py": ["/Settings/menu_settings.py"], "/Engine/FSM/npc_fsm.py": ["/Engine/__init__.py"], "/Engine/Inventory/sheet.py": ["/Engine/Inventory/inventory.py"], "/Settings/exit_menu_settings.py": ["/Settings/menu_settings.py", "/Settings/UI/unloading_ui.py"], "/Engine/AI/npc_directives.py": ["/Engine/__init__.py"], "/Settings/UI/cmd_dialogus_ui.py": ["/Settings/menu_settings.py"], "/Settings/sfx_menu_settings.py": ["/Settings/menu_settings.py"], "/Settings/UI/sound_menu_ui.py": ["/Settings/menu_settings.py", "/Settings/sfx_menu_settings.py"], "/Engine/Renderer/rpcore/water/gpu_fft.py": ["/Engine/Renderer/rpcore/globals.py"], "/Engine/Scenes/level_one.py": ["/Engine/Actors/Player/state.py", "/Engine/async_level_loading.py", "/Engine/Physics/physics_attr.py", 
"/Engine/AI/npc_controller.py", "/Engine/FSM/npc_fsm.py"], "/Engine/Actors/Player/player_movement.py": ["/Engine/__init__.py"], "/Settings/UI/menu_ui.py": ["/Engine/Scenes/playworker.py", "/Settings/menu_settings.py", "/Settings/dev_menu_settings.py", "/Settings/gfx_menu_settings.py", "/Settings/sfx_menu_settings.py", "/Settings/kmp_menu_settings.py", "/Settings/lng_menu_settings.py", "/Settings/UI/loading_ui.py", "/Settings/UI/dev_menu_ui.py", "/Settings/UI/options_menu_ui.py"], "/Engine/Renderer/rpcore/water/projected_water.py": ["/Engine/Renderer/rpcore/globals.py", "/Engine/Renderer/rpcore/water/water_manager.py"], "/Settings/UI/lang_menu_ui.py": ["/Settings/menu_settings.py", "/Settings/lng_menu_settings.py"], "/Engine/Inventory/inventory.py": ["/Engine/Inventory/item.py", "/Engine/Inventory/slot.py", "/Engine/Inventory/popup.py", "/Engine/Inventory/equip.py", "/Settings/menu_settings.py"], "/Engine/Actors/Player/player_archery.py": ["/Settings/Input/aim.py"], "/Settings/UI/exit_menu_ui.py": ["/Settings/menu_settings.py", "/Settings/exit_menu_settings.py"], "/Engine/AI/npc_behavior.py": ["/Engine/AI/npc_directives.py"]}
|
34,594,698
|
kergalym/Korlan
|
refs/heads/master
|
/Settings/Sound/sound.py
|
import logging
class Sound:
    """Audio manager: enables Panda3D audio and loads the game's SFX."""
    def __init__(self):
        # `base` is the Panda3D ShowBase global (injected builtin).
        self.base = base
        self.game_dir = base.game_dir
        self.logging = logging
        # Critical audio failures are appended to critical.log.
        self.logging.basicConfig(filename="critical.log", level=logging.CRITICAL)

    def openal_mgr(self):
        """ Function    : openal_mgr

            Description : Enable audio output and preload GUI/ambient
                          sound effects collected by the ShowBase.

            Input       : None

            Output      : None

            Return      : None
        """
        self.base.enable_all_audio()
        self.base.enable_music(True)
        self.base.enable_sound_effects(True)
        # sounds_collector() maps sound keys to file paths.
        sounds = self.base.sounds_collector()
        if sounds and isinstance(sounds, dict):
            self.base.sound_gui_click = self.base.loader.load_sfx(sounds.get('zapsplat_button_click'))
            self.base.sound_sfx_nature = self.base.loader.load_sfx(sounds.get('forest birds'))
            # TODO: do something with them
            # m_sound = self.base.loader.load_sfx(sounds["theme"])
            # sfx_mgr = self.base.sfx_manager_list[0]
            # music_mgr = self.base.music_manager
            # if m_sound.status() != m_sound.PLAYING:
            #     m_sound.play()
        else:
            self.logging.critical("CRITICAL: Sound files not found")
|
{"/Settings/kmp_menu_settings.py": ["/Settings/menu_settings.py"], "/Settings/gfx_menu_settings.py": ["/Settings/menu_settings.py"], "/Engine/Renderer/rpcore/water/water_manager.py": ["/Engine/Renderer/rpcore/globals.py", "/Engine/Renderer/rpcore/water/gpu_fft.py"], "/Settings/UI/game_menu_ui.py": ["/Settings/menu_settings.py", "/Settings/game_menu_settings.py"], "/Editor/editor.py": ["/Editor/editor_ui.py", "/Editor/foliage.py", "/Editor/terrain.py"], "/Settings/dev_menu_settings.py": ["/Settings/menu_settings.py"], "/Engine/Quests/social_quests.py": ["/Engine/Quests/quest_logic.py", "/Engine/__init__.py"], "/Engine/Quests/story_quests.py": ["/Engine/__init__.py"], "/Settings/UI/keymap_menu_ui.py": ["/Settings/menu_settings.py", "/Settings/kmp_menu_settings.py"], "/Settings/UI/loading_ui.py": ["/Engine/Scenes/level_one.py", "/Settings/UI/rp_lights_manager_ui.py", "/Editor/editor.py", "/Settings/UI/hud_ui.py", "/Settings/UI/round_table_menu_ui.py", "/Engine/ChestInventory/chest_ui.py", "/Settings/UI/stat_ui.py"], "/Engine/Renderer/renderer.py": ["/Engine/Renderer/rpcore/water/projected_water.py"], "/main.py": ["/Engine/Renderer/renderer.py", "/Settings/Sound/sound.py", "/Settings/UI/menu_ui.py", "/Settings/gfx_menu_settings.py"], "/Settings/UI/options_menu_ui.py": ["/Settings/menu_settings.py", "/Settings/UI/game_menu_ui.py", "/Settings/UI/graphics_menu_ui.py", "/Settings/UI/sound_menu_ui.py", "/Settings/UI/keymap_menu_ui.py", "/Settings/UI/lang_menu_ui.py"], "/Settings/UI/dev_menu_ui.py": ["/Settings/menu_settings.py", "/Settings/dev_menu_settings.py"], "/Engine/Renderer/rpplugins/smaa/plugin.py": ["/Engine/Renderer/rpcore/globals.py", "/Engine/Renderer/rpplugins/smaa/smaa_stage.py", "/Engine/Renderer/rpplugins/smaa/jitters.py"], "/Settings/lng_menu_settings.py": ["/Settings/menu_settings.py"], "/Engine/FSM/player_fsm.py": ["/Engine/__init__.py"], "/Engine/Actors/Player/player_controller.py": ["/Engine/Actors/Player/state.py", "/Engine/FSM/player_fsm.py", 
"/Settings/Input/keyboard.py", "/Settings/Input/mouse.py", "/Engine/Inventory/sheet.py", "/Engine/Actors/Player/player_archery.py", "/Engine/Actors/Player/player_movement.py", "/Engine/Actors/Player/player_actions.py", "/Engine/__init__.py"], "/Engine/AI/npc_controller.py": ["/Engine/Physics/npc_damages.py", "/Engine/AI/npc_behavior.py", "/Engine/Actors/animator.py", "/Engine/__init__.py"], "/Settings/UI/graphics_menu_ui.py": ["/Settings/menu_settings.py", "/Settings/gfx_menu_settings.py"], "/Engine/async_level_loading.py": ["/Engine/Quests/social_quests.py", "/Engine/Quests/story_quests.py", "/Engine/Renderer/gpu_instancing.py", "/Engine/Actors/Player/player_controller.py", "/Engine/Actors/Player/state.py", "/Settings/Input/keyboard.py", "/Settings/Input/mouse.py", "/Engine/Actors/NPC/state.py"], "/Engine/Physics/physics_attr.py": ["/Engine/Physics/collision_solids.py", "/Engine/Physics/player_damages.py", "/Engine/Physics/npc_triggers.py", "/Engine/Physics/player_trigger.py"], "/Engine/Actors/Player/player_actions.py": ["/Engine/__init__.py"], "/Settings/game_menu_settings.py": ["/Settings/menu_settings.py"], "/Engine/Actors/animator.py": ["/Engine/__init__.py"], "/Settings/UI/rp_lights_manager_ui.py": ["/Settings/menu_settings.py"], "/Engine/FSM/npc_fsm.py": ["/Engine/__init__.py"], "/Engine/Inventory/sheet.py": ["/Engine/Inventory/inventory.py"], "/Settings/exit_menu_settings.py": ["/Settings/menu_settings.py", "/Settings/UI/unloading_ui.py"], "/Engine/AI/npc_directives.py": ["/Engine/__init__.py"], "/Settings/UI/cmd_dialogus_ui.py": ["/Settings/menu_settings.py"], "/Settings/sfx_menu_settings.py": ["/Settings/menu_settings.py"], "/Settings/UI/sound_menu_ui.py": ["/Settings/menu_settings.py", "/Settings/sfx_menu_settings.py"], "/Engine/Renderer/rpcore/water/gpu_fft.py": ["/Engine/Renderer/rpcore/globals.py"], "/Engine/Scenes/level_one.py": ["/Engine/Actors/Player/state.py", "/Engine/async_level_loading.py", "/Engine/Physics/physics_attr.py", 
"/Engine/AI/npc_controller.py", "/Engine/FSM/npc_fsm.py"], "/Engine/Actors/Player/player_movement.py": ["/Engine/__init__.py"], "/Settings/UI/menu_ui.py": ["/Engine/Scenes/playworker.py", "/Settings/menu_settings.py", "/Settings/dev_menu_settings.py", "/Settings/gfx_menu_settings.py", "/Settings/sfx_menu_settings.py", "/Settings/kmp_menu_settings.py", "/Settings/lng_menu_settings.py", "/Settings/UI/loading_ui.py", "/Settings/UI/dev_menu_ui.py", "/Settings/UI/options_menu_ui.py"], "/Engine/Renderer/rpcore/water/projected_water.py": ["/Engine/Renderer/rpcore/globals.py", "/Engine/Renderer/rpcore/water/water_manager.py"], "/Settings/UI/lang_menu_ui.py": ["/Settings/menu_settings.py", "/Settings/lng_menu_settings.py"], "/Engine/Inventory/inventory.py": ["/Engine/Inventory/item.py", "/Engine/Inventory/slot.py", "/Engine/Inventory/popup.py", "/Engine/Inventory/equip.py", "/Settings/menu_settings.py"], "/Engine/Actors/Player/player_archery.py": ["/Settings/Input/aim.py"], "/Settings/UI/exit_menu_ui.py": ["/Settings/menu_settings.py", "/Settings/exit_menu_settings.py"], "/Engine/AI/npc_behavior.py": ["/Engine/AI/npc_directives.py"]}
|
34,619,028
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/endpoints/race_explorer_endpoints.py
|
from flask import request, json
from flask.blueprints import Blueprint
from rh.util.RHUtils import VTX_TABLE
from rh.app import web
from rh.app import race_explorer_core as racex
def createBlueprint(rhconfig, TIMER_ID, INTERFACE, RHData, rhserver):
    """Build the race-explorer Flask blueprint.

    Routes are closures over the timer configuration (`rhconfig`,
    `TIMER_ID`, `INTERFACE`), the persistence layer (`RHData`) and the
    server context (`rhserver`). Returns the Blueprint to register.
    """
    APP = Blueprint('race_explorer', __name__, static_url_path='/race-explorer', static_folder='../../../race-explorer/build')

    @APP.route('/mqttConfig')
    def mqtt_config():
        # Expose the MQTT topic names the frontend should use.
        return {
            'timerAnnTopic': rhconfig.MQTT['TIMER_ANN_TOPIC'],
            'timerCtrlTopic': rhconfig.MQTT['TIMER_CTRL_TOPIC'],
            'raceAnnTopic': rhconfig.MQTT['RACE_ANN_TOPIC'],
            'sensorAnnTopic': rhconfig.MQTT['SENSOR_ANN_TOPIC']
        }

    @APP.route('/raceResults')
    def race_results():
        """
        Return race results.
        ---
        responses:
            200:
                description: Race results
                content:
                    application/json:
                        schema:
                            $ref: static/schemas/race-results.json
                    application/jsonl:
                        schema:
                            $ref: static/schemas/race-result.json
        """
        # Content-negotiate between the aggregated JSON document and the
        # line-delimited message stream.
        if 'application/json' in request.accept_mimetypes:
            return race_results_json()
        elif 'application/jsonl' in request.accept_mimetypes:
            return race_results_jsonl()
        else:
            return '', 406

    @APP.route('/raceResults.jsonl')
    def race_results_jsonl():
        # One JSON message per line (JSONL).
        msgs = racex.export_results(RHData)
        return '\n'.join([json.dumps(msg) for msg in msgs]), 200, {'Content-Type': 'application/jsonl'}

    @APP.route('/raceResults.json')
    def race_results_json():
        msgs = racex.export_results(RHData)
        results = racex.pilot_results(msgs)
        return results, 200, {'Content-Type': 'application/json'}

    @APP.route('/raceMetrics')
    def race_metrics_get():
        # Metrics computed from the stored results.
        msgs = racex.export_results(RHData)
        results = racex.pilot_results(msgs)
        event_data = racex.export_event(RHData)
        results = racex.calculate_metrics(results, event_data)
        return results, 200, {'Content-Type': 'application/json'}

    @APP.route('/raceMetrics', methods=['POST'])
    def race_metrics_post():
        # Metrics computed from caller-supplied results.
        results = request.get_json()
        event_data = racex.export_event(RHData)
        results = racex.calculate_metrics(results, event_data)
        return results, 200, {'Content-Type': 'application/json'}

    @APP.route('/eventLeaderboard')
    def event_leaderboard():
        leaderboard = racex.export_leaderboard(RHData)
        return leaderboard, 200, {'Content-Type': 'application/json'}

    @APP.route('/raceEvent')
    def race_event_get():
        """
        Return event setup.
        ---
        responses:
            200:
                description: Event setup
                content:
                    application/json:
                        schema:
                            $ref: static/schemas/race-event.json
        """
        data = racex.export_event(RHData)
        return data

    @APP.route('/raceEvent', methods=['PUT'])
    def race_event_put():
        """
        Sets event info.
        ---
        requestBody:
            content:
                application/json:
                    schema:
                        $ref: static/schemas/race-event.json
        """
        data = request.get_json()
        racex.import_event(data, rhserver)
        return '', 204

    @APP.route('/raceEvent', methods=['POST'])
    def race_event_post():
        # ?sync pushes the event to the upstream service and returns the
        # (possibly updated) event; ?results uploads results only.
        # NOTE(review): no return when neither query arg is present --
        # Flask raises on a None response; confirm whether a 400 was meant.
        if 'sync' in request.args:
            web.sync_event(rhserver)
            data = racex.export_event(RHData)
            return data
        elif 'results' in request.args:
            web.upload_results(rhserver)
            return ''

    @APP.route("/raceClasses")
    def race_classes_get():
        """
        Gets race classes.
        ---
        requestBody:
            content:
                application/json: {}
        """
        # Format id 0 is the implicit 'Free' format.
        race_formats_by_id = {0: 'Free'}
        for race_format in RHData.get_raceFormats():
            race_formats_by_id[race_format.id] = race_format.name
        roots = {}
        rhroots = [rhraceclass for rhraceclass in RHData.get_raceClasses() if rhraceclass.parent_id is None]
        raceclasses_by_id = {}
        # Breadth-ish traversal from the root classes, rebuilding the
        # class tree as nested {name: {description, format, children}}.
        q = []
        q.extend(rhroots)
        while q:
            rhraceclass = q.pop()
            raceclass = {'description': rhraceclass.description, 'children': {}}
            raceclass['format'] = race_formats_by_id[rhraceclass.format_id]
            raceclasses_by_id[rhraceclass.id] = raceclass
            if rhraceclass.parent_id:
                parent_raceclass = raceclasses_by_id[rhraceclass.parent_id]
                children = parent_raceclass['children']
            else:
                children = roots
            children[rhraceclass.name] = raceclass
            q.extend(rhraceclass.children)
        return {'classes': roots}

    @APP.route("/raceClasses", methods=['PUT'])
    def race_classes_put():
        """
        Sets race classes.
        ---
        requestBody:
            content:
                application/json: {}
        """
        data = request.get_json()
        # Track names present before the update so leftovers get deleted.
        existing_race_class_names = set()
        rhraceclasses_by_name = {}
        for rhraceclass in RHData.get_raceClasses():
            existing_race_class_names.add(rhraceclass.name)
            rhraceclasses_by_name[rhraceclass.name] = rhraceclass
        q = []
        def addNodes(children, parent_id):
            # Upsert each (name, class) pair and queue it for recursion
            # into its own children.
            q.extend(children)
            for race_class_name, race_class in children:
                rhraceclass = rhraceclasses_by_name.get(race_class_name)
                if rhraceclass:
                    RHData.alter_raceClass({'id': rhraceclass.id,
                        'name': race_class_name,
                        'description': race_class['description'],
                        'parent_id': parent_id})
                    existing_race_class_names.remove(race_class_name)
                else:
                    rhraceclass = RHData.add_raceClass(init={
                        'name': race_class_name,
                        'description': race_class['description'],
                        'parent_id': parent_id})
                    rhraceclasses_by_name[race_class_name] = rhraceclass
        addNodes(data['classes'].items(), None)
        while q:
            race_class_name, race_class = q.pop()
            rhraceclass = rhraceclasses_by_name[race_class_name]
            addNodes(race_class['children'].items(), rhraceclass.id)
        # Delete classes not present in the submitted tree.
        for race_class_name in existing_race_class_names:
            rhraceclass = rhraceclasses_by_name[race_class_name]
            if rhraceclass:
                RHData.delete_raceClass(rhraceclass.id)
        return '', 204

    @APP.route('/trackLayout')
    def track_layout_get():
        """
        Return track layout.
        ---
        responses:
            200:
                description: Track layout
                content:
                    application/json:
                        schema:
                            $ref: static/schemas/race-track.json
        """
        track = RHData.get_optionJson('trackLayout')
        # Fall back to (and persist) the server default when unset/incomplete.
        if not track or not track.get('locationType') or not track.get('layout'):
            track = rhserver['DEFAULT_TRACK']
            RHData.set_optionJson('trackLayout', track)
        return track

    @APP.route('/trackLayout', methods=['PUT'])
    def track_layout_put():
        """
        Sets track layout.
        ---
        requestBody:
            content:
                application/json:
                    schema:
                        $ref: static/schemas/race-track.json
        """
        data = request.get_json()
        RHData.set_optionJson('trackLayout', data)
        return '', 204

    @APP.route('/pilots')
    def pilots_get():
        rhpilots = RHData.get_pilots()
        pilots = {}
        for rhpilot in rhpilots:
            pilots[rhpilot.callsign] = {
                'name': rhpilot.name,
                'url': rhpilot.url
            }
        return {'pilots': pilots}

    @APP.route('/pilots', methods=['PUT'])
    def pilots_put():
        # Upsert pilots by callsign; delete any not in the payload.
        data = request.get_json()
        existing_pilot_callsigns = set()
        rhpilots_by_callsign = {}
        for rhpilot in RHData.get_pilots():
            existing_pilot_callsigns.add(rhpilot.callsign)
            rhpilots_by_callsign[rhpilot.callsign] = rhpilot
        for callsign, pilot_data in data['pilots'].items():
            rhpilot = rhpilots_by_callsign.get(callsign)
            if rhpilot:
                RHData.alter_pilot({'pilot_id': rhpilot.id,
                    'callsign': callsign,
                    'name': pilot_data['name']})
                existing_pilot_callsigns.remove(callsign)
            else:
                rhpilot = RHData.add_pilot(init={
                    'callsign': callsign,
                    'name': pilot_data['name']})
                rhpilots_by_callsign[callsign] = rhpilot
        for callsign in existing_pilot_callsigns:
            rhpilot = rhpilots_by_callsign[callsign]
            if rhpilot:
                RHData.delete_pilot(rhpilot.id)
        return '', 204

    @APP.route('/timerMapping')
    def timer_mapping_get():
        timerMapping = RHData.get_optionJson('timerMapping')
        if not timerMapping:
            # Default: every node of every manager maps to Start/finish.
            timerMapping = {
                TIMER_ID: {
                    nm.addr: [{'location': 'Start/finish', 'seat': node.index} for node in nm.nodes]
                    for nm in INTERFACE.node_managers
                }
            }
            RHData.set_optionJson('timerMapping', timerMapping)
        return timerMapping

    @APP.route('/timerMapping', methods=['PUT'])
    def timer_mapping_put():
        data = request.get_json()
        RHData.set_optionJson('timerMapping', data)
        return '', 204

    @APP.route('/timerSetup')
    def timer_setup():
        """
        Return timer setup.
        ---
        responses:
            200:
                description: Timer setup
                content:
                    application/jsonl: {}
        """
        if 'application/jsonl' in request.accept_mimetypes:
            return timer_setup_jsonl()
        else:
            return '', 406

    @APP.route('/timerSetup.jsonl')
    def timer_setup_jsonl():
        # One message per node manager, then one per node with only the
        # fields that are actually configured.
        msgs = []
        for node_manager in INTERFACE.node_managers:
            msg = {'timer': TIMER_ID, 'nodeManager': node_manager.addr, 'type': node_manager.__class__.TYPE}
            msgs.append(msg)
            for node in node_manager.nodes:
                msg = {'timer': TIMER_ID, 'nodeManager': node_manager.addr, 'node': node.multi_node_index, 'frequency': node.frequency}
                if node.bandChannel is not None:
                    msg['bandChannel'] = node.bandChannel
                if node.enter_at_level is not None:
                    msg['enterTrigger'] = node.enter_at_level
                if node.exit_at_level is not None:
                    msg['exitTrigger'] = node.exit_at_level
                if hasattr(node, 'threshold') and node.threshold is not None:
                    msg['threshold'] = node.threshold
                if hasattr(node, 'gain') and node.gain is not None:
                    msg['gain'] = node.gain
                msgs.append(msg)
        return '\n'.join([json.dumps(msg) for msg in msgs]), 200, {'Content-Type': 'application/jsonl'}

    @APP.route('/vtxTable')
    def vtx_table():
        return VTX_TABLE

    return APP
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,029
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/sensors/linux_sensor.py
|
# coding=UTF-8
import logging
from . import Sensor, Reading
logger = logging.getLogger(__name__)
def file_url(file):
    """Return a ``file://`` URL for the given filesystem path."""
    return ''.join(('file://', file))
class TemperatureSensor(Sensor):
    """Core temperature read from a sysfs thermal-zone file."""
    def __init__(self, file, name):
        super().__init__(url=file_url(file), name=name)
        self.file = file
        self.description = 'Core temperature'
        # Prime the first reading so `temperature` is valid immediately.
        self.update()

    def update(self):
        # sysfs reports millidegrees Celsius; convert to degrees C.
        with open(self.file, 'r') as f:
            self._temp = float(f.read())/1000.0

    @Reading(units='°C')
    def temperature(self):
        return self._temp
class BatterySensor(Sensor):
    """Battery state read from a sysfs power-supply directory."""
    def __init__(self, file, name):
        # `file` is the power-supply directory; individual values are
        # read from files within it.
        super().__init__(url=file_url(file), name=name)
        self.file = file
        self.description = 'Battery'
        # Prime the first readings so all properties are valid immediately.
        self.update()

    def update(self):
        # sysfs units: temp in tenths of a degree C, current in mA,
        # voltage in microvolts; converted to C / A / V here.
        with open(self.file+'/temp', 'r') as f:
            self._temp = float(f.read())/10.0
        with open(self.file+'/current_now', 'r') as f:
            self._current = float(f.read())/1000.0
        with open(self.file+'/voltage_now', 'r') as f:
            self._voltage = float(f.read())/1000000.0
        # capacity is reported as-is (presumably percent, despite the
        # 'Ah' reading units below -- TODO confirm).
        with open(self.file+'/capacity', 'r') as f:
            self._capacity = float(f.read())

    @Reading(units='°C')
    def temperature(self):
        return self._temp

    @Reading(units='A')
    def current(self):
        return self._current

    @Reading(units='V')
    def voltage(self):
        return self._voltage

    @Reading(units='Ah')
    def capacity(self):
        return self._capacity
def discover(config, *args, **kwargs):
    """Probe standard Linux sysfs paths and return the available sensors.

    Each probe can be disabled or renamed via `config`, keyed by the
    sensor's file:// URL. A missing file is logged (INFO when the user
    configured the sensor, DEBUG otherwise) rather than raised.
    """
    found = []
    temp_file = '/sys/class/thermal/thermal_zone0/temp'
    temp_cfg = config.get(file_url(temp_file), {})
    if temp_cfg.get('enabled', True):
        try:
            # Open once as a readability probe before constructing.
            with open(temp_file, 'r') as f:
                found.append(TemperatureSensor(temp_file, temp_cfg.get('name', 'Core')))
        except IOError as err:
            level = logging.INFO if temp_cfg else logging.DEBUG
            logger.log(level, 'Core temperature not available ({0})'.format(err))
    battery_dir = '/sys/class/power_supply/battery'
    battery_cfg = config.get(file_url(battery_dir), {})
    if battery_cfg.get('enabled', True):
        try:
            with open(battery_dir+'/present', 'r') as f:
                if int(f.read()) == 1:
                    found.append(BatterySensor(battery_dir, battery_cfg.get('name', 'Battery')))
        except IOError as err:
            level = logging.INFO if battery_cfg else logging.DEBUG
            logger.log(level, 'Battery status not available ({0})'.format(err))
    return found
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,030
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/tools/signal_detection.py
|
import sys
import csv
from rh.app import Database
from flask import Flask
import json
import rh.util.persistent_homology as ph
import matplotlib.pyplot as plt
from typing import Dict, List, Tuple
def load_races(db_file):
    """Load all saved pilot races (with lap counts and RSSI history) from a DB file.

    Returns a list of tuples:
    (pilot_race_id, round_id, heat_id, node_index, pilot_id,
     lap_count, enter_at, exit_at, history_times, history_values)
    sorted by pilot_race_id.
    """
    app = Flask(__name__)
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///../../' + db_file
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    db = Database.DB
    db.init_app(app)
    db.app = app
    # all non-aggregated columns; also used verbatim in the GROUP BY
    group_cols = (
        Database.SavedPilotRace.id,
        Database.SavedRaceMeta.round_id,
        Database.SavedRaceMeta.heat_id,
        Database.SavedPilotRace.node_index,
        Database.SavedPilotRace.pilot_id,
        Database.SavedPilotRace.enter_at,
        Database.SavedPilotRace.exit_at,
        Database.SavedPilotRace.history_times,
        Database.SavedPilotRace.history_values,
    )
    query = db.session.query(
        *group_cols[:5],
        db.func.count(Database.SavedRaceLap.id),  # lap count per pilot race
        *group_cols[5:]
    ).join(
        Database.SavedRaceMeta,
        Database.SavedPilotRace.race_id == Database.SavedRaceMeta.id
    ).outerjoin(
        Database.SavedRaceLap,
        (Database.SavedPilotRace.id == Database.SavedRaceLap.pilotrace_id)
        & (Database.SavedRaceMeta.id == Database.SavedRaceLap.race_id)
    ).group_by(*group_cols)
    races = []
    for row in query:
        # history columns are stored as JSON strings; decode them in place
        races.append(row[0:-2] + (json.loads(row[-2]), json.loads(row[-1])))
    races.sort(key=lambda race: race[0])
    return races
def list_races(races):
    """Print one indexed summary line per race tuple (history columns omitted)."""
    fmt = "[{}] ID {} round {} heat {} node {} pilot {} laps {} enter {} exit {}"
    for idx, race in enumerate(races):
        print(fmt.format(idx, *race[0:-2]))
def analyze_race(race, show_plots=True):
    """Run persistent-homology peak analysis on one race's RSSI history.

    Returns (node_index, min_bound, max_bound) where the bounds bracket the
    estimated lap-detection threshold; (node_index, 0, 255) when the race
    has no RSSI history.
    """
    print("ID {} round {} heat {} node {} pilot {} laps {} enter {} exit {}".format(*race[0:-2]))
    lap_count = race[-5]
    rssi_times, rssi_values = race[-2], race[-1]
    if not rssi_values:
        # no history recorded - return the full RSSI range
        return (race[3], 0, 255)
    ccs = ph.sortByLifetime(ph.calculatePeakPersistentHomology(rssi_values))
    top_n = lap_count if lap_count else len(ccs)
    print("Top {} peaks:\n{}".format(top_n, [str(cc) for cc in ccs[0:top_n]]))
    min_bound, max_bound = ph.findBreak(ccs)
    threshold = (min_bound + max_bound)/2
    estimated_laps = len([cc for cc in ccs if cc.lifetime() > threshold])
    print("Estimated laps ({}): {}\n".format(threshold, estimated_laps))
    if show_plots:
        _fig, axs = plt.subplots(1, 3, figsize=(8,4))
        axs[0].plot(rssi_times, rssi_values)
        ph.plotPersistenceDiagram(axs[1], ccs)
        ph.plotLifetimes(axs[2], ccs)
        plt.show()
    return (race[3], min_bound, max_bound)
def export(race, csv_path):
    """Write a race's RSSI history to csv_path as (milliseconds, rssi) rows."""
    rssi_times, rssi_values = race[-2], race[-1]
    with open(csv_path, 'w', newline='') as f:
        writer = csv.writer(f)
        # times are in seconds; emit integer milliseconds
        for i, t in enumerate(rssi_times):
            writer.writerow([int(t*1000), rssi_values[i]])
if __name__ == '__main__':
    # DB file may be given as the first argument; defaults to database.db
    db_file = sys.argv[1] if len(sys.argv) > 1 else 'database.db'
    races = load_races(db_file)
    # per-node lists of (min, max) threshold bounds across all races
    node_bounds: Dict[int, Tuple[List[int], List[int]]] = {}
    for race in races:
        node, lo, hi = analyze_race(race, show_plots=False)
        mins, maxs = node_bounds.setdefault(node, ([], []))
        mins.append(lo)
        maxs.append(hi)
    for node, (mins, maxs) in node_bounds.items():
        # tightest interval consistent with every race on this node
        lower_bound = max(mins)
        upper_bound = min(maxs)
        threshold = (lower_bound + upper_bound)/2
        print("Node {}: threshold {} ({}-{})".format(node, threshold, lower_bound, upper_bound))
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,031
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/util/RHGPIO.py
|
# Utility class for Raspberry Pi GPIO functions
import time
try:
import RPi.GPIO as GPIO
RealRPiGPIOFlag = True
except ImportError:
from fake_rpi.RPi import GPIO
RealRPiGPIOFlag = False
except: # need extra exception catch for Travis CI tests
from fake_rpi.RPi import GPIO
RealRPiGPIOFlag = False
RHGPIO_S32ID_PIN = 25 # input is tied low on S32_BPill PCB
S32BPillBoardFlag = False
def isRealRPiGPIO():
    """Return True if the real RPi.GPIO module was imported (False when the fake_rpi fallback is in use)."""
    return RealRPiGPIOFlag
def isS32BPillBoard():
    """Return True if an S32_BPill board was detected by setS32BPillBoardFlag()."""
    return S32BPillBoardFlag
def setS32BPillBoardFlag():
    """Probe the board-ID GPIO pin and latch the S32_BPill detection flag.

    On an S32_BPill PCB the ID pin is tied low; with a pull-up enabled a real
    Pi will therefore read 0 on that board and 1 otherwise.
    """
    global S32BPillBoardFlag
    S32BPillBoardFlag = True
    # if input tied low then set flag identifying S32_BPill board
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(RHGPIO_S32ID_PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)
    time.sleep(0.05)  # let the pull-up settle before sampling
    # only a real Pi reading a low level counts as a detection
    S32BPillBoardFlag = RealRPiGPIOFlag and not GPIO.input(RHGPIO_S32ID_PIN)
    # leave the pin as a plain input (drop the pull-up)
    GPIO.setup(RHGPIO_S32ID_PIN, GPIO.IN)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,032
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/tools/plot_rssi.py
|
import numpy as np
from scipy.fft import rfft, rfftfreq
import matplotlib.pyplot as plt
import sys
import csv
import rh.util.persistent_homology as ph
from scipy.ndimage import median_filter
# Analysis parameters
fs = 1000  # sample freq
median_window_size = 5  # should be odd
ph_history_size = 12

# Load raw RSSI samples (one value per CSV row) from the file on the command line.
with open(sys.argv[1]) as f:
    rssi_list = [float(row[0]) for row in csv.reader(f)]
ts = np.arange(len(rssi_list))/fs
rssis = np.array(rssi_list)
# de-spike with a trailing median filter
rssis = median_filter(rssis, median_window_size, origin=(median_window_size-1)//2)
def plot_signal(rssis):
    """Plot the RSSI trace alongside its one-sided amplitude spectrum."""
    spectrum = rfft(rssis, norm='forward')
    freqs = rfftfreq(len(rssis), 1/fs)
    fig, (sig_ax, spec_ax) = plt.subplots(1, 2, figsize=(12,6))
    fig.canvas.manager.set_window_title('Signal')
    sig_ax.set_title('Signal')
    sig_ax.set_ylabel('RSSI')
    sig_ax.set_xlabel('Time / s')
    sig_ax.plot(ts, rssis)
    spec_ax.set_title('Spectrum')
    spec_ax.set_xlabel('Frequency / Hz')
    spec_ax.plot(freqs, np.abs(spectrum))
    fig.tight_layout()
    plt.show(block=False)

plot_signal(rssis)
def plot_ph(title, ccs):
    """Show sample lifetimes, the persistence diagram and the lifetime plot
    for a set of connected components, with the estimated threshold overlaid.
    Hovering over diagram points shows how many values coincide there."""
    def add_threshold_line(axs, threshold):
        # horizontal dashed threshold marker with a small label
        xlim = axs.get_xlim()
        axs.plot(xlim, [threshold, threshold], '--', c='tomato')
        axs.annotate('threshold', (xlim[0], threshold), xytext=(3,3), textcoords='offset points', fontsize='x-small')
    def add_threshold_diagonal(axs, threshold):
        # diagonal y = x + threshold marker (persistence-diagram cut-off)
        xlim = axs.get_xlim()
        axs.plot(xlim, [xlim[0] + threshold, xlim[1] + threshold], '--', c='tomato')
    def add_tooltip(axs):
        # invisible annotation reused as a hover tooltip; stashed on the axes
        tooltip = axs.annotate('', (0,0), xytext=(3,3), textcoords='offset points', fontsize='x-small')
        tooltip.set_visible(False)
        axs._tooltip = tooltip
    def on_hover_tooltip(event):
        # mouse-motion callback: show how many scatter points are under the cursor
        axs = event.inaxes
        if axs is not None and axs.collections and hasattr(axs, '_tooltip'):
            tooltip = axs._tooltip
            contains_values, info = axs.collections[0].contains(event)
            if contains_values:
                tooltip.xy = (event.xdata, event.ydata)
                tooltip.set_text("{} values".format(len(info['ind'])) if len(info['ind']) > 1 else "1 value")
                tooltip.set_visible(True)
            else:
                tooltip.xy = (0, 0)
                tooltip.set_text('')
                tooltip.set_visible(False)
            axs.get_figure().canvas.draw_idle()
    min_bound, max_bound = ph.findBreak(ccs)
    threshold = (min_bound + max_bound)/2
    fig, axs = plt.subplots(1, 3, figsize=(12,4))
    fig.canvas.manager.set_window_title(title)
    fig.canvas.mpl_connect('motion_notify_event', on_hover_tooltip)
    axs[0].set_title('Sample lifetimes')
    ph.plotSampleLifetimes(axs[0], ts, ccs)
    add_threshold_line(axs[0], threshold)
    axs[1].set_title('Persistence diagram')
    ph.plotPersistenceDiagram(axs[1], ccs)
    add_threshold_diagonal(axs[1], threshold)
    add_tooltip(axs[1])
    axs[2].set_title('Persistence lifetimes')
    ph.plotLifetimes(axs[2], ccs)
    add_threshold_line(axs[2], threshold)
    add_tooltip(axs[2])
    fig.tight_layout()
    plt.show(block=False)
# Full-history persistent homology over the whole (filtered) signal
ccs = ph.calculatePeakPersistentHomology(rssis)
ccs = ph.sortByLifetime(ccs)
plot_ph('Persistent Homology', ccs)
# Realtime variant: replay the signal sample by sample with a bounded history
rt_ccs = [ph.calculateRealtimePeakPersistentHomology(rssis[:i+1], ph_history_size) for i in range(len(rssis))]
rt_ccs = ph.sortByLifetime([cc for cc in rt_ccs if cc is not None])
plot_ph('Realtime Persistent Homology', rt_ccs)
plt.show()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,033
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/interface/LapRFInterface.py
|
'''LapRF interface layer.'''
import logging
import gevent
import serial
import socket
from .BaseHardwareInterface import BaseHardwareInterface, BaseHardwareInterfaceListener
from .Node import Node, NodeManager
from rh.sensors import Sensor, Reading
from . import laprf_protocol as laprf
from . import ExtremumFilter, ensure_iter, RssiSample
from rh.helpers import serial_url, socket_url
from rh.util import ms_counter, millis_to_secs
logger = logging.getLogger(__name__)
RESPONSE_WAIT_MS = 500
WRITE_CHILL_TIME_MS = 10
def micros_to_millis(t: int) -> int:
    """Convert a microsecond value to the nearest whole millisecond."""
    millis = t / 1000
    return round(millis)
def millivolts_to_volts(v: int) -> float:
    """Convert a reading in millivolts to volts."""
    volts = v / 1000.0
    return volts
class LapRFNodeManager(NodeManager):
    """Manages the timing slots (nodes) of a single LapRF unit over one I/O stream."""
    TYPE = "LapRF"

    def __init__(self, addr, io_stream):
        super().__init__()
        self.max_rssi_value = 3500
        self.addr = addr
        self.io_stream = io_stream
        self.stream_buffer = bytearray()  # holds partial records between reads
        self.voltage = None  # last reported battery voltage (V), None until a StatusEvent arrives
        self.min_lap_time = None
        self.race_start_rtc_time_ms = 0
        self.race_start_time_request_ts_ms = None
        self.last_write_ts_ms = 0

    def _create_node(self, index, multi_node_index):
        return LapRFNode(index, multi_node_index, self)

    @property
    def is_configured(self):
        """True once every node has received its RF setup from the device."""
        return all(node.is_configured for node in self.nodes)

    def write(self, data):
        """Write to the device, enforcing a minimum quiet gap between writes."""
        chill_remaining_ms = self.last_write_ts_ms + WRITE_CHILL_TIME_MS - ms_counter()
        if chill_remaining_ms > 0:
            gevent.sleep(millis_to_secs(chill_remaining_ms))
        self.io_stream.write(data)
        self.last_write_ts_ms = ms_counter()

    def read(self):
        """Read up to 512 bytes from the device stream."""
        return self.io_stream.read(512)

    def close(self):
        self.io_stream.close()
class LapRFSensor(Sensor):
    """Exposes LapRF device telemetry (battery voltage) as a sensor."""

    def __init__(self, node_manager):
        super().__init__(node_manager.addr, "LapRF")
        self.description = "LapRF"
        self.node_manager = node_manager

    @Reading(units='V')
    def voltage(self):
        """Last reported battery voltage, or None if not yet received."""
        return self.node_manager.voltage
class LapRFNode(Node):
    """A single LapRF timing slot."""

    def __init__(self, index, multi_node_index, manager):
        super().__init__(index=index, multi_node_index=multi_node_index, manager=manager)
        self.is_configured = False  # set True once the device echoes our RF setup
        self._threshold: float = 0
        self.gain: int = 0
        self.history_filter = ExtremumFilter()
        self.pass_count = 0

    def reset(self):
        super().reset()
        # discard any filter state carried over from the previous race
        self.history_filter = ExtremumFilter()

    @property
    def threshold(self):
        return self._threshold

    @threshold.setter
    def threshold(self, value):
        # LapRF has a single detection threshold: mirror it into both trigger levels
        self._threshold = value
        self.enter_at_level = value
        self.exit_at_level = value
class LapRFInterfaceListener(BaseHardwareInterfaceListener):
    """Extends the base listener with LapRF-specific change notifications (no-op defaults)."""

    def on_threshold_changed(self, node, threshold):
        pass

    def on_gain_changed(self, node, gain):
        pass
class LapRFInterface(BaseHardwareInterface):
    """Hardware interface driving one or more LapRF timing units.

    Each (addr, io_stream) pair becomes a LapRFNodeManager holding
    laprf.MAX_SLOTS nodes.  Incoming protocol records are buffered and
    decoded in _poll and dispatched in _process_message.
    """

    def __init__(self, addr_streams, listener=None):
        super().__init__(
            listener=listener if listener is not None else LapRFInterfaceListener()
        )
        addr_streams = ensure_iter(addr_streams)
        for addr_stream in addr_streams:
            node_manager = LapRFNodeManager(*addr_stream)
            self.node_managers.append(node_manager)
        for node_manager in self.node_managers:
            with node_manager:
                for index in range(laprf.MAX_SLOTS):
                    node = node_manager.add_node(index)
                    self.nodes.append(node)
                # enforce a 1s minimum lap time, then request the current RF setup
                node_manager.write(laprf.encode_set_min_lap_time_record(1))
                node_manager.write(laprf.encode_get_rf_setup_record())
                self._wait_for_configuration(node_manager, node_manager)
                if not node_manager.is_configured:
                    raise Exception("LapRF did not respond with RF setup information")
                self.sensors.append(LapRFSensor(node_manager))

    def _wait_for_configuration(self, configurable_obj, node_manager):
        """Poll until configurable_obj reports configured or RESPONSE_WAIT_MS elapses."""
        config_start_ts_ms = ms_counter()
        while not configurable_obj.is_configured and ms_counter() < config_start_ts_ms + RESPONSE_WAIT_MS:
            if self.update_thread:
                # the update thread does the polling; just yield
                gevent.sleep(millis_to_secs(RESPONSE_WAIT_MS))
            else:
                self._poll(node_manager)

    def _update(self):
        # one scheduling pass: poll each manager, spreading the sleep across them
        nm_sleep_interval = self.update_sleep/max(len(self.node_managers), 1)
        if self.node_managers:
            for node_manager in self.node_managers:
                self._poll(node_manager)
                gevent.sleep(nm_sleep_interval)
        else:
            gevent.sleep(nm_sleep_interval)

    def _poll(self, node_manager):
        """Read available bytes, buffer partial records, decode and dispatch complete ones."""
        with node_manager:
            data = node_manager.read()
            if data:
                end = data.rfind(laprf.EOR)
                if end == -1:
                    # no complete record yet - keep accumulating
                    node_manager.stream_buffer.extend(data)
                    return
                records = laprf.decode(node_manager.stream_buffer + data[:end+1])
                node_manager.stream_buffer = bytearray(data[end+1:])
                for record in records:
                    self._process_message(node_manager, record)

    def _process_message(self, node_manager, record: laprf.Event):
        """Apply one decoded LapRF record to node state and fire notifications."""
        if isinstance(record, laprf.StatusEvent):
            assert record.battery_voltage is not None
            node_manager.voltage = millivolts_to_volts(record.battery_voltage)
            rssi_ts_ms = ms_counter()
            for idx, rssi in enumerate(record.last_rssi):
                if rssi is not None:
                    node = node_manager.nodes[idx]
                    node.current_rssi = RssiSample(rssi_ts_ms, rssi)
                    node.node_peak_rssi = max(rssi, node.node_peak_rssi)
                    node.node_nadir_rssi = min(rssi, node.node_nadir_rssi)
                    filtered_ts_ms, filtered_rssi = node.history_filter.filter(rssi_ts_ms, rssi)
                    self.append_rssi_history(node, filtered_ts_ms, filtered_rssi)
        elif isinstance(record, laprf.PassingEvent):
            assert record.slot_index is not None and record.slot_index > 0
            assert record.rtc_time is not None
            node_idx = record.slot_index - 1
            node = node_manager.nodes[node_idx]
            pass_peak_rssi = record.peak_height
            node.node_peak_rssi = max(record.peak_height, node.node_peak_rssi)
            # device RTC time converted to milliseconds relative to race start
            lap_ts_ms = micros_to_millis(record.rtc_time) - node_manager.race_start_rtc_time_ms
            if self.is_racing:
                node.pass_history.append(RssiSample(lap_ts_ms + self.race_start_time_ms, pass_peak_rssi))
                node.pass_count += 1
                self._notify_pass(node, lap_ts_ms, BaseHardwareInterface.LAP_SOURCE_REALTIME, None)
        elif isinstance(record, laprf.RFSetupEvent):
            assert record.slot_index is not None and record.slot_index > 0
            node_idx = record.slot_index - 1
            node = node_manager.nodes[node_idx]
            node.band_idx = record.band
            node.channel_idx = record.channel
            old_frequency = node.frequency
            old_bandChannel = node.bandChannel
            if record.enabled:
                node.frequency = record.frequency
                # map the device band/channel indices to a band letter + channel number
                if record.band is not None and record.band >= 1 and record.band <= len(laprf.LIVE_TIME_BANDS) and record.channel is not None and record.channel >= 1 and record.channel <= laprf.MAX_CHANNELS:
                    node.bandChannel = laprf.LIVE_TIME_BANDS[record.band-1] + str(record.channel)
                else:
                    node.bandChannel = None
            else:
                node.frequency = 0
                node.bandChannel = None
            old_threshold = node.threshold
            old_gain = node.gain
            node.threshold = record.threshold
            node.gain = record.gain
            node.is_configured = True
            if node.frequency != old_frequency:
                self._notify_frequency_changed(node)
            if node.bandChannel != old_bandChannel:
                self._notify_frequency_changed(node)
            if node.threshold != old_threshold:
                self._notify_threshold_changed(node)
            if node.gain != old_gain:
                self._notify_gain_changed(node)
        elif isinstance(record, laprf.TimeEvent):
            assert record.rtc_time is not None
            if node_manager.race_start_time_request_ts_ms is not None:
                # compensate for request->response latency (assumed symmetric)
                server_oneway_ms = round((ms_counter() - node_manager.race_start_time_request_ts_ms)/2)
                node_manager.race_start_rtc_time_ms = micros_to_millis(record.rtc_time) - server_oneway_ms
                node_manager.race_start_time_request_ts_ms = None
        elif isinstance(record, laprf.SettingsEvent):
            if record.min_lap_time:
                node_manager.min_lap_time = record.min_lap_time
        else:
            logger.warning("Unsupported record: {}".format(record))

    def on_race_start(self, race_start_time_ms):
        super().on_race_start(race_start_time_ms)
        data = laprf.encode_get_rtc_time_record()
        for node_manager in self.node_managers:
            # Bug fix: this was assigned to 'race_start_time_request_ts', but
            # __init__ and the TimeEvent handler use
            # 'race_start_time_request_ts_ms', so the race-start RTC offset was
            # never computed.
            node_manager.race_start_time_request_ts_ms = ms_counter()
            node_manager.write(data)

    def set_enter_at_level(self, node_index, level):
        # LapRF has a single threshold shared by enter/exit triggers
        self.set_threshold(node_index, level)

    def set_exit_at_level(self, node_index, level):
        self.set_threshold(node_index, level)

    def set_threshold(self, node_index, threshold):
        """Set a node's detection threshold (ignored when out of device range)."""
        if threshold >= 0 and threshold <= laprf.MAX_THRESHOLD:
            node = self.nodes[node_index]
            self.set_rf_setup(node, node.frequency, node.band_idx, node.channel_idx, node.gain, threshold)

    def set_gain(self, node_index, gain):
        """Set a node's gain (ignored when out of device range)."""
        if gain >= 0 and gain <= laprf.MAX_GAIN:
            node = self.nodes[node_index]
            self.set_rf_setup(node, node.frequency, node.band_idx, node.channel_idx, gain, node.threshold)

    def set_frequency(self, node_index, frequency, band=None, channel=None):
        """Tune a node; band/channel are optional hints for the device UI."""
        node = self.nodes[node_index]
        try:
            band_idx = laprf.LIVE_TIME_BANDS.index(band) + 1 if band else 0
        except ValueError:
            band_idx = 0  # unknown band letter - tune by frequency only
        channel_idx = channel if channel else 0
        self.set_rf_setup(node, frequency, band_idx, channel_idx, node.gain, node.threshold)

    def set_rf_setup(self, node, frequency, band_idx, channel_idx, gain, threshold):
        """Push a full RF setup to the device and wait for it to be echoed back."""
        node_manager = node.manager
        slot_index = node.multi_node_index + 1  # device slots are 1-based
        enabled = True if frequency else False
        node_manager.write(laprf.encode_set_rf_setup_record(slot_index, enabled, band_idx, channel_idx, frequency if frequency else 0, gain, threshold))
        node.is_configured = False
        node_manager.write(laprf.encode_get_rf_setup_record(slot_index))
        self._wait_for_configuration(node, node_manager)
        if not node.is_configured:
            logger.error("LapRF did not respond with RF setup information for node {}".format(node))
        if node.frequency != frequency:
            logger.error("LapRF ignored our request to change the frequency of node {} (requested {}, is {})".format(node, frequency, node.frequency))
        if node.threshold != threshold:
            logger.error("LapRF ignored our request to change the threshold of node {} (requested {}, is {})".format(node, threshold, node.threshold))

    def _notify_threshold_changed(self, node):
        self.listener.on_threshold_changed(node, node.threshold)

    def _notify_gain_changed(self, node):
        self.listener.on_gain_changed(node, node.gain)
class SocketStream:
    """Adapts a socket to the write/read/close interface expected by LapRFNodeManager."""

    def __init__(self, socket):
        self.socket = socket

    def write(self, data):
        """Send raw bytes over the socket."""
        self.socket.send(data)

    def read(self, max_size):
        """Receive up to max_size bytes from the socket."""
        return self.socket.recv(max_size)

    def close(self):
        self.socket.close()
SERIAL_SCHEME = 'serial:'
SOCKET_SCHEME = 'socket://'

def _normalize_addr(addr):
    """Normalize a user-supplied address into a serial:/socket:// URL."""
    if addr.startswith((SERIAL_SCHEME, SOCKET_SCHEME)):
        # already a url
        return addr
    if addr.startswith('/'):
        # a filesystem path: assume a serial device
        return serial_url(addr)
    # otherwise a simple <host>[:<port>], defaulting to port 5403
    host_port = addr.split(':')
    if len(host_port) == 1:
        host_port = (host_port[0], 5403)
    return socket_url(host_port[0], host_port[1])
def _create_stream(addr):
    """Open an I/O stream for a normalized serial:/socket:// address.

    Raises ValueError for any other scheme.
    """
    if addr.startswith(SERIAL_SCHEME):
        port = addr[len(SERIAL_SCHEME):]
        return serial.Serial(port=port, baudrate=115200, timeout=0.25)
    if addr.startswith(SOCKET_SCHEME):
        # strip any trailing /
        end_pos = -1 if addr[-1] == '/' else len(addr)
        host_port = addr[len(SOCKET_SCHEME):end_pos].split(':')
        if len(host_port) == 1:
            host_port = (host_port[0], 5403)  # default LapRF port
        return SocketStream(socket.create_connection(host_port))
    raise ValueError("Unsupported address: {}".format(addr))
def get_hardware_interface(config, *args, **kwargs):
    """Create a LapRFInterface from config.LAPRF['ADDRESS'] (a single address or a list)."""
    addr_streams = []
    for addr in ensure_iter(config.LAPRF['ADDRESS']):
        normalized = _normalize_addr(addr)
        addr_streams.append((normalized, _create_stream(normalized)))
    return LapRFInterface(addr_streams)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,034
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/apis/mqtt_api.py
|
from rh.helpers.mqtt_helper import make_topic, split_topic
from rh.interface import RssiSample, LifetimeSample
from rh.interface.BaseHardwareInterface import BaseHardwareInterface, BaseHardwareInterfaceListener
from rh.util.RHUtils import FREQS
from . import NodeRef, RESET_FREQUENCY
import logging
import json
logger = logging.getLogger(__name__)
def get_rssi_sample(payload):
    """Build an RssiSample from a decoded MQTT payload dict ('rssi' may be absent)."""
    return RssiSample(int(payload['timestamp']),
                      int(payload['rssi']) if 'rssi' in payload else None)
def get_lifetime_sample(payload):
    """Build a LifetimeSample from a decoded MQTT payload dict ('lifetime' may be absent)."""
    return LifetimeSample(int(payload['timestamp']),
                          int(payload['lifetime']) if 'lifetime' in payload else None)
class MqttAPI:
def __init__(self, mqtt_client, ann_topic: str, timer_id: str, hw: BaseHardwareInterface, listener: BaseHardwareInterfaceListener):
self.hw_interface = hw
self.listener = listener
self.client = mqtt_client
self.ann_topic = ann_topic
self.timer_id = timer_id
def _subscribe_to(self, node_topic, handler):
timer_topic = self.timer_id if self.timer_id is not None else '+'
topic = make_topic(self.ann_topic, [timer_topic, '+', '+', node_topic])
self.client.message_callback_add(topic, handler)
self.client.subscribe(topic)
def _unsubscibe_from(self, node_topic):
timer_topic = self.timer_id if self.timer_id is not None else '+'
topic = make_topic(self.ann_topic, [timer_topic, '+', '+', node_topic])
self.client.unsubscribe(topic)
self.client.message_callback_remove(topic)
def start(self):
logger.info('MQTT API started')
self._subscribe_to('enter', self.enter_handler)
self._subscribe_to('exit', self.exit_handler)
self._subscribe_to('pass', self.pass_handler)
self._subscribe_to('sample', self.sample_handler)
self._subscribe_to('history', self.history_handler)
self._subscribe_to('frequency', self.set_frequency_handler)
self._subscribe_to('bandChannel', self.set_bandChannel_handler)
self._subscribe_to('enterTrigger', self.set_enter_handler)
self._subscribe_to('exitTrigger', self.set_exit_handler)
def stop(self):
self._unsubscibe_from('enter')
self._unsubscibe_from('exit')
self._unsubscibe_from('pass')
self._unsubscibe_from('sample')
self._unsubscibe_from('history')
self._unsubscibe_from('frequency')
self._unsubscibe_from('bandChannel')
self._unsubscibe_from('enterTrigger')
self._unsubscibe_from('exitTrigger')
logger.info('MQTT API stopped')
def _get_node_ref_from_topic(self, topic):
topic_names = split_topic(topic)
if len(topic_names) >= 4:
timer_id = topic_names[-4]
nm_addr = topic_names[-3]
multi_node_index = int(topic_names[-2])
if timer_id == self.timer_id:
for node_manager in self.hw_interface.node_managers:
if node_manager.addr == nm_addr and multi_node_index < len(node_manager.nodes):
node = node_manager.nodes[multi_node_index]
return NodeRef(timer_id, nm_addr, multi_node_index, node)
else:
return NodeRef(timer_id, nm_addr, multi_node_index, None)
return None
def enter_handler(self, client, userdata, msg):
node_ref = self._get_node_ref_from_topic(msg.topic)
if node_ref:
enter_info = json.loads(msg.payload.decode('utf-8'))
ts, rssi = get_rssi_sample(enter_info)
lifetime = enter_info.get('lifetime')
self.listener.on_enter_triggered(node_ref, ts, rssi, lifetime)
def exit_handler(self, client, userdata, msg):
node_ref = self._get_node_ref_from_topic(msg.topic)
if node_ref:
exit_info = json.loads(msg.payload.decode('utf-8'))
ts, rssi = get_rssi_sample(exit_info)
lifetime = exit_info.get('lifetime')
self.listener.on_exit_triggered(node_ref, ts, rssi, lifetime)
def pass_handler(self, client, userdata, msg):
node_ref = self._get_node_ref_from_topic(msg.topic)
if node_ref:
pass_info = json.loads(msg.payload.decode('utf-8'))
if pass_info['source'] == 'realtime':
lap_source = BaseHardwareInterface.LAP_SOURCE_REALTIME
elif pass_info['source'] == 'manual':
lap_source = BaseHardwareInterface.LAP_SOURCE_MANUAL
else:
lap_source = None
if lap_source is not None:
ts, rssi = get_rssi_sample(pass_info)
self.listener.on_pass(node_ref, ts, lap_source, rssi)
def sample_handler(self, client, userdata, msg):
node_ref = self._get_node_ref_from_topic(msg.topic)
if node_ref:
sample_info = json.loads(msg.payload.decode('utf-8'))
if 'rssi' in sample_info:
ts, rssi = get_rssi_sample(sample_info)
self.listener.on_rssi_sample(node_ref, ts, rssi)
elif 'lifetime' in sample_info:
ts, lifetime = get_lifetime_sample(sample_info)
self.listener.on_lifetime_sample(node_ref, ts, lifetime)
def history_handler(self, client, userdata, msg):
    """MQTT callback: a node published an rssi extremum history record.

    Forwards the integer timestamp, rssi and duration to the listener.
    """
    node_ref = self._get_node_ref_from_topic(msg.topic)
    if not node_ref:
        return
    record = json.loads(msg.payload.decode('utf-8'))
    self.listener.on_extremum_history(
        node_ref, int(record['timestamp']), int(record['rssi']), int(record['duration']))
def set_frequency_handler(self, client, userdata, msg):
    """MQTT callback: set a node's frequency.

    Payload is either empty (node deactivated -> frequency 0) or
    'freq[,bandChannel]', e.g. '5658,R1' where the band/channel part
    is a band letter followed by a single channel digit.

    Fix: the original bare `except:` swallowed every exception —
    including errors raised by the listener callback and
    KeyboardInterrupt — and reported them all as an invalid message.
    Only parse errors are caught now; listener errors propagate.
    """
    node_ref = self._get_node_ref_from_topic(msg.topic)
    if not node_ref:
        return
    try:
        if msg.payload:
            parts = msg.payload.decode('utf-8').split(',')
            freq = int(parts[0])
            if len(parts) >= 2:
                band_channel = parts[1]
                band = band_channel[0]
                channel = int(band_channel[1])
            else:
                band = None
                channel = None
        else:
            freq = None  # empty payload: node deactivated
    except (ValueError, IndexError, UnicodeDecodeError):
        logger.warning('Invalid frequency message')
        return
    if freq is not None:
        self.listener.on_frequency_changed(node_ref, freq, band, channel)
    else:
        self.listener.on_frequency_changed(node_ref, 0)
def set_bandChannel_handler(self, client, userdata, msg):
    """MQTT callback: set a node's frequency by band/channel code.

    An empty payload resets the node to RESET_FREQUENCY; otherwise the
    payload must be a key of FREQS (band letter + channel digit) and the
    corresponding frequency is applied. Unknown codes are ignored.
    """
    node_ref = self._get_node_ref_from_topic(msg.topic)
    if not node_ref:
        return
    if not msg.payload:
        self.listener.on_frequency_changed(node_ref, RESET_FREQUENCY)
        return
    band_channel = msg.payload.decode('utf-8')
    if band_channel in FREQS:
        self.listener.on_frequency_changed(
            node_ref, FREQS[band_channel], band_channel[0], int(band_channel[1]))
def set_enter_handler(self, client, userdata, msg):
    """MQTT callback: set a node's enter-trigger level (integer payload).

    Fix: the original bare `except:` also caught exceptions raised by
    the listener callback and mislabelled them as an invalid message.
    Only payload parse errors are caught; the callback runs outside
    the try block.
    """
    node_ref = self._get_node_ref_from_topic(msg.topic)
    if node_ref:
        try:
            level = int(msg.payload.decode('utf-8'))
        except (ValueError, UnicodeDecodeError):
            logger.warning('Invalid enter trigger message')
        else:
            self.listener.on_enter_trigger_changed(node_ref, level)
def set_exit_handler(self, client, userdata, msg):
    """MQTT callback: set a node's exit-trigger level (integer payload).

    Fix: the original bare `except:` also caught exceptions raised by
    the listener callback and mislabelled them as an invalid message.
    Only payload parse errors are caught; the callback runs outside
    the try block.
    """
    node_ref = self._get_node_ref_from_topic(msg.topic)
    if node_ref:
        try:
            level = int(msg.payload.decode('utf-8'))
        except (ValueError, UnicodeDecodeError):
            logger.warning('Invalid exit trigger message')
        else:
            self.listener.on_exit_trigger_changed(node_ref, level)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,035
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/tools/plot_spi.py
|
import csv
import sys
from typing import List, Dict, Tuple
import matplotlib.pyplot as plt
# Plot SPI pin levels from a CSV capture (columns: time, pin, value).
# Each pin gets an (x, y) trace; x is the sample index, not the timestamp.
pins: Dict[int, Tuple[List[int], List[int]]] = {}
with open(sys.argv[1]) as csv_file:
    rows = csv.reader(csv_file)
    next(rows)  # skip the header row
    for sample_idx, row in enumerate(rows):
        timestamp = int(row[0])  # parsed for validation; plot uses sample index
        pin = int(row[1])
        value = int(row[2])
        if pin < 18:  # only SPI pins (GPIO >= 18)
            continue
        xs, ys = pins.setdefault(pin, ([], []))
        xs.append(sample_idx)
        ys.append(value)
        # hold every other known pin at its last level so traces stay aligned
        if sample_idx > 0:
            for other_pin, (other_xs, other_ys) in pins.items():
                if other_pin != pin:
                    other_xs.append(sample_idx)
                    other_ys.append(other_ys[-1])
for pin, (xs, ys) in pins.items():
    plt.plot(xs, ys, label=str(pin))
plt.legend(loc='upper right')
plt.show()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,036
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/app/race_explorer_core.py
|
import itertools
import json
from rh.app import RHRace
import numpy as np
from collections import namedtuple
UNCLASSIFIED = 'Unclassified'
RACE_FORMAT_FASTEST_CONSECUTIVE = 'fastest-consecutive'
RACE_FORMAT_MOST_LAPS_QUICKEST_TIME = 'most-laps-quickest-time'
LEADERBOARD_BEST = 'best'
LEADERBOARD_HEAT_POSITIONS = 'heatPositions'
def export_results(RHData):
    """Flatten all saved races into a list of per-pilot lap messages.

    Each message carries the event name, stage/heat ids (prefixed 'id:'),
    a zero-based round index, the pilot callsign and the pilot's laps:
    gate passes at location 0 plus any split passes at locations 1+.
    """
    event_name = RHData.get_option('eventName', '')
    msgs = []
    for race in RHData.get_savedRaceMetas():
        race_id = race.id
        round_idx = race.round_id - 1  # exported round index is zero-based
        heat_id = race.heat_id
        heat = RHData.get_heat(heat_id)
        stage_id = heat.stage_id
        pilotraces = RHData.get_savedPilotRaces_by_savedRaceMeta(race.id)
        for pilotrace in pilotraces:
            pilot = RHData.get_pilot(pilotrace.pilot_id)
            if pilot:
                pilotlaps = RHData.get_savedRaceLaps_by_savedPilotRace(pilotrace.id)
                laps = []
                for lap_id,pilotlap in enumerate(pilotlaps):
                    # gate pass for this lap (location 0)
                    laps.append({'lap': lap_id, 'timestamp': pilotlap.lap_time_stamp, 'location': 0, 'seat': pilotlap.node_index})
                    # split passes for the same lap at locations 1..n
                    lapsplits = RHData.get_lapSplits_by_lap(race_id, pilotrace.node_index, lap_id)
                    for lapsplit in lapsplits:
                        laps.append({'lap': lap_id, 'timestamp': lapsplit.split_time_stamp, 'location': lapsplit.split_id+1, 'seat': lapsplit.node_index})
                msg = {'event': event_name, 'stage': 'id:'+str(stage_id), 'round': round_idx, 'heat': 'id:'+str(heat_id), 'pilot': pilot.callsign, 'laps': laps}
                msgs.append(msg)
    return msgs
def pilot_results(msgs):
    """Group flat lap messages into a nested per-pilot results structure.

    Returns {'pilots': {pilot: {'events': {event: {'stages': {stage:
    {'heats': {heat: {'rounds': [...]}}}}}}}}}; each round entry is
    {'laps': [...]} (or None for rounds with no messages). Laps from
    multiple messages for the same round are concatenated.
    """
    results = {'pilots': {}}
    by_pilot = results['pilots']
    for msg in msgs:
        pilot_entry = by_pilot.setdefault(msg['pilot'], {'events': {}})
        event_entry = pilot_entry['events'].setdefault(msg['event'], {'stages': {}})
        stage_entry = event_entry['stages'].setdefault(msg['stage'], {'heats': {}})
        heat_entry = stage_entry['heats'].setdefault(msg['heat'], {'rounds': []})
        rounds = heat_entry['rounds']
        round_idx = msg['round']
        # pad with None so the round index is always addressable
        while len(rounds) <= round_idx:
            rounds.append(None)
        if rounds[round_idx] is None:
            rounds[round_idx] = {'laps': []}
        rounds[round_idx]['laps'].extend(msg['laps'])
    return results
def export_event(RHData):
    """Export the whole event description (pilots, formats, classes,
    seats, stages/heats) as a plain-dict exchange structure.
    """
    # pilots keyed by callsign; pilots_by_id maps DB id -> pilot row
    pilots = {}
    pilots_by_id = {}
    for rhpilot in RHData.get_pilots():
        pilot_data = {'name': rhpilot.name}
        if rhpilot.data:
            pilot_data.update(rhpilot.data)
        pilots[rhpilot.callsign] = pilot_data
        pilots_by_id[rhpilot.id] = rhpilot
    # race formats, with a built-in 'Free' format mapped to format id 0
    race_formats = {'Free': {'start': 'first-pass', 'duration': 0}}
    race_formats_by_id = {0: 'Free'}
    for race_format in RHData.get_raceFormats():
        race_formats[race_format.name] = export_race_format(race_format)
        race_formats_by_id[race_format.id] = race_format.name
    # race classes, with the built-in default class mapped to class id 0
    race_classes = {UNCLASSIFIED: {'description': "Default class"}}
    race_classes_by_id = {0: UNCLASSIFIED}
    for race_class in RHData.get_raceClasses():
        race_format_name = race_formats_by_id[race_class.format_id]
        race_classes[race_class.name] = {
            'description': race_class.description,
            'format': race_format_name,
            'children': {child.name: {} for child in race_class.children}
        }
        race_classes_by_id[race_class.id] = race_class.name
    # seats: frequency per node from the currently selected profile,
    # plus a band/channel code when both parts are present
    seats = []
    current_profile = RHData.get_optionInt('currentProfile')
    profile = RHData.get_profile(current_profile)
    freqs = json.loads(profile.frequencies)
    for f_b_c in zip(freqs['f'], freqs['b'], freqs['c']):
        fbc = {'frequency': f_b_c[0]}
        if f_b_c[1] and f_b_c[2]:
            fbc['bandChannel'] = f_b_c[1] + str(f_b_c[2])
        seats.append(fbc)
    # walk heats in order, starting a new stage whenever the stage name
    # changes; only classes/formats actually used by a heat are exported
    event_formats = {}
    event_classes = {}
    stages = []
    prev_stage_name = None
    for heat_idx, rhheat in enumerate(RHData.get_heats()):
        heat_seats = [None] * len(seats)
        for heat_node in RHData.get_heatNodes_by_heat(rhheat.id):
            if heat_node.node_index < len(heat_seats) and heat_node.pilot_id in pilots_by_id:
                heat_seats[heat_node.node_index] = pilots_by_id[heat_node.pilot_id].callsign
        race_name = rhheat.note if rhheat.note else 'Heat '+str(heat_idx+1)
        race_class_name = race_classes_by_id[rhheat.class_id]
        if race_class_name not in event_classes:
            race_class = race_classes[race_class_name]
            event_classes[race_class_name] = race_class
            race_format_name = race_class.get('format')
            if race_format_name is not None and race_format_name not in event_formats:
                event_formats[race_format_name] = race_formats[race_format_name]
        race = {
            'id': str(rhheat.id),
            'name': race_name,
            'class': race_class_name,
            'seats': heat_seats
        }
        stage_name = rhheat.stage.name
        if stage_name != prev_stage_name:
            # new stage: start a fresh heat list and merge stage metadata
            races = []
            stage = {'id': str(rhheat.stage_id), 'name': stage_name, 'heats': races}
            if rhheat.stage.data:
                stage.update(rhheat.stage.data)
            stages.append(stage)
            prev_stage_name = stage_name
        races.append(race)
    # stored event classes (if present) override the collected ones
    event_classes = RHData.get_optionJson('eventClasses', event_classes)
    data = export_event_basic(RHData)
    data.update({
        'pilots': pilots,
        'formats': event_formats,
        'classes': event_classes,
        'seats': seats,
        'stages': stages
    })
    data.update(RHData.get_optionJson('eventMetadata', {}))
    return data
def export_event_basic(RHData):
    """Return the event's basic identity fields: name, description, URL."""
    return {
        'name': RHData.get_option('eventName', ""),
        'description': RHData.get_option('eventDescription', ""),
        'url': RHData.get_option('eventURL', ""),
    }
def export_race_format(race_format):
    """Convert an RHData race-format row into the exchange-format dict.

    Maps start behavior to 'start-line'/'first-pass', win condition to an
    objective string, and includes duration (race time + grace) and the
    lap-count win limit. Adds 'consecutiveLaps' for consecutive objectives.

    Fix: the local result dict was named `json`, shadowing the imported
    json module within this function; renamed to `fmt`.
    """
    start = 'start-line' if race_format.start_behavior == RHRace.StartBehavior.FIRST_LAP else 'first-pass'
    consecutive_laps = 0
    if race_format.win_condition == RHRace.WinCondition.FASTEST_3_CONSECUTIVE:
        objective = RACE_FORMAT_FASTEST_CONSECUTIVE
        consecutive_laps = 3
    elif race_format.win_condition == RHRace.WinCondition.MOST_PROGRESS:
        objective = RACE_FORMAT_MOST_LAPS_QUICKEST_TIME
    else:
        objective = None
    fmt = {
        'start': start,
        'duration': race_format.race_time_sec + race_format.lap_grace_sec,
        'objective': objective,
        'maxLaps': race_format.number_laps_win
    }
    if consecutive_laps:
        fmt['consecutiveLaps'] = consecutive_laps
    return fmt
def import_event(data, rhserver):
    """Import an exchange-format event dict into the database.

    Updates event options, builds a frequency profile from the seats,
    creates any missing race classes and pilots, then rewrites the heat
    list to match the imported stages (reusing existing heats in order,
    appending new ones, deleting the surplus). Finally commits and emits
    refresh events via the rhserver callbacks.
    """
    event_name = data['name']
    race_classes = data['classes'] if 'classes' in data else {}
    seats = data['seats']
    pilots = data['pilots']
    stages = data['stages']
    RHData = rhserver['RHDATA']
    RHData.set_option('eventName', event_name)
    RHData.set_optionJson('eventClasses', race_classes)
    if 'description' in data:
        RHData.set_option('eventDescription', data['description'])
    if 'url' in data:
        RHData.set_option('eventURL', data['url'])
    event_metadata = {}
    if 'date' in data:
        event_metadata['date'] = data['date']
    RHData.set_optionJson('eventMetadata', event_metadata)
    # frequency profile named after the event, one f/b/c entry per seat;
    # band/channel come from a 'bandChannel' code like 'R1' when present
    profile_data = {'profile_name': event_name,
                    'frequencies': {'b': [s['bandChannel'][0] if 'bandChannel' in s else None for s in seats],
                                    'c': [int(s['bandChannel'][1]) if 'bandChannel' in s else None for s in seats],
                                    'f': [s['frequency'] for s in seats]
                                    }
                    }
    profile = RHData.upsert_profile(profile_data)
    # lookup tables for rows already in the DB
    raceFormat_ids_by_name = {}
    for rhraceformat in RHData.get_raceFormats():
        raceFormat_ids_by_name[rhraceformat.name] = rhraceformat.id
    raceClass_ids = {}
    for rhraceclass in RHData.get_raceClasses():
        raceClass_ids[rhraceclass.name] = rhraceclass.id
    pilot_ids = {}
    for rhpilot in RHData.get_pilots():
        pilot_ids[rhpilot.callsign] = rhpilot.id
    # create race classes not already present
    for race_class_name, race_class in race_classes.items():
        raceClass_id = raceClass_ids.get(race_class_name)
        if not raceClass_id:
            class_data = {
                'name': race_class_name
            }
            if 'description' in race_class:
                class_data['description'] = race_class['description']
            raceFormat_name = race_class.get('format')
            if raceFormat_name in raceFormat_ids_by_name:
                class_data['format_id'] = raceFormat_ids_by_name[raceFormat_name]
            rhraceclass = RHData.add_raceClass(class_data)
            raceClass_ids[race_class_name] = rhraceclass.id
    # create pilots not already present (keyed by callsign)
    for callsign, pilot in pilots.items():
        pilot_id = pilot_ids.get(callsign)
        if not pilot_id:
            pilot_data = {
                'callsign': callsign,
                'name': pilot['name']
            }
            if 'url' in pilot:
                pilot_data['url'] = pilot['url']
            extra_data = {}
            for extra_field in ['ifpvId', 'multigpId']:
                if extra_field in pilot:
                    extra_data[extra_field] = pilot[extra_field]
            if extra_data:
                pilot_data['data'] = extra_data
            rhpilot = RHData.add_pilot(pilot_data)
            pilot_ids[callsign] = rhpilot.id
    # walk imported stages/heats in order; h counts heats consumed so far
    rhheats = RHData.get_heats()
    h = 0
    for stage in stages:
        rhheat = None
        for heat in stage['heats']:
            if h < len(rhheats):
                # reuse an existing heat row, updating it in place
                rhheat = rhheats[h]
                heat_nodes = RHData.get_heatNodes_by_heat(rhheat.id)
                # ensure enough heat nodes exist for the imported seats
                for seat_index in range(len(heat_nodes), len(heat['seats'])):
                    RHData.add_heatNode(rhheat.id, seat_index)
                for seat,callsign in enumerate(heat['seats']):
                    if callsign in pilot_ids:
                        heat_data = {'heat': rhheat.id, 'note': heat['name'], 'stage': stage['name'], 'node': seat, 'pilot': pilot_ids[callsign]}
                        heat_class = heat.get('class', UNCLASSIFIED)
                        if heat_class != UNCLASSIFIED:
                            heat_data['class'] = raceClass_ids[heat_class]
                        RHData.alter_heat(heat_data)
            else:
                # no existing heat left - create a new one
                heat_data = {'note': heat['name'], 'stage': stage['name']}
                heat_class = heat.get('class', UNCLASSIFIED)
                if heat_class != UNCLASSIFIED:
                    heat_data['class'] = raceClass_ids[heat_class]
                heat_pilots = {}
                for seat,callsign in enumerate(heat['seats']):
                    if callsign in pilot_ids:
                        heat_pilots[seat] = pilot_ids[callsign]
                rhheat = RHData.add_heat(init=heat_data, initPilots=heat_pilots)
            h += 1
        if rhheat:
            # attach stage-level metadata via the last processed heat's stage
            stage_data = {}
            if 'type' in stage:
                stage_data['type'] = stage['type']
            if 'leaderboards' in stage:
                stage_data['leaderboards'] = stage['leaderboards']
            rhheat.stage.data = stage_data
    # delete surplus heats beyond the imported count (in reverse order)
    for i in range(len(rhheats)-1, h-1, -1):
        RHData.delete_heat(rhheats[i].id)
    RHData.commit()
    rhserver['on_set_profile']({'profile': profile.id})
    rhserver['emit_pilot_data']()
    rhserver['emit_heat_data']()
def calculate_metrics(results, event_data):
    """Compute per-race, per-heat, per-stage and per-event metrics in place.

    results: nested pilot results as produced by pilot_results() and
    keyed like calculate_leaderboard() expects.
    event_data: event description as produced by export_event().
    Returns the augmented results.

    Fixes: (1) a pilot without results for this event made
    `pilot_result['events'].get(event_name, {})` return {} and the very
    next `event_result['stages']` raised KeyError — such pilots are now
    skipped; (2) `race_format` could be unbound at the event-level
    aggregation when no heat was ever iterated — it now defaults to {}.
    """
    event_name = event_data['name']
    for pilot_result in results['pilots'].values():
        event_result = pilot_result['events'].get(event_name, {})
        if not event_result:
            # pilot has no results for this event - nothing to aggregate
            continue
        race_format = {}  # fallback when a stage has no heats
        for stage_idx, stage_result in event_result['stages'].items():
            stage_info = lookup_by_index_or_id(event_data['stages'], stage_idx)
            stage_classes = set()
            for heat_idx, heat_result in stage_result['heats'].items():
                if stage_info:
                    heat_info = lookup_by_index_or_id(stage_info['heats'], heat_idx)
                    race_class_name = heat_info.get('class', UNCLASSIFIED)
                    heat_result['class'] = race_class_name
                    race_class = event_data['classes'].get(race_class_name, {})
                    stage_classes.add(race_class_name)
                else:
                    race_class = {}
                if race_class:
                    race_format = event_data['formats'].get(race_class.get('format', ''))
                else:
                    race_format = {}
                for race_result in heat_result['rounds']:
                    race_metrics = calculate_race_metrics(race_result, race_format)
                    race_result['metrics'] = race_metrics
                heat_result['metrics'] = aggregate_metrics([r['metrics'] for r in heat_result['rounds']], race_format)
            stage_result['metrics'] = {}
            stage_metrics = stage_result['metrics']
            for race_class in stage_classes:
                # NOTE(review): uses the race_format left over from the last
                # heat iterated above - assumes one format per stage; confirm
                stage_metrics[race_class] = aggregate_metrics([h['metrics'] for h in stage_result['heats'].values() if h['class'] == race_class], race_format)
        event_result['metrics'] = {}
        event_metrics = event_result['metrics']
        for race_class in event_data['classes']:
            stage_metrics = [s['metrics'][race_class] for s in event_result['stages'].values() if race_class in s['metrics']]
            event_metrics[race_class] = aggregate_metrics(stage_metrics, race_format)
    return results
# window (ms) within which passes on different seats merge into one lap
INTER_SEAT_LAP_THRESHOLD = 1000 # ms
def calculate_race_metrics(race, race_format):
    """Compute lap metrics for one pilot's race.

    race: dict with a 'laps' list of {'lap','timestamp','location','seat'}.
    race_format: exchange-format dict; 'start' selects how the start time
    is derived, 'objective'/'consecutiveLaps' enable the consecutive metric.
    Returns a metrics dict (lapCount, time, lapTimes, fastest, mean,
    stdDev, optionally fastestNConsecutive). Times are in the same units
    as the input timestamps (presumably ms - see threshold above).
    """
    Lap = namedtuple('Lap', ['timestamps', 'seats'])
    # coalesce gate passes (location 0): a pass on a different seat within
    # the threshold of the previous pass is treated as the same lap
    laps_t = []
    for lap in race['laps']:
        if lap['location'] == 0:
            ts = lap['timestamp']
            seat = lap['seat']
            if len(laps_t) > 0:
                prev_lap_t = laps_t[-1]
                if ts - prev_lap_t.timestamps[-1] < INTER_SEAT_LAP_THRESHOLD and seat not in prev_lap_t.seats:
                    # merge into previous lap
                    prev_lap_t.timestamps.append(ts)
                    prev_lap_t.seats[seat] = lap
                    lap = None  # consumed by the merge
            if lap is not None:
                laps_t.append(Lap(timestamps=[ts], seats={seat: lap}))
    # one representative timestamp per merged lap (mean of its passes)
    lap_timestamps = [round(np.mean(lap_t.timestamps)) for lap_t in laps_t]
    if race_format.get('start', 'first-pass') == 'start-line':
        # start-line starts: the race clock begins at 0
        start_time = 0
    else:
        # first-pass starts: the first pass is the start, not a lap
        start_time = lap_timestamps[0] if lap_timestamps else None
        lap_timestamps = lap_timestamps[1:]
    lap_count = len(lap_timestamps)
    race_time = lap_timestamps[-1] - start_time if lap_count else 0
    lap_times = [lap_timestamps[i] - (lap_timestamps[i-1] if i-1 >= 0 else start_time) for i in range(len(lap_timestamps))]
    metrics = {
        'lapCount': lap_count,
        'time': race_time,
        'lapTimes': lap_times,
        'fastest': np.min(lap_times) if lap_times else None,
        'mean': round(np.mean(lap_times)) if lap_times else None,
        'stdDev': round(np.std(lap_times)) if lap_times else None
    }
    if race_format.get('objective') == RACE_FORMAT_FASTEST_CONSECUTIVE:
        n = race_format['consecutiveLaps']
        metrics['fastest'+str(n)+'Consecutive'] = best_n_consecutive(lap_times, n)
    return metrics
def aggregate_metrics(metrics, race_format):
    """Combine a list of per-race metric dicts into one aggregate dict.

    Sums lap counts and times, concatenates lap times and recomputes
    fastest/mean/stdDev over the combined list. For fastest-consecutive
    objectives, keeps the consecutive window with the smallest total.
    """
    all_lap_times = []
    for entry in metrics:
        all_lap_times.extend(entry['lapTimes'])
    agg = {
        'lapCount': np.sum([entry['lapCount'] for entry in metrics]),
        'time': np.sum([entry['time'] for entry in metrics]),
        'lapTimes': all_lap_times,
        'fastest': np.min(all_lap_times) if all_lap_times else None,
        'mean': round(np.mean(all_lap_times)) if all_lap_times else None,
        'stdDev': round(np.std(all_lap_times)) if all_lap_times else None
    }
    if race_format.get('objective') == RACE_FORMAT_FASTEST_CONSECUTIVE:
        laps_needed = race_format['consecutiveLaps']
        metric_name = 'fastest' + str(laps_needed) + 'Consecutive'
        # pick the entry whose consecutive window has the smallest total
        window_sums = [np.sum(entry[metric_name]) for entry in metrics]
        if window_sums:
            best = np.argmin(window_sums)
            agg[metric_name] = metrics[best][metric_name]
    return agg
def best_n_consecutive(arr, n):
    """Return the n consecutive entries of arr with the smallest sum.

    Returns [] when arr has fewer than n entries. Ties go to the
    earliest window.
    """
    window_count = len(arr) + 1 - n
    if window_count <= 0:
        return []
    totals = [np.sum(arr[start:start + n]) for start in range(window_count)]
    best = np.argmin(totals)
    return arr[best:best + n]
ID_PREFIX = 'id:'
def lookup_by_index_or_id(arr, key):
    """Look up an entry of arr by 'id:<id>' string or by integer index.

    For an id key, returns the first entry whose 'id' matches, or []
    when none does. Any other key is treated as a list index (raises on
    an out-of-range index, matching list semantics).
    """
    if not (isinstance(key, str) and key.startswith(ID_PREFIX)):
        return arr[int(key)]
    wanted = key[len(ID_PREFIX):]
    for entry in arr:
        if entry['id'] == wanted:
            return entry
    return []
def calculate_leaderboard(results, event_data):
    """Attach rankings to event_data from previously computed metrics.

    results: pilot results already augmented by calculate_metrics().
    event_data: event description from export_event(); mutated in place
    (a 'ranking' list is added to each heat, and each stage gets
    'leaderboards' entries with a 'ranking') and returned.
    """
    event_name = event_data['name']
    for stage_idx, stage_info in enumerate(event_data['stages']):
        # stages/heats are addressed by explicit id ('id:<x>') when they
        # declare one, otherwise by positional index
        stage_id = ID_PREFIX + stage_info['id'] if 'id' in stage_info else stage_idx
        for heat_idx, heat_info in enumerate(stage_info['heats']):
            heat_id = ID_PREFIX + heat_info['id'] if 'id' in heat_info else heat_idx
            race_class_name = heat_info['class']
            race_class_info = event_data['classes'].get(race_class_name)
            if race_class_info and 'formats' in event_data:
                race_format = event_data['formats'].get(race_class_info.get('format'))
            else:
                race_format = {}
            # rank the pilots seated in this heat by their heat metrics
            heat_psrs = []
            for pilot in heat_info['seats']:
                pilot_results = results['pilots'].get(pilot)
                if pilot_results:
                    pilot_stages = pilot_results['events'][event_name]['stages']
                    if stage_id in pilot_stages:
                        pilot_heats = pilot_stages[stage_id]['heats']
                        if heat_id in pilot_heats:
                            metrics = pilot_heats[heat_id]['metrics']
                            heat_psrs.append(to_psr(pilot, metrics, race_format))
            heat_info['ranking'] = rank_psrs(heat_psrs)
        # collect per-class (pilot, score, result) entries across the stage
        stage_psrs_by_class = {}
        for pilot, pilot_result in results['pilots'].items():
            pilot_stages = pilot_result['events'][event_name]['stages']
            if stage_id in pilot_stages:
                stage_metrics_by_class = pilot_stages[stage_id]['metrics']
                for race_class_name, metrics in stage_metrics_by_class.items():
                    race_class_info = event_data['classes'].get(race_class_name)
                    if race_class_info and 'formats' in event_data:
                        race_format = event_data['formats'].get(race_class_info.get('format'))
                    else:
                        race_format = {}
                    class_psrs = stage_psrs_by_class.get(race_class_name)
                    if not class_psrs:
                        class_psrs = []
                        stage_psrs_by_class[race_class_name] = class_psrs
                    class_psrs.append(to_psr(pilot, metrics, race_format))
        if not stage_info.get('leaderboards'):
            # default if no leaderboard config is present
            stage_info['leaderboards'] = {race_class_name: {'method': LEADERBOARD_BEST} for race_class_name in stage_psrs_by_class.keys()}
        stage_leaderboards = stage_info['leaderboards']
        race_classes_by_name = {}
        for race_class_name, race_class in event_data['classes'].items():
            race_classes_by_name[race_class_name] = race_class
        for parent_race_class_name, leaderboard in stage_leaderboards.items():
            # expand the leaderboard's class to include all descendant classes
            race_class_names = []
            q = []
            q.append(parent_race_class_name)
            while q:
                race_class_name = q.pop()
                race_class_names.append(race_class_name)
                race_class = race_classes_by_name.get(race_class_name)
                if race_class and 'children' in race_class:
                    q.extend(race_class['children'].keys())
            method = leaderboard['method']
            if method == LEADERBOARD_BEST:
                # rank everyone in these classes by their stage metrics
                stage_psrs = []
                for race_class_name in race_class_names:
                    stage_psrs.extend(stage_psrs_by_class.get(race_class_name, []))
                leaderboard['ranking'] = rank_psrs(stage_psrs)
            elif method == LEADERBOARD_HEAT_POSITIONS:
                # seed the ranking from finishing positions of earlier races;
                # heat_positions entries are 1-based (race, position) pairs
                heat_positions = leaderboard['heatPositions']
                n = max([heat_pos[0] for heat_pos in heat_positions])
                races = get_previous_n_races(event_data['stages'], stage_idx+1, race_class_names, n)
                ranking = []
                for heat_pos in heat_positions:
                    pilot_result = races[heat_pos[0]-1]['ranking'][heat_pos[1]-1]
                    ranking.append({'pilot': pilot_result['pilot']})
                leaderboard['ranking'] = ranking
            else:
                raise ValueError("Unsupported method: " + method)
    return event_data
def get_previous_n_races(stages, stage_idx, race_class_names, n):
    """Collect the n most recent heats (before stage_idx) in the given classes.

    Searches stages before stage_idx from most recent backwards; returns
    the n matching heats in chronological order, or None when fewer than
    n exist (or stage_idx is out of range).
    """
    if not 0 <= stage_idx - 1 < len(stages):
        return None
    races = [None] * n
    remaining = n
    for stage in reversed(stages[:stage_idx]):
        for heat in reversed(stage['heats']):
            if heat.get('class') in race_class_names:
                remaining -= 1
                races[remaining] = heat
                if remaining == 0:
                    return races
    return None
def to_psr(pilot, metrics, race_format):
    """Build a (pilot, score, result) triple for ranking.

    The score is an orderable value where smaller is better; the result
    is the value displayed on the leaderboard. Raises ValueError for an
    unsupported objective.
    """
    objective = race_format.get('objective', RACE_FORMAT_MOST_LAPS_QUICKEST_TIME)
    if objective == RACE_FORMAT_MOST_LAPS_QUICKEST_TIME:
        lap_count = metrics['lapCount']
        race_time = metrics['time']
        # more laps first, then quickest time
        return pilot, (-lap_count, race_time), (lap_count, race_time)
    if objective == RACE_FORMAT_FASTEST_CONSECUTIVE:
        metric_name = 'fastest' + str(race_format['consecutiveLaps']) + 'Consecutive'
        best_window = metrics[metric_name]
        return pilot, best_window, best_window
    raise ValueError("Unsupported objective: " + race_format['objective'])
def rank_psrs(psrs):
    """Sort (pilot, score, result) triples by score (in place, ascending)
    and return leaderboard entries of {'pilot', 'result'}.
    """
    psrs.sort(key=lambda entry: entry[1])
    return [{'pilot': pilot, 'result': result} for pilot, _score, result in psrs]
def export_leaderboard(RHData):
    """End-to-end export: DB -> lap messages -> per-pilot metrics -> ranked
    leaderboard (the event description with rankings attached).
    """
    lap_msgs = export_results(RHData)
    per_pilot = pilot_results(lap_msgs)
    event_data = export_event(RHData)
    return calculate_leaderboard(calculate_metrics(per_pilot, event_data), event_data)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,037
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/endpoints/ota_endpoints.py
|
import logging
from pathlib import Path
from flask import request, send_file
from flask.blueprints import Blueprint
logger = logging.getLogger(__name__)
def createBlueprint():
    """Create the Flask blueprint serving ESP node OTA firmware upgrades.

    GET /ota/ compares the firmware version reported by an ESP updater
    against the version in the node source tree and serves the new
    binary when it is newer (304 otherwise, 501 for unknown clients).
    """
    APP = Blueprint('ota', __name__)
    @APP.route('/ota/')
    def ota_upgrade():
        # identify the requesting device family by its updater User-Agent
        user_agent = request.headers.get('User-Agent')
        if user_agent == 'ESP32-http-Update':
            firmware_path = 'build_esp32/rhnode.bin'
            provided_version = request.headers['X-Esp32-Version']
        elif user_agent == 'ESP8266-http-Update':
            firmware_path = 'build_esp8266/rhnode.bin'
            provided_version = request.headers['x-ESP8266-version']
        else:
            # not an OTA updater we recognize
            return "", 501
        # read the available firmware version out of the node's config.h
        current_version = None
        config_file = Path("node")/Path(firmware_path).parent/'sketch/config.h'
        with open(config_file, 'rt') as f:
            for line in f:
                if line.startswith("#define FIRMWARE_VERSION"):
                    # take the last token, stripping quotes and the newline
                    current_version = line.split(' ')[-1][1:-2]
                    break
        if not current_version:
            raise Exception("Could not find FIRMWARE_VERSION in {}".format(config_file))
        # NOTE(review): assumes versions look like 'V1.2'; float comparison
        # misorders multi-dot versions (e.g. '1.10' < '1.9') - confirm scheme
        if float(current_version[1:]) > float(provided_version[1:]):
            bin_path = Path("../node")/firmware_path
            logger.info("OTA upgrade from {} to {} using {}".format(provided_version, current_version, bin_path))
            return send_file(bin_path, mimetype='application/octet-stream')
        else:
            logger.info("No OTA upgrade available for {} (available {})".format(provided_version, current_version))
            return "", 304
    return APP
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,038
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/helpers/i2c_helper.py
|
'''RotorHazard I2C interface layer.'''
from . import i2c_url
import gevent.lock
import os
import logging
from time import perf_counter
I2C_CHILL_TIME = float(os.environ.get('RH_I2C_SLEEP', '0.015')) # Delay after i2c read/write
logger = logging.getLogger(__name__)
class I2CBus(object):
    """Serializes access to one I2C bus and rate-limits back-to-back transactions."""

    def __init__(self, bus):
        self.id = bus
        # Re-entrant lock so only one greenlet performs a read/write at a time.
        self.i2c_rlock_obj = gevent.lock.RLock()
        # perf_counter() of the last completed transaction; -1 means "never used".
        self.i2c_timestamp = -1

    def url_of(self, addr):
        """Return the canonical URL for device *addr* on this bus."""
        return i2c_url(self.id, addr)

    def i2c_end(self):
        """Record the completion time of an I2C transaction."""
        self.i2c_timestamp = perf_counter()

    def i2c_sleep(self):
        """Sleep out the remainder of the chill period after the last transaction."""
        if self.i2c_timestamp == -1:
            return
        remaining = self.i2c_timestamp + I2C_CHILL_TIME - perf_counter()
        if remaining > 0:
            gevent.sleep(remaining)

    def with_i2c(self, callback):
        """Run *callback* under the bus lock and chill-time gate.

        Returns the callback's result, or None when *callback* is not callable.
        """
        if not callable(callback):
            return None
        with self.i2c_rlock_obj:
            self.i2c_sleep()
            try:
                return callback()
            finally:
                self.i2c_end()

    def with_i2c_quietly(self, callback):
        """Like with_i2c(), but logs IOError instead of propagating it."""
        try:
            self.with_i2c(callback)
        except IOError as err:
            logger.info('I2C error: {0}'.format(err))
def create(rhconfig):
    """Build an I2CBus instance for every id in HARDWARE['I2C_BUSES']."""
    buses = []
    for bus_id in rhconfig.HARDWARE['I2C_BUSES']:
        logger.debug('Starting I2C on bus {0}'.format(bus_id))
        buses.append(I2CBus(bus_id))
    return buses
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,039
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/app/RHRace.py
|
'''Class to hold race management variables.'''
from enum import IntEnum
from rh.util import RHUtils
from typing import Dict
class RHRace():
    '''Class to hold race management variables.

    Mutating methods increment modification_count, which callers use for
    cache invalidation of computed results/leaderboards.
    '''
    def __init__(self):
        # setup/options
        self._num_nodes = 0  # backing field for the num_nodes property
        self.current_heat = 1 # heat ID
        self.current_round = 1
        self.current_stage = None
        self.node_pilots: Dict[int,RHPilot] = {} # current race pilots, by node, filled on heat change
        self._format = None # raceformat object
        # sequence
        self.scheduled = False # Whether to start a race when time
        self.scheduled_time = 0 # Start race when time reaches this value
        self.start_token = False # Check start thread matches correct stage sequence
        # status
        self.race_status = RaceStatus.READY
        self.timer_running = False
        self.start_time = 0 # datetime
        self.start_time_ms = 0 # monotonic
        self.start_time_epoch_ms = 0 # ms since 1970-01-01
        self.start_time_delay_secs = 0 # random-length race-start delay
        self.node_passes = {} # current race lap objects, by node
        self.node_splits = {}
        self.node_has_finished = {}
        self.any_races_started = False
        # concluded
        self.finish_time_ms = 0 # Monotonic, updated when race finishes
        self.finish_time_epoch_ms = 0 # ms since 1970-01-01
        self.end_time_ms = 0 # Monotonic, updated when race is stopped
        self.end_time_epoch_ms = 0 # ms since 1970-01-01
        # leaderboard/cache
        # result functions are injected externally; defaults produce no results
        self.result_fn = lambda current_race: None
        self.team_result_fn = lambda current_race: None
        self.status_message = '' # Race status message (winner, team info)
        self.win_status = WinStatus.NONE # whether race is won
        # bumped on state changes; consumers compare it to invalidate caches
        self.modification_count = 0
    '''
    Lap Object (dict) for node_laps:
        lap_number
        lap_time_stamp
        lap_time
        lap_time_formatted
        source
        deleted
    '''
    @property
    def num_nodes(self):
        '''Number of timing nodes; assigning a new value resets per-node state.'''
        return self._num_nodes
    @num_nodes.setter
    def num_nodes(self, new_value):
        self._num_nodes = new_value
        self.reset()
    @property
    def format(self):
        '''Current race format object; assignment bumps modification_count.'''
        return self._format
    @format.setter
    def format(self, new_race_format):
        self._format = new_race_format
        self.modification_count += 1
    @property
    def results(self):
        '''Leaderboard computed by the injected result function.'''
        return self.result_fn(self)
    @property
    def team_results(self):
        '''Team leaderboard computed by the injected team result function.'''
        return self.team_result_fn(self)
    def reset(self):
        '''Clear per-node pass/split lists for the current node count.'''
        self.node_passes = {idx: [] for idx in range(self._num_nodes)}
        self.node_splits = {idx: [] for idx in range(self._num_nodes)}
        self.modification_count += 1
    def set_current_pilots(self, rhdata):
        '''Populate node_pilots for the current heat from the database (rhdata).'''
        self.node_pilots = {}
        for idx in range(self.num_nodes):
            self.node_pilots[idx] = None
        for heatNode in rhdata.get_heatNodes_by_heat(self.current_heat):
            if heatNode.pilot_id != RHUtils.PILOT_ID_NONE:
                db_pilot = rhdata.get_pilot(heatNode.pilot_id)
                self.node_pilots[heatNode.node_index] = RHPilot(db_pilot)
        self.modification_count += 1
    def init_node_finished_flags(self, heatNodes):
        '''Initialize finished flags: False for seats with a pilot, None for empty seats.'''
        self.node_has_finished = {}
        for heatNode in heatNodes:
            if heatNode.node_index < self.num_nodes:
                if heatNode.pilot_id != RHUtils.PILOT_ID_NONE:
                    self.node_has_finished[heatNode.node_index] = False
                else:
                    self.node_has_finished[heatNode.node_index] = None
    def set_node_finished_flag(self, node_index):
        self.node_has_finished[node_index] = True
    def get_node_finished_flag(self, node_index):
        '''Return True/False for occupied seats, None for empty/unknown seats.'''
        return self.node_has_finished.get(node_index, None)
    def check_all_nodes_finished(self):
        '''True when no occupied seat is still unfinished (None entries are ignored).'''
        return False not in self.node_has_finished.values()
    def add_new_pass(self, node_index, lap_data):
        '''Append a lap object for a node and bump the modification counter.'''
        self.node_passes[node_index].append(lap_data)
        self.modification_count += 1
    def get_valid_laps(self, late_lap_flag=False):
        # return valid (non-deleted) lap objects
        filtered = {}
        if not late_lap_flag:
            for node_index in self.node_passes:
                filtered[node_index] = list(filter(lambda lap : lap['deleted'] == False, self.node_passes[node_index]))
        else:
            # when late_lap_flag is set, also keep deleted laps marked 'late_lap'
            for node_index in self.node_passes:
                filtered[node_index] = list(filter(lambda lap : \
                    (lap['deleted'] == False or lap.get('late_lap', False)), self.node_passes[node_index]))
        return filtered
    def any_laps_recorded(self):
        '''True if any node has at least one recorded pass.'''
        for node_index in range(self._num_nodes):
            if len(self.node_passes[node_index]) > 0:
                return True
        return False
RACE_START_DELAY_EXTRA_SECS = 0.9 # amount of extra time (seconds) added to prestage time
class RHPilot:
    """Read-only snapshot of a pilot's database record, taken at heat setup."""

    def __init__(self, db_pilot):
        self._id = db_pilot.id
        self._name = db_pilot.name
        self._callsign = db_pilot.callsign
        self._team = db_pilot.team
        # Fall back to the callsign when no phonetic spelling is stored.
        self._phonetic = db_pilot.phonetic or db_pilot.callsign

    @property
    def id(self):
        """Database id of the pilot."""
        return self._id

    @property
    def name(self):
        """Pilot's full name."""
        return self._name

    @property
    def callsign(self):
        """Pilot's callsign."""
        return self._callsign

    @property
    def phonetic(self):
        """Phonetic spelling for voice callouts (callsign when unset)."""
        return self._phonetic

    @property
    def team(self):
        """Pilot's team designation."""
        return self._team
class RaceMode(IntEnum):
    """How a race's duration is bounded."""
    FIXED_TIME = 0
    NO_TIME_LIMIT = 1
class StartBehavior(IntEnum):
    """Race start / first-crossing behavior options."""
    HOLESHOT = 0
    FIRST_LAP = 1
    STAGGERED = 2
class StagingTones(IntEnum):
    """Staging-tone options for the pre-start countdown."""
    TONES_NONE = 0
    TONES_ONE = 1
    TONES_ALL = 2
    TONES_3_2_1 = 3
class MinLapBehavior(IntEnum):
    """How laps shorter than the minimum lap time are handled."""
    HIGHLIGHT_SHORT_LAPS = 0
    DISCARD_SHORT_LAPS = 1
class WinCondition(IntEnum):
    """Criteria used to rank pilots / declare a race winner."""
    NONE = 0
    MOST_PROGRESS = 1 # most laps in fastest time
    FIRST_TO_LAP_X = 2
    FASTEST_LAP = 3
    FASTEST_3_CONSECUTIVE = 4
    MOST_LAPS = 5 # lap count only
    MOST_LAPS_OVERTIME = 6 # lap count only, laps and time after T=0
class WinStatus(IntEnum):
    """Win-declaration state of the current race.

    Converted from a plain class to IntEnum for consistency with the other
    enumerations in this module (RaceMode, WinCondition, ...); IntEnum members
    compare equal to the legacy plain-int values, so existing comparisons and
    serialization as integers keep working.
    """
    NONE = 0
    TIE = 1
    PENDING_CROSSING = 2
    DECLARED = 3
    OVERTIME = 4
class RaceStatus(IntEnum):
    """Lifecycle state of a race.

    Converted from a plain class to IntEnum for consistency with the other
    enumerations in this module; members compare equal to the legacy int
    values. Note the values are intentionally non-sequential (STAGING == 3).
    """
    READY = 0
    STAGING = 3
    RACING = 1
    DONE = 2
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,040
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/tools/rssi_dump.py
|
import gevent.monkey
gevent.monkey.patch_all()
import logging
import sys
from . import get_interface
from rh.interface import RHInterface
def start(port, freq, write_buffer):
    """Continuously poll RSSI history from all nodes and dump captures to CSV.

    port: interface address (serial port, i2c:..., or :socket port).
    freq: frequency to tune every node to.
    write_buffer: callable(filename, buffer) used to persist each capture.

    Runs until interrupted. A 0xFF byte in the stream marks the end of a
    capture; each completed capture is written to a numbered CSV file.
    """
    INTERFACE = get_interface(port)
    for node in INTERFACE.nodes:
        INTERFACE.set_mode(node.index, RHInterface.RSSI_HISTORY_MODE)
        INTERFACE.set_frequency(node.index, freq)
    count = 1
    dataBuffer = []
    # try/finally replaces the former bare "except: ...; raise": cleanup still
    # runs for any exception (including KeyboardInterrupt) without catching it,
    # and the infinite loop means there is no normal exit path to worry about.
    try:
        while True:
            gevent.sleep(0.1)
            for node in INTERFACE.nodes:
                data = INTERFACE.read_rssi_history(node.index)
                if data is not None and len(data) > 0:
                    for rssi in data:
                        if rssi == 0xFF:  # end of data or no data
                            if len(dataBuffer) > 0:
                                filename = "rssi_dump_{}.csv".format(count)
                                write_buffer(filename, dataBuffer)
                                dataBuffer = []
                                count += 1
                        else:
                            dataBuffer.append(rssi)
    finally:
        # Flush whatever was collected and release the interface before the
        # interrupting exception propagates.
        write_buffer('rssi_dump.csv', dataBuffer)
        INTERFACE.close()
def write_buffer(filename, buf):
    """Write one value of *buf* per line to *filename* and report the count."""
    with open(filename, 'w') as out:
        out.writelines('{}\n'.format(value) for value in buf)
    print("Written {} ({})".format(filename, len(buf)))
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    if len(sys.argv) < 3:
        print('Please specify a serial port, e.g. COM12 (or I2C address, e.g. i2c:1/0x08, or socket port, e.g. :5005), and a frequency.')
        # sys.exit(1) instead of exit(): exit() depends on the site module and
        # returned status 0 even though the arguments were missing.
        sys.exit(1)
    port = sys.argv[1]
    freq = int(sys.argv[2])
    start(port, freq, write_buffer)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,041
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/orgs/multigp_org.py
|
import re as regex
import requests
TIMEOUT = 5
class MultiGP:
    """MultiGP race-management service integration.

    Recognizes MultiGP pilot/event URLs, imports event data via the MultiGP
    web service API, and uploads final race results. Operations that need the
    API return early (empty dict / None) when no API key is configured.
    """

    def __init__(self, api_key):
        # api_key may be None; API-backed operations then no-op.
        self.api_key = api_key

    def is_pilot_url(self, url):
        """Return (subdomain, callsign) if *url* is a MultiGP pilot page, else None.

        Patterns are raw strings so that '\\.' and '\\?' are regex escapes, not
        invalid string-literal escapes (a SyntaxWarning since Python 3.12).
        """
        matches = regex.match(r'https://([a-z]+)\.multigp\.com/pilots/view/\?pilot=(.*)', url)
        if matches:
            return (matches.group(1), matches.group(2))
        return None

    def get_pilot_data(self, url, pilot_id):
        """Scrape public profile data for a pilot.

        pilot_id: (subdomain, callsign) tuple as returned by is_pilot_url().
        Returns a dict with at least 'callsign'; adds 'logo' when a profile
        image is found on the pilot's page.
        """
        callsign = pilot_id[1]
        pilot_data = {}
        pilot_data['callsign'] = callsign
        host = pilot_id[0] + '.multigp.com'
        profile_url = 'https://' + host + '/mgp/user/view/' + callsign
        # bypass CORS
        headers = {'Referer': url, 'Host': host, 'X-Requested-With': 'XMLHttpRequest'}
        resp = requests.get(profile_url, headers=headers, timeout=TIMEOUT)
        logo_match = regex.search("<img id=\"profileImage\"(?:.*)(?=src)src=\"([^\"]*)\"", resp.text)
        if logo_match:
            pilot_data['logo'] = logo_match.group(1)
        return pilot_data

    def is_event_url(self, url):
        """Return (subdomain, event id) if *url* is a MultiGP race-view URL, else None.

        Both accepted URL shapes (query-string and path-style id) are tried in
        turn, replacing the previous nested if/else ladder.
        """
        patterns = (
            r'https://([a-z]+)\.multigp\.com/mgp/multigpwebservice/race/view\?id=([0-9]+)',
            r'https://([a-z]+)\.multigp\.com/mgp/multigpwebservice/race/view/id/([0-9]+)',
        )
        for pattern in patterns:
            matches = regex.match(pattern, url)
            if matches:
                return (matches.group(1), matches.group(2))
        return None

    def get_event_data(self, url, event_id):
        """Fetch event data from the MultiGP web service.

        event_id: (subdomain, race id) tuple as returned by is_event_url().
        Returns the converted event dict, or {} when no API key is configured.
        """
        if not self.api_key:
            return {}
        data = {'apiKey': self.api_key}
        resp = requests.post(url, json=data, timeout=TIMEOUT)
        mgp_data = resp.json()
        host = event_id[0] + '.multigp.com'
        return self.convert_multigp_json(mgp_data, host)

    def convert_multigp_json(self, mgp_data, host):
        """Convert a MultiGP race JSON payload into the internal event format.

        Builds the global seat/frequency list from heat 1's entries and a
        single 'Qualifying' stage containing all heats.
        """
        data = mgp_data['data']
        event_name = data['name']
        event_date = data['startDate']
        race_class_name = 'Open'
        seats = []
        pilots = {}
        heats = []
        for entry in data['entries']:
            callsign = entry['userName']
            name = entry['firstName'] + ' ' + entry['lastName']
            pilots[callsign] = {'name': name, 'url': 'https://'+host+'/pilots/view/?pilot='+callsign, 'multigpId': entry['pilotId']}
            freq = entry['frequency']
            band = entry['band']
            channel = entry['channel']
            # MultiGP groups/slots are 1-based; convert to 0-based indices.
            heat_idx = int(entry['group']) - 1
            seat_idx = int(entry['groupSlot']) - 1
            # Derive the global seat/frequency assignment from the first heat only.
            if heat_idx == 0:
                while seat_idx >= len(seats):
                    seats.append(None)
                seat = {'frequency': freq}
                if band and channel:
                    seat['bandChannel'] = band + str(channel)
                seats[seat_idx] = seat
            # Grow the heat list on demand, creating heats lazily.
            while heat_idx >= len(heats):
                heats.append(None)
            heat = heats[heat_idx]
            if not heat:
                heat = {'name': 'Heat ' + str(heat_idx + 1),
                        'class': race_class_name,
                        'seats': []}
                heats[heat_idx] = heat
            heat_seats = heat['seats']
            while seat_idx >= len(heat_seats):
                heat_seats.append(None)
            heat_seats[seat_idx] = callsign
        return {
            'name': event_name,
            'date': event_date,
            'classes': {race_class_name: {}},
            'seats': seats,
            'pilots': pilots,
            'stages': [
                {'name': 'Qualifying',
                 'heats': heats}
            ]
        }

    def upload_results(self, event_id, leaderboards):
        """Upload final overall results to MultiGP.

        Ranks pilots in the order of the final stage's first leaderboard.
        Returns the service's status value, or None when no API key is
        configured or no final-stage leaderboard exists.
        """
        if not self.api_key:
            return
        host = event_id[0] + '.multigp.com'
        results_url = 'https://'+host+'/mgp/multigpwebservice/race/captureOverallRaceResult?id='+event_id[1]
        final_stage_leaderboards = leaderboards['stages'][-1]['leaderboards']
        if not final_stage_leaderboards:
            return
        final_leaderboard = next(iter(final_stage_leaderboards.values()))
        pilots = leaderboards['pilots']
        ranking = []
        pos = 1
        for entry in final_leaderboard['ranking']:
            pilot = pilots[entry['pilot']]
            pilot_id = pilot.get('multigpId')
            # Only pilots with a MultiGP id can be ranked on their side.
            if pilot_id is not None:
                ranking.append({
                    'orderNumber': pos,
                    'pilotId': pilot_id
                })
            pos += 1
        data = {
            'apiKey': self.api_key,
            'data': {
                'raceId': event_id[1],
                'bracketResults': ranking
            }
        }
        resp = requests.post(results_url, json=data, timeout=TIMEOUT)
        ok_data = resp.json()
        return ok_data['status']
def discover(config, *args, **kwargs):
    """Plugin entry point: return the MultiGP org instances (API key from config, if any)."""
    if config:
        api_key = config.GENERAL.get('MULTIGP_API_KEY')
    else:
        api_key = None
    return [MultiGP(api_key)]
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,042
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/interface/RHInterface.py
|
'''RotorHazard hardware interface layer.'''
import os
import gevent
import logging
from collections import deque
import json
import rh.interface.nodes as node_pkg
from rh.util.Plugins import Plugins
from . import pack_8, unpack_8, unpack_8_signed, pack_16, unpack_16, SampleHistory
from .BaseHardwareInterface import BaseHardwareInterface
from .Node import Node, NodeManager
from rh.util import Averager
# --- interface configuration defaults ---
DEFAULT_WARN_LOOP_TIME = 1500
DEFAULT_RECORD_BUFFER_SIZE = 10000
JSONL_RECORD_FORMAT = 'jsonl'
BINARY_RECORD_FORMAT = 'bin'
DEFAULT_RECORD_FORMAT = JSONL_RECORD_FORMAT
STATS_WINDOW_SIZE = 100
# --- node protocol command bytes (read commands) ---
READ_ADDRESS = 0x01 # Gets i2c address of arduino (1 byte)
READ_MODE = 0x02
READ_FREQUENCY = 0x03 # Gets channel frequency (2 byte)
READ_RSSI = 0x04
READ_TIME_MILLIS = 0x05 # read current 'millis()' time value
READ_MULTINODE_COUNT = 0x06 # read # of nodes handled by processor
READ_CURNODE_INDEX = 0x07 # read index of current node for processor
READ_LAP_STATS = 0x08
READ_ENTER_STATS = 0x09
READ_EXIT_STATS = 0x10
READ_ENTER_AT_LEVEL = 0x11
READ_EXIT_AT_LEVEL = 0x12
READ_RHFEAT_FLAGS = 0x19 # read feature flags value
READ_REVISION_CODE = 0x1A # read NODE_API_LEVEL and verification value
READ_RSSI_STATS = 0x20
READ_ANALYTICS = 0x21
READ_RSSI_HISTORY = 0x22
READ_SCAN_HISTORY = 0x23
READ_ADC = 0x34
READ_FW_VERSION = 0x3C # read firmware version string
READ_FW_BUILDDATE = 0x3D # read firmware build date string
READ_FW_BUILDTIME = 0x3E # read firmware build time string
READ_FW_PROCTYPE = 0x3F # read node processor type
# --- node protocol command bytes (write commands) ---
WRITE_MODE = 0x42
WRITE_FREQUENCY = 0x43 # Sets frequency (2 byte)
WRITE_CURNODE_INDEX = 0x47 # write index of current node for processor
WRITE_ENTER_AT_LEVEL = 0x51
WRITE_EXIT_AT_LEVEL = 0x52
SEND_STATUS_MESSAGE = 0x75 # send status message from server to node
FORCE_END_CROSSING = 0x78 # kill current crossing flag regardless of RSSI value
JUMP_TO_BOOTLOADER = 0x7E # jump to bootloader for flash update
# node operating modes (values for READ_MODE/WRITE_MODE)
TIMER_MODE = 0
SCANNER_MODE = 1
RSSI_HISTORY_MODE = 2
# upper-byte values for SEND_STATUS_MESSAGE payload (lower byte is data)
STATMSG_SDBUTTON_STATE = 0x01 # shutdown button state (1=pressed, 0=released)
STATMSG_SHUTDOWN_STARTED = 0x02 # system shutdown started
STATMSG_SERVER_IDLE = 0x03 # server-idle tick message
FW_TEXT_BLOCK_SIZE = 16 # length of data returned by 'READ_FW_...' fns
# prefix strings for finding text values in firmware '.bin' files
FW_VERSION_PREFIXSTR = "FIRMWARE_VERSION: "
FW_BUILDDATE_PREFIXSTR = "FIRMWARE_BUILDDATE: "
FW_BUILDTIME_PREFIXSTR = "FIRMWARE_BUILDTIME: "
FW_PROCTYPE_PREFIXSTR = "FIRMWARE_PROCTYPE: "
# features flags for value returned by READ_RHFEAT_FLAGS command
RHFEAT_STM32_MODE = 0x0004 # STM 32-bit processor running multiple nodes
RHFEAT_JUMPTO_BOOTLDR = 0x0008 # JUMP_TO_BOOTLOADER command supported
RHFEAT_IAP_FIRMWARE = 0x0010 # in-application programming of firmware supported
RHFEAT_PH = 0x0100
# accepted video frequency range (MHz)
MIN_FREQ = 5645
MAX_FREQ = 5945
MIN_ADC_VALUE = 5 # reject nodes with ADC readings below this value
logger = logging.getLogger(__name__)
def unpack_rssi(node, data):
    """Decode a single RSSI byte from *data* (*node* is unused here, kept for signature parity)."""
    return unpack_8(data)
def unpack_time_since(node, cmd, data):
    """Decode a 16-bit 'milliseconds ago' value, warning when it saturated at 0xFFFF."""
    elapsed_ms = unpack_16(data)
    if elapsed_ms >= 0xFFFF:
        logger.warning("Command {:#04x}: maximum lookback time exceeded on node {}".format(cmd, node))
    return elapsed_ms
def has_data(data):
    """True when *data* is a non-empty (non-None) buffer."""
    return data is not None and len(data) != 0
class RHNodeManager(NodeManager):
    """Manages one hardware connection hosting one or more RH timing nodes."""
    TYPE = "RH"
    MAX_RETRY_COUNT = 2  # default retry count for discovery-time reads
    def __init__(self):
        super().__init__()
        # Index of the node currently selected on a multi-node processor.
        self.curr_multi_node_index = None
        # Caches populated by the read_* helpers / discover_nodes().
        self.api_level = 0
        self.max_rssi_value = 255
        self.rhfeature_flags = 0
        self.firmware_version_str = None
        self.firmware_proctype_str = None
        self.firmware_timestamp_str = None
    def _create_node(self, index, multi_node_index):
        # Factory hook used by the base NodeManager.
        node = RHNode(index, multi_node_index, self)
        return node
    def _select_one(self, node):
        # Single-node processor: nothing to switch.
        return True
    def _select_multi(self, node):
        # Multi-node processor: switch the processor's current node if needed.
        if self.curr_multi_node_index != node.multi_node_index:
            # Temporarily bypass multi-selection while issuing the select
            # command itself, so it does not recurse through this method.
            curr_select = self.select
            self.select = self._select_one
            # NOTE(review): writes multi_node_slot_index but the result is
            # compared against multi_node_index below — confirm these values
            # are interchangeable for this hardware.
            self.curr_multi_node_index = node.set_and_validate_value_8(WRITE_CURNODE_INDEX, READ_CURNODE_INDEX, node.multi_node_slot_index)
            self.select = curr_select
        return self.curr_multi_node_index == node.multi_node_index
    def read_revision_code(self):
        """Read and cache the node API level (0 when unavailable or invalid)."""
        self.api_level = 0
        try:
            rev_code = self.get_value_16(READ_REVISION_CODE, RHNodeManager.MAX_RETRY_COUNT)
            # check verification code (high byte must be 0x25)
            if rev_code and (rev_code >> 8) == 0x25:
                self.api_level = rev_code & 0xFF
        except Exception:
            logger.exception('Error fetching READ_REVISION_CODE from {}'.format(self.addr))
        return self.api_level
    def read_address(self):
        """Read the device's reported I2C address (None on error)."""
        node_addr = None
        try:
            node_addr = self.get_value_8(READ_ADDRESS, RHNodeManager.MAX_RETRY_COUNT)
        except Exception:
            logger.exception('Error fetching READ_ADDRESS from {}'.format(self.addr))
        return node_addr
    def read_multinode_count(self):
        """Read how many vrx modules this processor hosts (None on error)."""
        multi_count = None
        try:
            multi_count = self.get_value_8(READ_MULTINODE_COUNT, RHNodeManager.MAX_RETRY_COUNT)
        except Exception:
            logger.exception('Error fetching READ_MULTINODE_COUNT from {}'.format(self.addr))
        return multi_count
    def read_feature_flags(self):
        """Read and cache the RHFEAT_* feature-flag bitmask (0 on error)."""
        self.rhfeature_flags = 0
        try:
            self.rhfeature_flags = self.get_value_16(READ_RHFEAT_FLAGS, RHNodeManager.MAX_RETRY_COUNT)
        except Exception:
            logger.exception('Error fetching READ_RHFEAT_FLAGS from {}'.format(self.addr))
        return self.rhfeature_flags
    def read_firmware_version(self):
        '''Reads firmware version string (cached; None on error/no data)'''
        self.firmware_version_str = None
        try:
            data = self.read_command(READ_FW_VERSION, FW_TEXT_BLOCK_SIZE, RHNodeManager.MAX_RETRY_COUNT)
            self.firmware_version_str = bytearray(data).decode("utf-8").rstrip('\0') \
                                        if data is not None else None
        except Exception:
            logger.exception('Error fetching READ_FW_VERSION from {}'.format(self.addr))
        return self.firmware_version_str
    def read_string(self, command, max_retries=MAX_RETRY_COUNT):
        """Read a fixed-size text block and strip NUL padding (None when no data)."""
        data = self.read_command(command, FW_TEXT_BLOCK_SIZE, max_retries)
        return bytearray(data).decode("utf-8").rstrip('\0') \
               if data is not None else None
    def read_firmware_proctype(self):
        '''Reads firmware processor-type string (cached; None on error)'''
        self.firmware_proctype_str = None
        try:
            self.firmware_proctype_str = self.read_string(READ_FW_PROCTYPE, RHNodeManager.MAX_RETRY_COUNT)
        except Exception:
            logger.exception('Error fetching READ_FW_PROCTYPE from {}'.format(self.addr))
        return self.firmware_proctype_str
    def read_firmware_timestamp(self):
        '''Reads firmware build date/time strings (combined "date time"; None on error)'''
        self.firmware_timestamp_str = None
        try:
            data = self.read_string(READ_FW_BUILDDATE, RHNodeManager.MAX_RETRY_COUNT)
            if data is not None:
                self.firmware_timestamp_str = data
                # Only append the build time when the date read succeeded.
                data = self.read_string(READ_FW_BUILDTIME, RHNodeManager.MAX_RETRY_COUNT)
                if data is not None:
                    self.firmware_timestamp_str += " " + data
        except Exception:
            logger.exception('Error fetching READ_FW_DATE/TIME from {}'.format(self.addr))
        return self.firmware_timestamp_str
    def send_status_message(self, msgTypeVal, msgDataVal):
        # send status message to node; type in high byte, data in low byte
        try:
            data = ((msgTypeVal & 0xFF) << 8) | (msgDataVal & 0xFF)
            self.set_value_16(SEND_STATUS_MESSAGE, data)
            return True
        except Exception:
            logger.exception('Error sending status message to {}'.format(self.addr))
            return False
    def discover_nodes(self, next_index):
        """Probe the processor and register its operational nodes.

        next_index: global index to assign to the first discovered node.
        Returns True when a supported processor was found (even with zero
        operational modules), False on unsupported or unreadable API level.
        """
        self.read_revision_code()
        if self.api_level >= 36:
            self.max_rssi_value = 255
            self.read_feature_flags()
            multi_count = self.read_multinode_count()
            if multi_count is None or multi_count > 32:
                logger.error('Bad READ_MULTINODE_COUNT value {} fetched from {}'.format(multi_count, self.addr))
                multi_count = 1
            elif multi_count == 0:
                logger.warning('Fetched READ_MULTINODE_COUNT value of zero from {} (no vrx modules detected)'.format(self.addr))
            if multi_count > 0:
                self.select = self._select_multi if multi_count > 1 else self._select_one
            info_strs = ["API level={}".format(self.api_level)]
            if self.read_firmware_version():
                info_strs.append("fw version={}".format(self.firmware_version_str))
            if self.read_firmware_proctype():
                info_strs.append("fw type={}".format(self.firmware_proctype_str))
            if self.read_firmware_timestamp():
                info_strs.append("fw timestamp: {}".format(self.firmware_timestamp_str))
            if multi_count == 1:
                logger.info("Node found at {}: {}".format(self.addr, ', '.join(info_strs)))
            else:
                logger.info("Multi-node (with {} modules) found at {}: {}".format(multi_count, self.addr, ', '.join(info_strs)))
            for slot in range(multi_count):
                node = self.add_node(next_index)
                node.multi_node_slot_index = slot
                # Use the ADC reading to weed out unpopulated/dead modules.
                adc_val = node.read_adc()
                if adc_val != 0xFFFF and adc_val > MIN_ADC_VALUE:
                    logger.info("Node {} (slot {}) added at {}".format(next_index+1, node.multi_node_slot_index+1, node.addr))
                    next_index += 1
                else:
                    logger.info("Ignoring non-operational node (slot {} at {} reported an ADC value of {})".format(node.multi_node_slot_index+1, node.addr, adc_val))
                    self.remove_node(node)
            return True
        elif self.api_level > 0:
            logger.error('Unsupported API level {} - please upgrade'.format(self.api_level))
            return False
        else:
            logger.error('Unable to fetch revision code from {}'.format(self.addr))
            return False
class RHNode(Node):
def __init__(self, index, multi_node_index, manager):
super().__init__(index, multi_node_index, manager)
self.lifetime_history = SampleHistory()
self.used_history_count = 0
self.empty_history_count = 0
self._loop_time_stats = Averager(STATS_WINDOW_SIZE)
self._roundtrip_stats = Averager(STATS_WINDOW_SIZE)
self.data_logger = None
    @Node.loop_time.setter # type: ignore
    def loop_time(self, v):
        # Delegate to the base property, then fold the value into rolling stats.
        Node.loop_time.fset(self, v)
        self._loop_time_stats.append(v)
    def reset(self):
        """Reset node state and clear the accumulated loop-time statistics."""
        super().reset()
        self._loop_time_stats.clear()
def read_adc(self):
adc_data = self.read_command(READ_ADC, 2)
return unpack_16(adc_data)
def read_rssi(self):
rssi_data = self.read_command(READ_RSSI, 3)
return unpack_rssi(self, rssi_data)
def read_frequency(self):
freq_data = self.read_command(READ_FREQUENCY, 2)
return unpack_16(freq_data)
def get_sent_time_ms(self):
server_roundtrip_ms = self.io_response_ms - self.io_request_ms
server_oneway_ms = round(server_roundtrip_ms / 2)
sent_timestamp_ms = self.io_response_ms - server_oneway_ms
return sent_timestamp_ms, server_roundtrip_ms
    def unpack_rssi(self, data):
        """Decode a READ_RSSI response.

        Returns (sent_timestamp_ms, node_rssi, lap_count, is_crossing); all
        None when *data* is empty, node_rssi None when out of valid range.
        """
        sent_timestamp_ms = None
        node_rssi = None
        lap_count = None
        is_crossing = None
        if has_data(data):
            sent_timestamp_ms, _ = self.get_sent_time_ms()
            rssi_val = unpack_rssi(self, data)  # byte 0: current RSSI
            if self.is_valid_rssi(rssi_val):
                node_rssi = rssi_val
            lap_count = unpack_8(data[1:])  # byte 1: lap counter
            is_crossing = (unpack_8(data[2:]) == 1)  # byte 2: crossing flag
            if self.data_logger is not None:
                # Record raw payload and decoded fields for offline analysis.
                self.data_logger.data_buffer.append((
                    READ_RSSI,
                    data,
                    (node_rssi, lap_count, is_crossing)
                ))
        return sent_timestamp_ms, node_rssi, lap_count, is_crossing
    def unpack_rssi_stats(self, data):
        """Decode a READ_RSSI_STATS response: (peak_rssi, nadir_rssi).

        Either value is None when missing or out of the valid RSSI range.
        """
        peak_rssi = None
        nadir_rssi = None
        if has_data(data):
            rssi_val = unpack_rssi(self, data)  # byte 0: peak RSSI
            if self.is_valid_rssi(rssi_val):
                peak_rssi = rssi_val
            rssi_val = unpack_rssi(self, data[1:])  # byte 1: nadir RSSI
            if self.is_valid_rssi(rssi_val):
                nadir_rssi = rssi_val
            if self.data_logger is not None:
                self.data_logger.data_buffer.append((
                    READ_RSSI_STATS,
                    data,
                    (peak_rssi, nadir_rssi)
                ))
        return peak_rssi, nadir_rssi
    def unpack_trigger_stats(self, cmd, data):
        """Decode an enter/exit trigger-stats response for command *cmd*.

        Returns (trigger_count, trigger_timestamp_ms, trigger_rssi,
        trigger_lifetime); all None when *data* is empty, trigger_rssi None
        when out of the valid range.
        """
        trigger_count = None
        trigger_timestamp_ms = None
        trigger_rssi = None
        trigger_lifetime = None
        if has_data(data):
            sent_timestamp_ms, _ = self.get_sent_time_ms()
            trigger_count = unpack_8(data)  # byte 0: trigger counter
            # bytes 1-2: ms elapsed since the trigger, relative to send time
            ms_since_trigger = unpack_time_since(self, cmd, data[1:])
            trigger_timestamp_ms = sent_timestamp_ms - ms_since_trigger
            rssi_val = unpack_rssi(self, data[3:])  # byte 3: RSSI at trigger
            if self.is_valid_rssi(rssi_val):
                trigger_rssi = rssi_val
            trigger_lifetime = unpack_8(data[4:])  # byte 4: lifetime at trigger
            if self.data_logger is not None:
                self.data_logger.data_buffer.append((
                    cmd,
                    data,
                    (trigger_count, ms_since_trigger, trigger_rssi, trigger_lifetime)
                ))
        return trigger_count, trigger_timestamp_ms, trigger_rssi, trigger_lifetime
def unpack_lap_stats(self, data):
    '''Decode a READ_LAP_STATS response.

    Returns (lap_count, lap_timestamp_ms, lap_peak_rssi, lap_nadir_rssi);
    each field is None when unavailable. Also records the measured server
    round-trip into the node's round-trip statistics.
    '''
    lap_count = lap_timestamp_ms = lap_peak_rssi = lap_nadir_rssi = None
    if has_data(data):
        sent_timestamp_ms, server_roundtrip_ms = self.get_sent_time_ms()
        self._roundtrip_stats.append(server_roundtrip_ms)
        lap_count = unpack_8(data)
        ms_since_lap = unpack_time_since(self, READ_LAP_STATS, data[1:])
        # convert "ms since lap" into an absolute server timestamp
        lap_timestamp_ms = sent_timestamp_ms - ms_since_lap
        candidate = unpack_rssi(self, data[3:])
        if self.is_valid_rssi(candidate):
            lap_peak_rssi = candidate
        candidate = unpack_rssi(self, data[4:])
        if self.is_valid_rssi(candidate):
            lap_nadir_rssi = candidate
        if self.data_logger is not None:
            self.data_logger.data_buffer.append(
                (READ_LAP_STATS, data,
                 (lap_count, ms_since_lap, lap_peak_rssi, lap_nadir_rssi)))
    return lap_count, lap_timestamp_ms, lap_peak_rssi, lap_nadir_rssi
def unpack_analytics(self, data):
    '''Decode a READ_ANALYTICS response.

    Returns (sent_timestamp_ms, lifetime, loop_time, extremum_rssi,
    extremum_timestamp_ms, extremum_duration_ms); fields are None when
    no data was received. Also tallies whether the RSSI history buffer
    held a valid extremum (used vs empty counters).
    '''
    sent_timestamp_ms = lifetime = loop_time = None
    extremum_rssi = extremum_timestamp_ms = extremum_duration_ms = None
    if has_data(data):
        sent_timestamp_ms, _ = self.get_sent_time_ms()
        lifetime = unpack_8_signed(data)
        loop_time = unpack_16(data[1:])
        candidate = unpack_rssi(self, data[3:])
        if self.is_valid_rssi(candidate):
            extremum_rssi = candidate
            self.used_history_count += 1
        else:
            self.empty_history_count += 1
        ms_since_first_time = unpack_time_since(self, READ_ANALYTICS, data[4:])  # ms *since* the first time
        extremum_timestamp_ms = sent_timestamp_ms - ms_since_first_time
        extremum_duration_ms = unpack_16(data[6:])
        if self.data_logger is not None:
            self.data_logger.data_buffer.append(
                (READ_ANALYTICS, data,
                 (lifetime, loop_time, extremum_rssi, ms_since_first_time,
                  extremum_duration_ms)))
    return (sent_timestamp_ms, lifetime, loop_time, extremum_rssi,
            extremum_timestamp_ms, extremum_duration_ms)
def poll_command(self, command, size):
    '''Read polled data without retries.

    The poll loop re-issues the command continually, so a transient
    failure is simply retried on the next cycle; failures log at DEBUG.
    '''
    # as we are continually polling, no need to retry command
    return self.read_command(command, size, max_retries=0, log_level=logging.DEBUG)
def summary_stats(self):
    '''Log a DEBUG summary: comm round-trip, loop time, history utilisation.'''
    total_count = self.used_history_count + self.empty_history_count
    utilisation = self.used_history_count / total_count if total_count > 0 else 0
    lines = [
        "Node {}".format(self),
        "\tComm round-trip (ms): {}".format(self._roundtrip_stats.formatted(1)),
        "\tLoop time (us): {}".format(self._loop_time_stats.formatted(0)),
        "\tRSSI history buffering utilisation: {:.2%}".format(utilisation),
    ]
    logger.debug('\n'.join(lines))
class RHInterface(BaseHardwareInterface):
def __init__(self, *args, **kwargs):
# NOTE(review): original indentation was lost in this capture; statements
# below are the constructor body in original order.
# Discover node managers/nodes, then read each node's initial config.
super().__init__()
self.update_count = 0
self.warn_loop_time = kwargs['warn_loop_time'] if 'warn_loop_time' in kwargs else DEFAULT_WARN_LOOP_TIME
# firmware-metadata constants re-exposed as instance attributes
self.FW_TEXT_BLOCK_SIZE = FW_TEXT_BLOCK_SIZE
self.FW_VERSION_PREFIXSTR = FW_VERSION_PREFIXSTR
self.FW_BUILDDATE_PREFIXSTR = FW_BUILDDATE_PREFIXSTR
self.FW_BUILDTIME_PREFIXSTR = FW_BUILDTIME_PREFIXSTR
self.FW_PROCTYPE_PREFIXSTR = FW_PROCTYPE_PREFIXSTR
self.fwupd_serial_port = None # serial port for in-app update of node firmware
self.node_managers = Plugins(suffix='node')
self.discover_nodes(*args, **kwargs)
# data-logging knobs come from the environment, with defaults
self.data_logger_buffer_size = int(os.environ.get('RH_RECORD_BUFFER', DEFAULT_RECORD_BUFFER_SIZE))
self.data_logger_format = os.environ.get('RH_RECORD_FORMAT', DEFAULT_RECORD_FORMAT)
# read each node's frequency and thresholds; a node that cannot report
# its frequency is treated as fatal
for node in self.nodes:
node.frequency = node.get_value_16(READ_FREQUENCY)
if not node.frequency:
raise RuntimeError('Unable to read frequency value from node {0}'.format(node))
if node.manager.api_level >= 36:
rssi_stats_data = node.read_command(READ_RSSI_STATS, 2)
node.unpack_rssi_stats(rssi_stats_data)
node.enter_at_level = self.get_value_rssi(node, READ_ENTER_AT_LEVEL)
node.exit_at_level = self.get_value_rssi(node, READ_EXIT_AT_LEVEL)
logger.debug("Node {}: Freq={}, EnterAt={}, ExitAt={}".format(\
node, node.frequency, node.enter_at_level, node.exit_at_level))
else:
logger.warning("Node {} has obsolete API_level ({})".format(node, node.manager.api_level))
for node_manager in self.node_managers:
if node_manager.rhfeature_flags:
# if first node manager supports in-app fw update then save port name
if (not self.fwupd_serial_port) and hasattr(node_manager, 'serial_io') and \
(node_manager.rhfeature_flags & (RHFEAT_STM32_MODE|RHFEAT_IAP_FIRMWARE)) != 0:
self.fwupd_serial_port = node_manager.serial_io.name
break
def discover_nodes(self, *args, **kwargs):
    '''Run plugin discovery for node managers and collect their nodes.'''
    self.node_managers.discover(node_pkg, includeOffset=True, *args, **kwargs)
    for mgr in self.node_managers:
        self.nodes.extend(mgr.nodes)
def start(self):
    '''Start the interface, enabling per-node data logging when the
    RH_RECORD_NODE_<n> environment variable is set.'''
    for node in self.nodes:
        env_key = "RH_RECORD_NODE_{0}".format(node.index + 1)
        if env_key in os.environ:
            self.start_data_logger(node.index)
    super().start()
def stop(self):
    '''Stop the interface, then shut down any active per-node data loggers.'''
    super().stop()
    for node in self.nodes:
        self.stop_data_logger(node.index)
def start_data_logger(self, node_index):
    '''Open an append-mode log file for the node and attach a bounded
    record buffer; does nothing when logging is already active.'''
    node = self.nodes[node_index]
    if node.data_logger is not None:
        return
    # binary format opens 'ab', any other format opens 'at'
    mode_suffix = 'b' if self.data_logger_format == BINARY_RECORD_FORMAT else 't'
    log_file = open("node_data_{}.{}".format(node.index + 1, self.data_logger_format),
                    'a' + mode_suffix)
    logger.info("Data logging started for node {0} ({1})".format(node, log_file.name))
    log_file.data_buffer = deque([], self.data_logger_buffer_size)
    node.data_logger = log_file
def stop_data_logger(self, node_index):
    '''Flush, close and detach the node's data logger, if one is active.'''
    node = self.nodes[node_index]
    log_file = node.data_logger
    if log_file is None:
        return
    self._flush_data_logger(log_file, True)
    log_file.close()
    logger.info("Stopped data logging for node {0} ({1})".format(node, log_file.name))
    node.data_logger = None
def _flush_data_logger(self, f, force=False):
    '''Write buffered (cmd, raw_bytes, decoded_values) records to the log file.

    Flushes when `force` is True or the bounded buffer is full. Binary
    format writes cmd byte, length byte, then the raw payload; any other
    format writes one JSON object per line with the decoded values.
    '''
    buf = f.data_buffer
    # '>=' — a bounded deque can never exceed maxlen, so '>' would mean
    # the automatic (non-forced) flush never triggered
    if force or len(buf) >= buf.maxlen:
        for r_cmd, r_bytes, r_values in buf:
            if self.data_logger_format == BINARY_RECORD_FORMAT:
                # file is opened in binary mode; ints must be wrapped as bytes
                f.write(bytes([r_cmd]))
                f.write(bytes([len(r_bytes)]))
                f.write(r_bytes)
            else:
                f.write(json.dumps({'cmd': r_cmd, 'data': r_values}) + '\n')
        buf.clear()
#
# Update Loop
#
def _update(self):
# NOTE(review): original indentation was lost in this capture; statements
# below are the update-loop body in original order.
# One pass over all nodes: poll RSSI, react to enter/exit/lap events,
# poll analytics, and round-robin the RSSI-stats poll across nodes.
node_sleep_interval = self.update_sleep/max(len(self.nodes), 1)
if self.nodes:
# only one node per pass polls RSSI stats (round-robin by update_count)
rssi_stats_node_idx = self.update_count % len(self.nodes)
for node in self.nodes:
if node.scan_enabled and callable(self.read_scan_history):
# scanner mode: accumulate freq->rssi samples instead of lap timing
freqs, rssis = self.read_scan_history(node.index)
for freq, rssi in zip(freqs, rssis):
node.scan_data[freq] = rssi
elif node.frequency:
# timer mode: poll current RSSI and derive lap/crossing events
rssi_data = node.poll_command(READ_RSSI, 3)
timestamp, rssi, pass_count, is_crossing = node.unpack_rssi(rssi_data)
if timestamp is not None and rssi is not None and pass_count is not None and is_crossing is not None:
has_new_lap, has_entered, has_exited = self.is_new_lap(node, timestamp, rssi, pass_count, is_crossing)
if has_entered:
cmd = READ_ENTER_STATS
crossing_data = node.read_command(cmd, 5)
trigger_count, trigger_timestamp, trigger_rssi, trigger_lifetime = node.unpack_trigger_stats(cmd, crossing_data)
if trigger_count is not None and trigger_timestamp is not None and trigger_rssi is not None and trigger_lifetime is not None:
self.process_enter_trigger(node, trigger_count, trigger_timestamp, trigger_rssi, trigger_lifetime)
if has_exited:
cmd = READ_EXIT_STATS
crossing_data = node.read_command(cmd, 5)
trigger_count, trigger_timestamp, trigger_rssi, trigger_lifetime = node.unpack_trigger_stats(cmd, crossing_data)
if trigger_count is not None and trigger_timestamp is not None and trigger_rssi is not None and trigger_lifetime is not None:
self.process_exit_trigger(node, trigger_count, trigger_timestamp, trigger_rssi, trigger_lifetime)
if has_new_lap:
lap_stats_data = node.read_command(READ_LAP_STATS, 5)
lap_count, pass_timestamp, pass_peak_rssi, pass_nadir_rssi = node.unpack_lap_stats(lap_stats_data)
if lap_count is not None and pass_timestamp is not None:
self.process_lap_stats(node, lap_count, pass_timestamp, pass_peak_rssi, pass_nadir_rssi)
analytic_data = node.poll_command(READ_ANALYTICS, 8)
timestamp, lifetime, loop_time, extremum_rssi, extremum_timestamp, extremum_duration = node.unpack_analytics(analytic_data)
if timestamp is not None and lifetime is not None and loop_time is not None:
self.process_analytics(node, timestamp, lifetime, loop_time, extremum_rssi, extremum_timestamp, extremum_duration)
if node.index == rssi_stats_node_idx:
rssi_stats_data = node.poll_command(READ_RSSI_STATS, 2)
peak_rssi, nadir_rssi = node.unpack_rssi_stats(rssi_stats_data)
self.process_rssi_stats(node, peak_rssi, nadir_rssi)
self.process_capturing(node)
self._restore_lowered_thresholds(node)
if node.loop_time > self.warn_loop_time:
logger.warning("Abnormal loop time for node {}: {}us ({})".format(node, node.loop_time, node._loop_time_stats.formatted(0)))
if node.data_logger is not None:
self._flush_data_logger(node.data_logger)
# end mode specific code
gevent.sleep(node_sleep_interval)
# end for each node
self.update_count += 1
else:
# no nodes: still sleep so the loop doesn't spin
gevent.sleep(node_sleep_interval)
#
# Internal helper functions for setting single values
#
def set_and_validate_value_rssi(self, node, write_command, read_command, in_value):
    '''Write an 8-bit RSSI value to the node and read it back to validate.'''
    return node.set_and_validate_value_8(write_command, read_command, in_value)

def get_value_rssi(self, node, command):
    '''Read an 8-bit RSSI value from the node.'''
    return node.get_value_8(command)
def transmit_frequency(self, node, frequency):
    '''Set the node's 16-bit frequency, validating via read-back; failures
    are described by frequency_not_set_handler.'''
    return node.set_and_validate_value_16(
        WRITE_FREQUENCY, READ_FREQUENCY, frequency,
        not_set_handler=self.frequency_not_set_handler)
def frequency_not_set_handler(self, in_freq, out_freq):
    '''Explain a failed frequency write; empty string means no error.

    The node reads back 0xFFFF when the requested frequency could not be
    applied to the VRX.
    '''
    if out_freq != 0xFFFF:
        return ""
    if in_freq < MIN_FREQ:
        return "Unsupported frequency: minimum is {}".format(MIN_FREQ)
    if in_freq > MAX_FREQ:
        return "Unsupported frequency: maximum is {}".format(MAX_FREQ)
    return "Failed to set VRX register using SPI"
def transmit_enter_at_level(self, node, level):
    '''Set and validate the node's enter-at RSSI threshold.'''
    return self.set_and_validate_value_rssi(
        node, WRITE_ENTER_AT_LEVEL, READ_ENTER_AT_LEVEL, level)

def transmit_exit_at_level(self, node, level):
    '''Set and validate the node's exit-at RSSI threshold.'''
    return self.set_and_validate_value_rssi(
        node, WRITE_EXIT_AT_LEVEL, READ_EXIT_AT_LEVEL, level)
#
# External functions for setting data
#
def set_mode(self, node_index, mode):
    '''Switch a node's operating mode, validating the write by read-back.'''
    node = self.nodes[node_index]
    node.mode = node.set_and_validate_value_8(WRITE_MODE, READ_MODE, mode)
def set_frequency_scan(self, node_index, scan_enabled):
    '''Frequency scanning protocol: toggle scanner mode on a node.

    Enabling clears old scan data and switches to SCANNER_MODE; disabling
    restores TIMER_MODE and re-applies the node's original frequency.
    No-op when the requested state matches the current one.
    '''
    node = self.nodes[node_index]
    if scan_enabled == node.scan_enabled:
        return
    if scan_enabled:
        node.scan_enabled = scan_enabled
        node.scan_data = {}  # reset/clear data
        self.set_mode(node_index, SCANNER_MODE)
    else:
        self.set_mode(node_index, TIMER_MODE)
        node.scan_data = {}  # reset/clear data
        # restore original frequency (zeroed first so the re-set takes effect)
        original_freq = node.frequency
        node.frequency = 0
        self.set_frequency(node_index, original_freq)
        node.scan_enabled = scan_enabled
def force_end_crossing(self, node_index):
    '''Command the node to immediately end any in-progress gate crossing.'''
    self.nodes[node_index].set_value_8(FORCE_END_CROSSING, 0)
def jump_to_bootloader(self):
    '''Ask the first capable node manager to jump into its bootloader.'''
    for mgr in self.node_managers:
        if (mgr.rhfeature_flags & RHFEAT_JUMPTO_BOOTLDR) != 0 and hasattr(mgr, 'jump_to_bootloader'):
            mgr.jump_to_bootloader()
            return
    logger.info("Unable to find any nodes with jump-to-bootloader support")
def read_scan_history(self, node_index):
    '''Read buffered scan results as parallel lists (freqs, rssis).

    Records are 3 bytes each: a 16-bit frequency then an 8-bit RSSI;
    entries with zero frequency or an invalid RSSI are dropped.
    '''
    node = self.nodes[node_index]
    data = node.read_command(READ_SCAN_HISTORY, 9)
    freqs, rssis = [], []
    if data:
        for offset in range(0, len(data), 3):
            freq = unpack_16(data[offset:])
            rssi = unpack_8(data[offset + 2:])
            if freq > 0 and node.is_valid_rssi(rssi):
                freqs.append(freq)
                rssis.append(rssi)
    return freqs, rssis
def read_rssi_history(self, node_index):
    '''Read a block of up to 16 raw RSSI history bytes from the node.'''
    return self.nodes[node_index].read_command(READ_RSSI_HISTORY, 16)
def send_status_message(self, msgTypeVal, msgDataVal):
    '''Broadcast a status message to every node manager.

    All managers are always attempted (no short-circuit); returns True
    when at least one manager accepted the message.
    '''
    results = [mgr.send_status_message(msgTypeVal, msgDataVal)
               for mgr in self.node_managers]
    return any(results)
def send_shutdown_button_state(self, stateVal):
    '''Broadcast the shutdown-button state to the node managers.'''
    return self.send_status_message(STATMSG_SDBUTTON_STATE, stateVal)

def send_shutdown_started_message(self):
    '''Notify the node managers that server shutdown has begun.'''
    return self.send_status_message(STATMSG_SHUTDOWN_STARTED, 0)

def send_server_idle_message(self):
    '''Notify the node managers that the server is idle.'''
    return self.send_status_message(STATMSG_SERVER_IDLE, 0)
def get_hardware_interface(*args, **kwargs):
'''Returns the RotorHazard interface object.

Factory entry point; all arguments are forwarded to the RHInterface
constructor, which performs node discovery during construction.
'''
return RHInterface(*args, **kwargs)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,043
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/interface/nodes/socket_node.py
|
'''RotorHazard socket interface layer.'''
import logging
import socket
from rh.helpers import socket_url
from .. import RHInterface as rhi
logger = logging.getLogger(__name__)
class SocketNodeManager(rhi.RHNodeManager):
    '''RHNodeManager that communicates with nodes over an established TCP socket.'''

    def __init__(self, socket_obj):
        super().__init__()
        self.socket_io = socket_obj
        sock_addr = self.socket_io.getsockname()
        # address string identifying this manager (local socket address)
        self.addr = socket_url(sock_addr[0], sock_addr[1])

    def _read_command(self, command, size):
        '''Send a read command and collect its response.

        Reads size+1 bytes — one byte beyond the payload (presumably a
        framing/checksum byte; confirm against the node protocol).
        Raises ConnectionError if the peer closes the socket mid-read.
        '''
        self.socket_io.sendall(bytearray([command]))
        data = bytearray()
        remaining = size + 1
        while remaining > 0:
            partial = self.socket_io.recv(remaining)
            if not partial:
                # recv() returning b'' means the peer closed the connection;
                # without this check the loop would spin forever
                raise ConnectionError("Socket closed while reading command response")
            remaining -= len(partial)
            data.extend(partial)
        return data

    def _write_command(self, command, data):
        '''Send a write command byte followed by its payload.'''
        data_with_cmd = bytearray()
        data_with_cmd.append(command)
        data_with_cmd.extend(data)
        self.socket_io.sendall(data_with_cmd)

    def close(self):
        '''Close the underlying socket.'''
        self.socket_io.close()
def discover(idxOffset, config, *args, **kwargs):
'''Listen on each configured SOCKET_PORT and build a SocketNodeManager
per accepted connection, until the server socket times out (5s).
Node indices start at idxOffset and advance by each manager's node count.
'''
# NOTE(review): original indentation was lost in this capture; statements
# below are the function body in original order.
node_managers = []
config_sock_ports = getattr(config, 'SOCKET_PORTS', [])
if config_sock_ports:
next_index = idxOffset
for port in config_sock_ports:
with socket.socket() as server:
server.bind(('', port))
# NOTE(review): TCP_NODELAY is set on the listening socket here, not on
# the accepted connection — verify this has the intended effect
server.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
server.settimeout(5)
logger.info("Listening on {}".format(port))
server.listen()
try:
while True: # while server socket doesn't time-out
conn, client_addr = server.accept()
logger.info("Connection from {}:{}".format(client_addr[0], client_addr[1]))
conn.settimeout(2)
node_manager = SocketNodeManager(conn)
if node_manager.discover_nodes(next_index):
next_index += len(node_manager.nodes)
node_managers.append(node_manager)
else:
# no nodes found on this connection; drop it
conn.close()
except socket.timeout:
pass
return node_managers
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,044
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/leds/led_handler_character.py
|
'''LED visual effects'''
# to use this handler, run:
# sudo apt-get install libjpeg-dev
# sudo pip install pillow
from . import ColorVal, setPixels, stagingEffects
from rh.events.eventmanager import Evt
from rh.events.led_event_manager import LEDEffect, LEDEvent
from rh.app.RHRace import RaceStatus
import gevent
from PIL import Image, ImageFont, ImageDraw
FONT_PATH = 'rh/static/fonts'
def dataHandler(args):
'''Render a data-driven value (lap count, lap time, position, heat id,
message or staging countdown) on the LED panel via printCharacter.
Returns False when required args are missing or the data key is unknown.
'''
# NOTE(review): original indentation was lost in this capture; statements
# below are the function body in original order.
if 'data' in args:
if args['data'] == 'staging':
args['time'] = 0
if 'hide_stage_timer' not in args or not args['hide_stage_timer']:
if 'pi_starts_at_ms' in args:
# countdown: redraw remaining whole seconds until race start
def effect_fn(diff_ms):
args['text'] = int(diff_ms/1000)
printCharacter(args)
stagingEffects(args['pi_starts_at_ms'], effect_fn)
else:
# no start time known: show a placeholder
args['text'] = 'X'
printCharacter(args)
# standard methods
elif args['data'] == 'lap_number':
if args['lap']['lap_number'] > 0:
args['text'] = args['lap']['lap_number']
else:
return False
elif args['data'] == 'lap_time':
# lap_time is in milliseconds; display with one decimal in seconds
args['text'] = '{0:.1f}'.format(args['lap']['lap_time'] / 1000)
elif args['data'] == 'position':
for line in args['RACE'].results['by_race_time']:
if args['node_index'] == line['node']:
args['text'] = line['position']
break
elif args['data'] == 'heat_id':
args['text'] = args['heat_id']
elif args['data'] == 'message':
args['text'] = args['message']
printCharacter(args)
else:
return False
def printCharacter(args):
    '''Render a short text string centered on the LED panel.

    Requires 'strip' and 'text' in args (returns False otherwise). Uses
    the 16px font when the panel has at least 16 rows and the text fits,
    otherwise falls back to the 8px font.
    '''
    if 'strip' not in args:
        return False
    strip = args['strip']
    if 'text' not in args:
        return False
    text = str(args['text'])
    color = convertColor(args['color'] if 'color' in args else ColorVal.WHITE)
    height = args['ledRows']
    width = strip.numPixels() // height
    panel = Image.new('RGB', [width, height])
    draw = ImageDraw.Draw(panel)
    # prefer the large font when the panel is tall enough and the text fits
    use_small_font = True
    if height >= 16:
        font = ImageFont.truetype(FONT_PATH + "/RotorHazardPanel16.ttf", 16)
        w, h = font.getsize(text)
        if w <= width - 1:
            use_small_font = False
            h = 16
    if use_small_font:
        font = ImageFont.truetype(FONT_PATH + "/RotorHazardPanel8.ttf", 8)
        w, h = font.getsize(text)
        h = 8
    draw.text((int((width - w) / 2) + 1, int((height - h) / 2)), text, font=font, fill=(color))
    rotated = panel.rotate(90 * args['panelRotate'])
    setPixels(strip, rotated, args['invertedPanelRows'])
    strip.show()
def scrollText(args):
    '''Scroll a message or formatted lap time horizontally across the panel.

    Returns False when 'strip' is missing or the data key is unsupported.
    '''
    if 'strip' not in args:
        return False
    strip = args['strip']
    if args['data'] == 'message':
        text = str(args['message'])
    elif args['data'] == 'lap_time':
        text = str(args['lap']['lap_time_formatted'])
    else:
        return False
    color = convertColor(args['color'] if 'color' in args else ColorVal.WHITE)
    height = args['ledRows']
    width = strip.numPixels() // height
    panel = Image.new('RGB', [width, height])
    draw = ImageDraw.Draw(panel)
    if height >= 16:
        font = ImageFont.truetype(FONT_PATH + "/RotorHazardPanel16.ttf", 16)
        w, h = font.getsize(text)
        h = 16
    else:
        font = ImageFont.truetype(FONT_PATH + "/RotorHazardPanel8.ttf", 8)
        w, h = font.getsize(text)
        h = 8
    draw_y = int((height - h) / 2)
    # march the text from off-screen right to off-screen left, one column per frame
    for col in range(-width, w + width):
        draw.rectangle((0, 0, width, height), fill=(0, 0, 0))
        draw.text((-col, draw_y), text, font=font, fill=(color))
        frame = panel.rotate(90 * args['panelRotate'])
        setPixels(strip, frame, args['invertedPanelRows'])
        strip.show()
        gevent.sleep(10/1000.0)
def multiLapGrid(args):
'''Draw a 2x2 grid of per-pilot lap counts (first four active nodes) on
panels at least 16 rows tall. Returns False when required args or race
results are missing, or the panel is too short.
'''
# NOTE(review): original indentation was lost in this capture; statements
# below are the function body in original order.
if 'strip' in args:
strip = args['strip']
else:
return False
if 'RACE' in args:
RACE = args['RACE']
else:
return False
if args['RACE'].results and 'by_race_time' in args['RACE'].results:
leaderboard = args['RACE'].results['by_race_time']
else:
return False
height = args['ledRows']
width = strip.numPixels() // height
im = Image.new('RGB', [width, height])
draw = ImageDraw.Draw(im)
# grid layout needs at least two 8px rows of text
if height < 16:
return False
half_height = height/2
half_width = width/2
if height >= 32:
font = ImageFont.truetype(FONT_PATH+"/RotorHazardPanel16.ttf", 16)
font_h = 16
else:
font = ImageFont.truetype(FONT_PATH+"/RotorHazardPanel8.ttf", 8)
font_h = 8
# sorted node ids determine each pilot's quadrant
active_nodes = []
for line in leaderboard:
active_nodes.append(line['node'])
active_nodes.sort()
for line in leaderboard:
if line['node'] < 4:
if line['laps']:
# cap display at 19 laps; beyond that show '+'
if line['laps'] <= 19:
text = str(line['laps'])
else:
text = '+'
else:
if RACE.race_status == RaceStatus.DONE:
text = str(line['laps'])
else:
# first callsign character
text = line['callsign'][0]
w, h = font.getsize(text)
h = font_h
color = convertColor(args['manager'].getDisplayColor(line['node'], from_result=True))
# draw positions
if active_nodes.index(line['node']) == 0:
pos_x = int((half_width - w)/2)
pos_y = int(((half_height) - h)/2)
elif active_nodes.index(line['node']) == 1:
pos_x = int(((half_width - w)/2) + half_width)
pos_y = int(((half_height) - h)/2)
elif active_nodes.index(line['node']) == 2:
pos_x = int((half_width - w)/2)
pos_y = int((((half_height) - h)/2) + half_height)
elif active_nodes.index(line['node']) == 3:
pos_x = int(((half_width - w)/2) + half_width)
pos_y = int((((half_height) - h)/2) + half_height)
draw.text((pos_x + 1, pos_y), text, font=font, fill=color)
img = im.rotate(90 * args['panelRotate'])
setPixels(strip, img, args['invertedPanelRows'])
strip.show()
def clearPixels(strip):
    '''Blank every pixel on the strip with ColorVal.NONE (caller must show()).'''
    for idx in range(strip.numPixels()):
        strip.setPixelColor(idx, ColorVal.NONE)
def convertColor(color):
    '''Split a packed 0xRRGGBB integer color into an (r, g, b) tuple.'''
    rg, b = divmod(color, 256)
    r, g = divmod(rg, 256)
    return r, g, b
def discover(config, *args, **kwargs):
'''Build and return the list of character/text LEDEffect definitions.

Each effect is parameterized from the LED panel config (rows, rotation,
inverted rows); the 4-node lap grid is only offered for panels with at
least 16 rows.
'''
# NOTE(review): original indentation was lost in this capture; statements
# below are the function body in original order.
effects = [
LEDEffect(
"textLapNumber",
"Text: Lap Count",
dataHandler, {
'manual': False,
'include': [Evt.RACE_LAP_RECORDED],
'exclude': [Evt.ALL],
'recommended': [Evt.RACE_LAP_RECORDED]
}, {
'ledRows': config['LED_ROWS'],
'panelRotate': config['PANEL_ROTATE'],
'invertedPanelRows': config['INVERTED_PANEL_ROWS'],
'data': 'lap_number',
'time': 5
}
),
LEDEffect(
"textLapTime",
"Text: Lap Time",
dataHandler, {
'manual': False,
'include': [Evt.RACE_LAP_RECORDED],
'exclude': [Evt.ALL],
'recommended': [Evt.RACE_LAP_RECORDED]
}, {
'ledRows': config['LED_ROWS'],
'panelRotate': config['PANEL_ROTATE'],
'invertedPanelRows': config['INVERTED_PANEL_ROWS'],
'data': 'lap_time',
'time': 8
}
),
LEDEffect(
"textPosition",
"Text: Position",
dataHandler, {
'manual': False,
'include': [Evt.RACE_LAP_RECORDED],
'exclude': [Evt.ALL],
'recommended': [Evt.RACE_LAP_RECORDED]
}, {
'ledRows': config['LED_ROWS'],
'panelRotate': config['PANEL_ROTATE'],
'invertedPanelRows': config['INVERTED_PANEL_ROWS'],
'data': 'position',
'time': 8
}
),
LEDEffect(
"scrollLapTime",
"Text Scroll: Lap Time",
scrollText, {
'manual': False,
'include': [Evt.RACE_LAP_RECORDED],
'exclude': [Evt.ALL],
'recommended': [Evt.RACE_LAP_RECORDED]
}, {
'ledRows': config['LED_ROWS'],
'panelRotate': config['PANEL_ROTATE'],
'invertedPanelRows': config['INVERTED_PANEL_ROWS'],
'data': 'lap_time',
'time': 2
}
),
LEDEffect(
"textMessage",
"Text Scroll: Message",
scrollText, {
'manual': False,
'include': [Evt.MESSAGE_INTERRUPT, Evt.MESSAGE_STANDARD, Evt.STARTUP],
'exclude': [Evt.ALL],
'recommended': [Evt.MESSAGE_INTERRUPT, Evt.MESSAGE_STANDARD, Evt.STARTUP]
}, {
'ledRows': config['LED_ROWS'],
'panelRotate': config['PANEL_ROTATE'],
'invertedPanelRows': config['INVERTED_PANEL_ROWS'],
'data': 'message',
'time': 0
}
),
LEDEffect(
"textRaceWin",
"Text Scroll: Race Winner",
scrollText, {
'manual': False,
'include': [Evt.RACE_WIN],
'exclude': [Evt.ALL],
'recommended': [Evt.RACE_WIN]
}, {
'ledRows': config['LED_ROWS'],
'panelRotate': config['PANEL_ROTATE'],
'invertedPanelRows': config['INVERTED_PANEL_ROWS'],
'data': 'message',
'time': 2
}
),
LEDEffect(
"textStaging",
"Text: Countdown",
dataHandler, {
'manual': False,
'include': [Evt.RACE_STAGE],
'exclude': [Evt.ALL],
'recommended': [Evt.RACE_STAGE]
}, {
'ledRows': config['LED_ROWS'],
'panelRotate': config['PANEL_ROTATE'],
'invertedPanelRows': config['INVERTED_PANEL_ROWS'],
'data': 'staging',
'time': 5
}
),
]
# the 2x2 lap grid needs at least 16 LED rows
if (config['LED_ROWS'] >= 16):
effects.append(
LEDEffect(
"textLapGrid",
"Text: 4-Node Lap Count",
multiLapGrid, {
'include': [LEDEvent.IDLE_DONE, LEDEvent.IDLE_RACING],
'recommended': [
Evt.RACE_STAGE,
Evt.RACE_LAP_RECORDED,
Evt.RACE_FINISH,
Evt.RACE_WIN,
Evt.RACE_STOP]
}, {
'ledRows': config['LED_ROWS'],
'panelRotate': config['PANEL_ROTATE'],
'invertedPanelRows': config['INVERTED_PANEL_ROWS'],
'time': 4
}
)
)
return effects
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,045
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/helpers/mqtt_helper.py
|
import gevent
import paho.mqtt.client as mqtt_client
from collections import UserDict
def make_topic(root: str, parts: list):
    '''Join topic segments under `root`, percent-escaping MQTT-special chars.

    Literal '%', '/', '#' and '+' inside a segment are escaped; a segment
    that IS exactly the wildcard '+' or '#' is passed through unescaped.
    '''
    def _escape(part):
        if part in ('+', '#'):
            return part  # wildcard segment, keep as-is
        return (part.replace('%', '%25')
                    .replace('/', '%2F')
                    .replace('#', '%23')
                    .replace('+', '%2B'))

    segments = '/'.join(_escape(p) for p in parts)
    if root and parts:
        return root + '/' + segments
    return root + segments
def split_topic(topic: str):
    '''Split an MQTT topic into segments, undoing make_topic's escaping.'''
    decoded = []
    for part in topic.split('/'):
        # reverse order of make_topic: '+', '#', '/' first, then '%' last
        for escaped, original in (('%2B', '+'), ('%23', '#'),
                                  ('%2F', '/'), ('%25', '%')):
            part = part.replace(escaped, original)
        decoded.append(part)
    return decoded
def create_client(mqttConfig, prefix):
    '''Create, configure and connect a paho MQTT client.

    Settings are looked up first with the given prefix (e.g. 'TIMER_BROKER')
    and fall back to the unprefixed key. Raises when no broker is configured.
    The network loop runs in a background gevent greenlet.
    '''
    def lookup(key, default_value=None):
        return mqttConfig.get(prefix + key, mqttConfig.get(key, default_value))

    broker = lookup('BROKER')
    if not broker:
        raise Exception("MQTT not configured")
    client = mqtt_client.Client(client_id=lookup('CLIENT_ID'))
    username = lookup('USERNAME')
    if username:
        client.username_pw_set(username, lookup('PASSWORD'))
    client_cert = lookup('CLIENT_CERT')
    private_key = lookup('PRIVATE_KEY')
    if client_cert and private_key:
        # enable TLS only when both certificate and key are supplied
        client.tls_set(certfile=client_cert, keyfile=private_key)
    client.connect(broker, lookup('PORT', 1883))
    gevent.spawn(client.loop_forever)
    return client
class MqttHelper(UserDict):
    '''Dictionary of named MQTT clients with a bulk disconnect helper.'''

    def close(self):
        '''Disconnect every client held in this helper.'''
        for mqtt_conn in self.data.values():
            mqtt_conn.disconnect()
def create(rhconfig):
    """Build a MqttHelper holding 'timer' and 'race' clients from rhconfig.MQTT."""
    mqtt_settings = rhconfig.MQTT
    # Connect the timer client first, then the race client, preserving
    # the original connection order.
    timer = create_client(mqtt_settings, 'TIMER_')
    race = create_client(mqtt_settings, 'RACE_')
    helper = MqttHelper()
    helper['timer'] = timer
    helper['race'] = race
    return helper
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,046
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/interface/MqttInterface.py
|
from .BaseHardwareInterface import BaseHardwareInterface, BaseHardwareInterfaceListener
from rh.helpers.mqtt_helper import make_topic, split_topic
from rh.util import RHTimeFns
from rh.util.RHUtils import FREQS
import logging
import json
from typing import Optional
logger = logging.getLogger(__name__)
def json_timestamp(t: int) -> str:
    """Render a millisecond timestamp as a JSON-safe string.

    Raises ValueError unless *t* is exactly an int; note that bools are
    rejected too, since the exact type is compared (not isinstance).
    """
    if type(t) is not int:
        raise ValueError('Millisecond timestamp is not an integer ({})'.format(t))
    return str(t)
class MqttInterface(BaseHardwareInterfaceListener):
    """Bridges a hardware interface to MQTT.

    Listener callbacks from the hardware interface (RSSI samples,
    crossings, passes, setting changes) are published to announcement
    topics; node settings (frequency, band/channel, triggers) received
    on control topics are applied to the hardware interface.

    Topic layout (both directions):
    <root>/<timer_id>/<node manager addr>/<node index>/<sub-topic>
    """
    def __init__(self, mqtt_client, ann_topic: str, ctrl_topic: str, timer_id: str, hw_interface: BaseHardwareInterface):
        self.hw_interface = hw_interface
        # Register as the hardware interface's listener so our on_* callbacks fire.
        self.hw_interface.listener = self
        self.client = mqtt_client
        self.ann_topic = ann_topic
        self.ctrl_topic = ctrl_topic
        self.timer_id = timer_id

    def start(self):
        """Announce all node managers and nodes; subscribe to control topics."""
        for node_manager in self.hw_interface.node_managers:
            self._mqtt_node_manager_start(node_manager)
            for node in node_manager.nodes:
                self._mqtt_node_start(node)

    def stop(self):
        """Announce shutdown and unsubscribe from all control topics."""
        for node_manager in self.hw_interface.node_managers:
            self._mqtt_node_manager_stop(node_manager)

    # ---- BaseHardwareInterfaceListener callbacks -> MQTT announcements ----

    def on_rssi_sample(self, node, ts: int, rssi: int):
        self._mqtt_publish_rssi(node, ts, rssi)

    def on_lifetime_sample(self, node, ts: int, lifetime: int):
        self._mqtt_publish_lifetime(node, ts, lifetime)

    def on_enter_triggered(self, node, cross_ts: int, cross_rssi: int, cross_lifetime: Optional[int]=None):
        self._mqtt_publish_enter(node, cross_ts, cross_rssi, cross_lifetime)

    def on_exit_triggered(self, node, cross_ts: int , cross_rssi: int, cross_lifetime: Optional[int]=None):
        self._mqtt_publish_exit(node, cross_ts, cross_rssi, cross_lifetime)

    def on_pass(self, node, lap_ts: int, lap_source, pass_rssi: int):
        self._mqtt_publish_pass(node, lap_ts, lap_source, pass_rssi)

    def on_extremum_history(self, node, extremum_timestamp: int, extremum_rssi: int, extremum_duration: int):
        self._mqtt_publish_history(node, extremum_timestamp, extremum_rssi, extremum_duration)

    def on_frequency_changed(self, node, frequency: int, band: Optional[str]=None, channel: Optional[int]=None):
        # Band/channel is published as e.g. "R1" only when both parts are known.
        self._mqtt_publish_bandChannel(node, band+str(channel) if band and channel else None)
        self._mqtt_publish_frequency(node, frequency)

    def on_enter_trigger_changed(self, node, level: int):
        self._mqtt_publish_enter_trigger(node, level)

    def on_exit_trigger_changed(self, node, level: int):
        self._mqtt_publish_exit_trigger(node, level)

    # ---- node-manager lifecycle ----

    def _mqtt_node_manager_start(self, node_manager):
        """Subscribe to the manager's control topics and announce its start."""
        self._mqtt_node_subscribe_to(node_manager, "frequency", self._mqtt_set_frequency)
        self._mqtt_node_subscribe_to(node_manager, "bandChannel", self._mqtt_set_bandChannel)
        self._mqtt_node_subscribe_to(node_manager, "enterTrigger", self._mqtt_set_enter_trigger)
        self._mqtt_node_subscribe_to(node_manager, "exitTrigger", self._mqtt_set_exit_trigger)
        msg = {'type': node_manager.__class__.TYPE, 'startTime': RHTimeFns.getEpochTimeNow()}
        self.client.publish(make_topic(self.ann_topic, [self.timer_id, node_manager.addr]), json.dumps(msg))

    def _mqtt_node_subscribe_to(self, node_manager, node_topic, handler):
        """Subscribe to <ctrl>/<timer>/<manager>/+/<node_topic> with *handler*."""
        ctrlTopicFilter = make_topic(self.ctrl_topic, [self.timer_id, node_manager.addr, '+', node_topic])
        self.client.message_callback_add(ctrlTopicFilter, lambda client, userdata, msg: handler(node_manager, client, userdata, msg))
        self.client.subscribe(ctrlTopicFilter)

    def _mqtt_node_manager_stop(self, node_manager):
        """Announce the manager's stop and remove its control subscriptions."""
        msg = {'stopTime': RHTimeFns.getEpochTimeNow()}
        self.client.publish(make_topic(self.ann_topic, [self.timer_id, node_manager.addr]), json.dumps(msg))
        self._mqtt_node_unsubscribe_from(node_manager, "frequency")
        self._mqtt_node_unsubscribe_from(node_manager, "bandChannel")
        self._mqtt_node_unsubscribe_from(node_manager, "enterTrigger")
        self._mqtt_node_unsubscribe_from(node_manager, "exitTrigger")

    def _mqtt_node_unsubscribe_from(self, node_manager, node_topic):
        ctrlTopicFilter = make_topic(self.ctrl_topic, [self.timer_id, node_manager.addr, '+', node_topic])
        self.client.unsubscribe(ctrlTopicFilter)
        self.client.message_callback_remove(ctrlTopicFilter)

    def _mqtt_node_start(self, node):
        """Publish a node's current settings as retained-style announcements."""
        self._mqtt_publish_frequency(node, node.frequency)
        self._mqtt_publish_bandChannel(node, node.bandChannel)
        self._mqtt_publish_enter_trigger(node, node.enter_at_level)
        self._mqtt_publish_exit_trigger(node, node.exit_at_level)

    # ---- topic helpers ----

    def _mqtt_create_node_topic(self, parent_topic, node, sub_topic=None):
        """Build <parent>/<timer>/<manager>/<node index>[/<sub_topic>]."""
        node_topic = make_topic(parent_topic, [self.timer_id, node.manager.addr, str(node.multi_node_index)])
        return node_topic+'/'+sub_topic if sub_topic else node_topic

    def _mqtt_get_node_from_topic(self, node_manager, topic):
        """Resolve an incoming control topic to one of *node_manager*'s nodes.

        Returns None when the topic does not address this timer/manager
        or the node index is out of range.
        """
        topicNames = split_topic(topic)
        if len(topicNames) >= 4:
            timer_id = topicNames[-4]
            nm_name = topicNames[-3]
            multi_node_index = int(topicNames[-2])
            if timer_id == self.timer_id and nm_name == node_manager.addr and multi_node_index < len(node_manager.nodes):
                return node_manager.nodes[multi_node_index]
        return None

    # incoming message handlers

    def _mqtt_set_frequency(self, node_manager, client, userdata, msg):
        """Handle a frequency control message: '<freq>[,<bandChannel>]'.

        An empty payload turns the node off (frequency 0).
        """
        node = self._mqtt_get_node_from_topic(node_manager, msg.topic)
        if node:
            if msg.payload:
                try:
                    freq_bandChannel = msg.payload.decode('utf-8').split(',')
                    freq = int(freq_bandChannel[0])
                    if len(freq_bandChannel) >= 2:
                        bandChannel = freq_bandChannel[1]
                        self.hw_interface.set_frequency(node.index, freq, bandChannel[0], int(bandChannel[1]))
                    else:
                        self.hw_interface.set_frequency(node.index, freq)
                except ValueError:
                    logger.warning("Invalid frequency message")
            else:
                self.hw_interface.set_frequency(node.index, 0)

    def _mqtt_set_bandChannel(self, node_manager, client, userdata, msg):
        """Handle a band/channel control message, e.g. 'R1'.

        An empty payload clears the band/channel (frequency republished as-is).
        """
        node = self._mqtt_get_node_from_topic(node_manager, msg.topic)
        if node:
            if msg.payload:
                bandChannel = msg.payload.decode('utf-8')
                if bandChannel in FREQS:
                    freq = FREQS[bandChannel]
                    band = bandChannel[0]
                    channel = int(bandChannel[1])
                    self.hw_interface.set_frequency(node.index, freq, band, channel)
            else:
                self.hw_interface.set_frequency(node.index, node.frequency)

    def _mqtt_set_enter_trigger(self, node_manager, client, userdata, msg):
        """Handle an enter-trigger level control message (integer payload)."""
        node = self._mqtt_get_node_from_topic(node_manager, msg.topic)
        if node:
            try:
                level = int(msg.payload.decode('utf-8'))
                self.hw_interface.set_enter_at_level(node.index, level)
            # ValueError covers both bad UTF-8 (UnicodeDecodeError) and a
            # non-integer payload; a bare except here would swallow
            # SystemExit/KeyboardInterrupt too.
            except ValueError:
                logger.warning('Invalid enter trigger message')

    def _mqtt_set_exit_trigger(self, node_manager, client, userdata, msg):
        """Handle an exit-trigger level control message (integer payload)."""
        node = self._mqtt_get_node_from_topic(node_manager, msg.topic)
        if node:
            try:
                level = int(msg.payload.decode('utf-8'))
                self.hw_interface.set_exit_at_level(node.index, level)
            # See _mqtt_set_enter_trigger for why ValueError (not bare except).
            except ValueError:
                logger.warning('Invalid exit trigger message')

    # outgoing messages

    def _mqtt_publish_frequency(self, node, frequency):
        freq = str(frequency) if frequency else ''
        self.client.publish(self._mqtt_create_node_topic(self.ann_topic, node, "frequency"), freq)

    def _mqtt_publish_bandChannel(self, node, bandChannel):
        bc = bandChannel if bandChannel else ''
        self.client.publish(self._mqtt_create_node_topic(self.ann_topic, node, "bandChannel"), bc)

    def _mqtt_publish_enter_trigger(self, node, level):
        self.client.publish(self._mqtt_create_node_topic(self.ann_topic, node, "enterTrigger"), str(level))

    def _mqtt_publish_exit_trigger(self, node, level):
        self.client.publish(self._mqtt_create_node_topic(self.ann_topic, node, "exitTrigger"), str(level))

    def _mqtt_publish_rssi(self, node, ts: int, rssi: int):
        msg = {'timestamp': json_timestamp(ts), 'rssi': rssi}
        self.client.publish(self._mqtt_create_node_topic(self.ann_topic, node, "sample"), json.dumps(msg))

    def _mqtt_publish_lifetime(self, node, ts: int, lifetime: int):
        msg = {'timestamp': json_timestamp(ts), 'lifetime': lifetime}
        self.client.publish(self._mqtt_create_node_topic(self.ann_topic, node, "sample"), json.dumps(msg))

    def _mqtt_publish_enter(self, node, cross_ts: int, cross_rssi: int, cross_lifetime: Optional[int]=None):
        # pass_count+1: the pass this crossing belongs to hasn't completed yet.
        msg = {'count': node.pass_count+1, 'timestamp': json_timestamp(cross_ts), 'rssi': cross_rssi}
        if cross_lifetime is not None:
            msg['lifetime'] = cross_lifetime
        self.client.publish(self._mqtt_create_node_topic(self.ann_topic, node, "enter"), json.dumps(msg))

    def _mqtt_publish_exit(self, node, cross_ts: int, cross_rssi: int, cross_lifetime: Optional[int]=None):
        msg = {'count': node.pass_count, 'timestamp': json_timestamp(cross_ts), 'rssi': cross_rssi}
        if cross_lifetime is not None:
            msg['lifetime'] = cross_lifetime
        self.client.publish(self._mqtt_create_node_topic(self.ann_topic, node, "exit"), json.dumps(msg))

    def _mqtt_publish_pass(self, node, lap_ts: int, lap_source, pass_rssi: int):
        if lap_source == BaseHardwareInterface.LAP_SOURCE_REALTIME:
            lap_source_type = 'realtime'
        elif lap_source == BaseHardwareInterface.LAP_SOURCE_MANUAL:
            lap_source_type = 'manual'
        else:
            lap_source_type = None
        msg = {'count': node.pass_count, 'timestamp': json_timestamp(lap_ts), 'source': lap_source_type}
        if pass_rssi:
            msg['rssi'] = pass_rssi
        self.client.publish(self._mqtt_create_node_topic(self.ann_topic, node, "pass"), json.dumps(msg))

    def _mqtt_publish_history(self, node, extremum_timestamp, extremum_rssi, extremum_duration):
        msg = {'timestamp': json_timestamp(extremum_timestamp), 'rssi': extremum_rssi, 'duration': str(extremum_duration)}
        self.client.publish(self._mqtt_create_node_topic(self.ann_topic, node, "history"), json.dumps(msg))
def get_mqtt_interface_for(hw_cls):
    """Locate the MQTT wrapper class for the hardware class *hw_cls*.

    Looks for a sibling module named 'Mqtt<Module>' exporting a class
    'Mqtt<ClassName>'; falls back to the generic MqttInterface when no
    such module/class exists.
    """
    import importlib
    pkg_parts = hw_cls.__module__.split('.')
    candidate_module = '.'.join(pkg_parts[:-1]) + '.' + 'Mqtt' + pkg_parts[-1]
    try:
        module = importlib.import_module(candidate_module)
        return getattr(module, 'Mqtt' + hw_cls.__name__)
    except (ModuleNotFoundError, AttributeError):
        logger.info('No custom MQTT hardware interface found for {} - using default'.format(hw_cls.__name__))
        return MqttInterface
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,047
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/helpers/__init__.py
|
def i2c_url(bus_id, addr):
    """Format an I2C device reference, e.g. i2c_url(1, 8) -> 'i2c:1/0x08'."""
    # '#04x' renders the address as 0x-prefixed lowercase hex, width 4.
    return f"i2c:{bus_id}/{addr:#04x}"
def parse_i2c_url(url):
    """Parse 'i2c:<bus>/<hex addr>' into a (bus_id, addr) tuple.

    Raises ValueError when the URL lacks the 'i2c:' scheme.
    """
    prefix = 'i2c:'
    if not url.startswith(prefix):
        raise ValueError('Invalid I2C URL: {}'.format(url))
    fields = url[len(prefix):].split('/')
    # Address is hexadecimal (with or without a 0x prefix).
    return (int(fields[0]), int(fields[1], 16))
def serial_url(port):
    """Map a serial port name to a URL.

    Absolute paths (Linux device files) get the 'file:' scheme;
    anything else (e.g. Windows COM ports) gets 'serial:'.
    """
    scheme = "file" if port.startswith('/') else "serial"
    return "{}:{}".format(scheme, port)
def socket_url(ip, port):
    """Format a host/port pair as 'socket://<ip>:<port>/'."""
    return f"socket://{ip}:{port}/"
def parse_socket_url(url):
    """Parse 'socket://<ip>:<port>[/]' into an (ip, port) tuple.

    Raises ValueError when the URL lacks the 'socket://' scheme.
    """
    scheme = 'socket://'
    if not url.startswith(scheme):
        raise ValueError('Invalid socket URL: {}'.format(url))
    body = url[len(scheme):]
    # Drop an optional trailing slash before splitting host from port.
    if url[-1] == '/':
        body = body[:-1]
    parts = body.split(':')
    return (parts[0], int(parts[1]))
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,048
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/tools/adc_test.py
|
import gevent.monkey
gevent.monkey.patch_all()
import logging
import sys
from . import get_interface
from rh.util import ms_counter
def start(port, freq):
    """Tune every node on *port* to *freq*, then loop forever printing
    timestamped frequency/ADC/RSSI readings, one comma-separated line
    per node per pass."""
    INTERFACE = get_interface(port)
    for node in INTERFACE.nodes:
        INTERFACE.set_frequency(node.index, freq)
    while True:
        for node in INTERFACE.nodes:
            # Read all three values before timestamping the output line.
            freq_reading = node.read_frequency()
            adc_reading = node.read_adc()
            rssi_reading = node.read_rssi()
            print("{}, {}, {}, {}, {}".format(ms_counter(), node, freq_reading, adc_reading, rssi_reading))
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    # Require a port/address and a frequency on the command line.
    if len(sys.argv) < 3:
        print('Please specify a serial port, e.g. COM12 (or I2C address, e.g. i2c:1/0x08, or socket port, e.g. :5005), and a frequency.')
        exit()
    start(sys.argv[1], int(sys.argv[2]))
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,049
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/tools/plot_filter.py
|
import csv
import sys
import numpy as np
import matplotlib.pyplot as plt

# Load the first three numeric columns of the CSV named on the command
# line (skipping the header row) into an (N, 3) array.
rows = []
with open(sys.argv[1]) as f:
    reader = csv.reader(f)
    header = next(reader)
    for record in reader:
        rows.append(np.array([float(record[0]), float(record[1]), float(record[2])]))
data = np.array(rows)

# Plot columns 1 and 2 (the filtered signals) over samples 1000-2000.
plt.plot(data[1000:2000, 1:3])
plt.show()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,050
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/events/led_event_manager.py
|
'''
LED event manager
Wires events to handlers
{
'manual': False,
'include': [],
'exclude': [Evt.ALL]
}
'''
import copy
import json
from typing import Any, Dict, List
from rh.app import RHRace
import gevent
from rh.leds import hexToColor
from rh.events.eventmanager import Evt
from six.moves import UserDict
import logging
logger = logging.getLogger(__name__)
class LEDEventManager:
    """Wires race/timer events to LED effect handlers.

    Effects are registered by name; events are mapped to effect names via
    setEventEffect(). Idle effects are invoked after a non-idle effect
    finishes (unless it sets 'preventIdle').
    """
    def __init__(self, eventmanager, strip, RHData, RACE, Language, INTERFACE):
        self.Events = eventmanager
        self.strip = strip
        self.RHData = RHData
        self.RACE = RACE
        self.Language = Language
        self.INTERFACE = INTERFACE
        # event -> effect name currently assigned
        self.events: Dict[Evt,Any] = {}
        # idle event -> prepared handler args (invoked directly, not via Events)
        self.idleArgs: Dict[Evt,Any] = {}
        # effect name -> LEDEffect
        self.eventEffects: Dict[str,Any] = {}
        self.eventThread = None
        # per-node color override; takes precedence in getDisplayColor()
        self.displayColorCache: List[int] = []
        # hold
        self.registerEffect(LEDEffect("hold", "Hold", lambda *args: None, {
            'include': [Evt.SHUTDOWN, LEDEvent.IDLE_DONE, LEDEvent.IDLE_READY, LEDEvent.IDLE_RACING],
            'exclude': [Evt.STARTUP],
            'recommended': [Evt.ALL]
        }, {
            'preventIdle': True
        }))
        # do nothing
        self.registerEffect(LEDEffect("none", "No Effect", lambda *args: None, {
            'manual': False,
            'exclude': [Evt.STARTUP],
            'recommended': [Evt.ALL]
        }))
    def isEnabled(self):
        """This manager drives real LEDs (cf. NoLEDManager/ClusterLEDManager)."""
        return True
    def registerEffect(self, effect):
        """Register an LEDEffect under its 'name' key; always returns True."""
        self.eventEffects[effect['name']] = effect
        return True
    def getRegisteredEffects(self):
        return self.eventEffects
    def getEventEffect(self, event):
        """Return the effect name assigned to *event*, or False if none."""
        if event in self.events:
            return self.events[event]
        else:
            return False
    def setEventEffect(self, event, name):
        """Assign effect *name* to *event*.

        Returns None for an unknown effect name, True otherwise. 'none'
        detaches the LED handler from the event. Idle events store their
        handler args for direct invocation by activateIdle(); other events
        are attached via the event manager (SHUTDOWN runs blocking).
        """
        self.events[event] = name
        if name not in self.eventEffects:
            return None
        if name == 'none':
            self.Events.off(event, 'LED')
            return True
        # Build handler args from the effect's defaults plus shared context.
        args = copy.deepcopy(self.eventEffects[name]['defaultArgs'])
        if args is None:
            args = {}
        args.update({
            'handlerFn': self.eventEffects[name]['handlerFn'],
            'strip': self.strip,
            'RHData': self.RHData,
            'RACE': self.RACE,
            'Language': self.Language,
            'INTERFACE': self.INTERFACE,
            'manager': self,
        })
        if event in [LEDEvent.IDLE_READY, LEDEvent.IDLE_DONE, LEDEvent.IDLE_RACING]:
            # event is idle
            self.idleArgs[event] = args
        else:
            if event in [Evt.SHUTDOWN]:
                priority = 50 # event is direct (blocking)
            else:
                priority = 150 # event is normal (threaded/non-blocking)
            self.Events.on(event, 'LED', self.activateEffect, args, priority)
        return True
    def clear(self):
        """Blank the strip via the 'clear' effect without triggering idle."""
        self.setEventEffect(Evt.LED_MANUAL, 'clear')
        self.Events.trigger(Evt.LED_MANUAL, {'time': None, 'preventIdle': True})
    def setDisplayColorCache(self, colorCache):
        # Override per-node colors; consulted first by getDisplayColor().
        self.displayColorCache = colorCache
    def getNodeColors(self, num_nodes):
        """Return the display color for each of the first *num_nodes* nodes."""
        colors = []
        for node_index in range(num_nodes):
            colors.append(self.getDisplayColor(node_index))
        return colors
    def getDisplayColor(self, node_index, from_result=False):
        """Resolve the color for *node_index* per the 'ledColorMode' option.

        Mode 1 colors by pilot (from race results when from_result, else
        from the current heat), mode 2 by frequency band, otherwise by a
        fixed per-node palette. Falls back to white. Returns the color
        converted via hexToColor().
        """
        # Cached override wins over every mode.
        if node_index < len(self.displayColorCache):
            return self.displayColorCache[node_index]
        mode = self.RHData.get_optionInt('ledColorMode', 0)
        color = False
        if mode == 1: # by pilot
            color = '#ffffff'
            if from_result:
                # Prefer the finished race's results, else the live results.
                if self.RACE.last_race_results and 'by_race_time' in self.RACE.last_race_results:
                    for line in self.RACE.last_race_results['by_race_time']:
                        if line['node'] == node_index:
                            color = self.RHData.get_pilot(line['pilot_id']).color
                            break
                elif self.RACE.results and 'by_race_time' in self.RACE.results:
                    for line in self.RACE.results['by_race_time']:
                        if line['node'] == node_index:
                            color = self.RHData.get_pilot(line['pilot_id']).color
                            break
            else:
                if self.RACE.current_heat:
                    for heatNode in self.RHData.get_heatNodes_by_heat(self.RACE.current_heat):
                        if heatNode.node_index == node_index:
                            if heatNode.pilot_id:
                                color = self.RHData.get_pilot(heatNode.pilot_id).color
                            break
        elif mode == 2: # by frequency
            profile = self.RHData.get_profile(self.RHData.get_optionInt('currentProfile'))
            profile_freqs = json.loads(profile.frequencies)
            freq = profile_freqs["f"][node_index]
            # Bucket the frequency (MHz) into a rainbow of band colors.
            if freq <= 5672:
                color = '#ffffff' # White
            elif freq <= 5711:
                color = '#ff0000' # Red
            elif freq <= 5750:
                color = '#ff8000' # Orange
            elif freq <= 5789:
                color = '#ffff00' # Yellow
            elif freq <= 5829:
                color = '#00ff00' # Green
            elif freq <= 5867:
                color = '#0000ff' # Blue
            elif freq <= 5906:
                color = '#8000ff' # Dark Violet
            else:
                color = '#ff0080' # Deep Pink
        else: # by node
            # Use the configured palette if set, else a built-in one.
            colorNodeSerial = self.RHData.get_option('ledColorNodes', False)
            if colorNodeSerial:
                colorNodes = json.loads(colorNodeSerial)
            else:
                colorNodes = [
                    "#0022ff", # Blue
                    "#ff5500", # Orange
                    "#00ff22", # Green
                    "#ff0055", # Magenta
                    "#ddff00", # Yellow
                    "#7700ff", # Purple
                    "#00ffdd", # Teal
                    "#aaaaaa", # White
                ]
            color = colorNodes[node_index % len(colorNodes)]
        if not color:
            color = '#ffffff'
        return hexToColor(color)
    def activateEffect(self, args):
        """Run an effect handler; afterwards fall back to the idle effect
        (after an optional 'time' delay) unless 'preventIdle' is set."""
        # check related event is not shutdown
        if 'event' in args and args['event'] == Evt.SHUTDOWN:
            return False
        result = args['handlerFn'](args)
        if result == False:
            logger.debug('LED effect %s produced no output', args['handlerFn'])
        if 'preventIdle' not in args or not args['preventIdle']:
            if 'time' in args:
                time = args['time']
            else:
                time = 0
            if time:
                gevent.sleep(float(time))
            self.activateIdle()
    def activateIdle(self):
        """Invoke the idle effect matching the current race status, if any."""
        gevent.idle()
        event = None
        if self.RACE.race_status == RHRace.RaceStatus.DONE:
            event = LEDEvent.IDLE_DONE
        elif self.RACE.race_status == RHRace.RaceStatus.READY:
            event = LEDEvent.IDLE_READY
        elif self.RACE.race_status == RHRace.RaceStatus.RACING:
            event = LEDEvent.IDLE_RACING
        if event and event in self.events:
            # Idle handlers are called directly with their stored args.
            self.eventEffects[self.events[event]]['handlerFn'](self.idleArgs[event])
class NoLEDManager():
    """Null-object LED manager used when no LED hardware is configured.

    Reports itself disabled; any other method call is silently accepted
    and returns False.
    """
    def __init__(self):
        pass

    def isEnabled(self):
        return False

    def __getattr__(self, *args, **kwargs):
        # Any unknown attribute resolves to a no-op callable.
        def no_op(*_args, **_kwargs):
            return False
        return no_op
# Similar to NoLEDManager but with enough support to send 'effect' events to cluster timers
class ClusterLEDManager():
    """LED manager stub for cluster timers: drives no local LEDs, but
    keeps effect registration so 'effect' events can be forwarded to
    cluster timers. Any other method call is a no-op returning False."""
    def __init__(self):
        self.eventEffects: Dict[str,Any] = {}

    def isEnabled(self):
        return False

    def registerEffect(self, effect):
        self.eventEffects[effect['name']] = effect
        return True

    def getRegisteredEffects(self):
        return self.eventEffects

    def __getattr__(self, *args, **kwargs):
        # Any unknown attribute resolves to a no-op callable.
        def no_op(*_args, **_kwargs):
            return False
        return no_op
# Generic data structures for working with LED commands
class ColorPattern:
    """On/off run-length patterns for LED strips.

    Each pattern is a [pixels ON, pixels OFF] pair; SOLID (None) means
    light every pixel with no pattern.
    """
    SOLID = None
    ''' [# ON, # OFF] '''
    ALTERNATING = [1, 1]        # every other pixel lit
    ONE_OF_THREE = [1, 2]       # 1 lit, 2 dark
    TWO_OUT_OF_THREE = [2, 1]   # 2 lit, 1 dark
    MOD_SEVEN = [1, 6]          # 1 lit, 6 dark
    FOUR_ON_FOUR_OFF = [4, 4]   # blocks of 4
class LEDEvent:
    """LED-specific pseudo-events plus the list of events users may map
    to LED effects in the UI.

    The IDLE_* events are not fired through the event manager; they are
    selected by LEDEventManager.activateIdle() based on race status.
    """
    IDLE_READY = 'ledIdleReady'
    IDLE_DONE = 'ledIdleDone'
    IDLE_RACING = 'ledIdleRacing'

    # Events exposed for effect assignment, with their UI labels.
    configurable_events = [
        {
            "event": Evt.RACE_STAGE,
            "label": "Race Staging"
        },
        {
            "event": Evt.RACE_START,
            "label": "Race Start"
        },
        {
            "event": Evt.RACE_FINISH,
            "label": "Race Finish"
        },
        {
            "event": Evt.RACE_STOP,
            "label": "Race Stop"
        },
        {
            "event": Evt.LAPS_CLEAR,
            "label": "Save/Clear Laps"
        },
        {
            "event": Evt.CROSSING_ENTER,
            "label": "Gate Entrance"
        },
        {
            "event": Evt.CROSSING_EXIT,
            "label": "Gate Exit"
        },
        {
            "event": Evt.RACE_LAP_RECORDED,
            "label": "Lap Recorded"
        },
        {
            "event": Evt.RACE_WIN,
            "label": "Race Winner Declared"
        },
        {
            "event": Evt.MESSAGE_STANDARD,
            "label": "Message (Normal)"
        },
        {
            "event": Evt.MESSAGE_INTERRUPT,
            "label": "Message (Priority)"
        },
        {
            "event": Evt.STARTUP,
            "label": "Server Startup"
        },
        {
            "event": Evt.SHUTDOWN,
            "label": "Server Shutdown"
        },
        {
            "event": Evt.CLUSTER_JOIN,
            "label": "Joined Timer Cluster"
        },
        {
            "event": IDLE_READY,
            "label": "Idle: System Ready"
        },
        {
            "event": IDLE_RACING,
            "label": "Idle: Racing"
        },
        {
            "event": IDLE_DONE,
            "label": "Idle: Race Stopped"
        },
    ]
class LEDEffect(UserDict):
    """Dict-like description of an LED effect: its name, UI label,
    handler function, the events it is valid for, and optional default
    handler arguments."""
    def __init__(self, name, label, handlerFn, validEvents, defaultArgs=None):
        entries = {
            "name": name,
            "label": label,
            "handlerFn": handlerFn,
            "validEvents": validEvents,
            "defaultArgs": defaultArgs,
        }
        UserDict.__init__(self, entries)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,051
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/leds/__init__.py
|
import gevent
from rh.util import ms_counter, millis_to_secs
def Color(red, green, blue):
    """Pack 8-bit red, green and blue components into one 24-bit int.

    Each component is expected in the range 0-255; red occupies the most
    significant byte and blue the least.
    """
    red_bits = red << 16
    green_bits = green << 8
    return red_bits | green_bits | blue
def hexToColor(hexColor):
    """Parse a hex color string such as '#ff00ff' (leading '#' optional) into a 24-bit int."""
    digits = hexColor.replace('#', '')
    return int(digits, 16)
class ColorVal:
    """Named 24-bit color constants (packed via Color()) for LED effects."""
    NONE = Color(0,0,0)
    BLUE = Color(0,31,255)
    CYAN = Color(0,255,255)
    DARK_ORANGE = Color(255,63,0)
    DARK_YELLOW = Color(250,210,0)
    GREEN = Color(0,255,0)
    LIGHT_GREEN = Color(127,255,0)
    ORANGE = Color(255,128,0)
    MINT = Color(63,255,63)
    PINK = Color(255,0,127)
    PURPLE = Color(127,0,255)
    RED = Color(255,0,0)
    SKY = Color(0,191,255)
    WHITE = Color(255,255,255)
    YELLOW = Color(255,255,0)
def setPixels(strip, img, invertRows=False):
    """Copy an image's pixels onto the LED strip in reading order.

    Stops early once the strip runs out of pixels. When invertRows is True,
    even-numbered rows are mirrored horizontally (serpentine panel wiring).
    NOTE(review): the mirror uses a fixed width of 16 columns (``15 - col``)
    — confirm panels are always 16 LEDs wide.
    """
    pos = 0
    for row in range(img.height):
        for col in range(img.width):
            if pos >= strip.numPixels():
                return
            src_col = col
            if invertRows and row % 2 == 0:
                src_col = 15 - col
            px = img.getpixel((src_col, row))
            strip.setPixelColor(pos, Color(px[0], px[1], px[2]))
            pos += 1
def stagingEffects(start_time_ms, callback):
    """Count down to a staging start time, invoking *callback* each pass.

    start_time_ms: target time (on the ms_counter() clock) when staging
        ends; None disables the countdown entirely.
    callback: called with the remaining milliseconds (measured before the
        sleep) after each wait.

    Sleeps only the sub-second remainder of the time left, so callbacks
    land roughly on whole-second boundaries of the countdown.
    """
    if start_time_ms is not None:
        while ms_counter() < start_time_ms:
            diff_ms = start_time_ms - ms_counter()
            if diff_ms:
                # sleep the fractional-second remainder, then report the
                # (pre-sleep) remaining time to the callback
                diff_to_s = millis_to_secs(diff_ms % 1000)
                gevent.sleep(diff_to_s)
                callback(diff_ms)
            else:
                break
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,052
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/interface/Node.py
|
'''
Node class for the RotorHazard interface.
Command agnostic behaviour only.
'''
import logging
import gevent.lock
from typing import List, Optional
from rh.util import ms_counter
from . import pack_8, unpack_8, pack_16, unpack_16, pack_32, unpack_32, \
calculate_checksum, SampleHistory, RssiSample, LifetimeSample
MAX_RETRY_COUNT = 4 # Limit of I/O retries
logger = logging.getLogger(__name__)
class CommandsWithRetry:
    """Checksummed read/write command I/O with bounded retries.

    Subclasses must provide ``self.manager`` (exposing ``_read_command`` /
    ``_write_command``) and ``self.addr`` (used in log messages). Command
    and error counts are tracked for diagnostics.

    Fix vs. previous revision: ``write_command``'s retry logger used
    ``retry_count <= max_retries``, which made the "Retry limit reached"
    branch unreachable; it now uses ``<`` to match ``read_command``.
    """
    def __init__(self, manager):
        self.manager = manager
        self.io_request_ms: Optional[int] = None  # request time of last I/O read
        self.io_response_ms: Optional[int] = None  # response time of last I/O read
        self.write_command_count = 0
        self.read_command_count = 0
        self.write_error_count = 0
        self.read_error_count = 0

    def read_command(self, command, size, max_retries=MAX_RETRY_COUNT, log_level=logging.WARNING):
        """Read ``size`` payload bytes for ``command``, retrying on failure.

        The device appends a one-byte checksum; it is validated and stripped.
        Returns the payload bytes, or None after ``max_retries`` retries
        (I/O errors, bad length or bad checksum all count as failures).
        """
        self.read_command_count += 1
        success = False
        retry_count = 0
        def log_io_error(msg):
            nonlocal retry_count
            # Log intermediate retries at debug; the final failure at log_level.
            if retry_count < max_retries:
                logger.debug('Retry ({4}) in read_command: addr={0} cmd={1:#04x} size={2} retry={3}'.format(self.addr, command, size, retry_count, msg))
            else:
                logger.log(log_level, 'Retry limit reached ({4}) in read_command: addr={0} cmd={1:#04x} size={2} retry={3}'.format(self.addr, command, size, retry_count, msg))
            retry_count += 1
            self.read_error_count += 1
            gevent.sleep(0.025)  # brief back-off before retrying
        data = None
        while success is False and retry_count <= max_retries:
            try:
                self.io_response_ms = None
                self.io_request_ms = ms_counter()
                data = self.manager._read_command(command, size)
                self.io_response_ms = ms_counter()
                if data and len(data) == size + 1:
                    # validate trailing checksum byte
                    expected_checksum = calculate_checksum(data[:-1])
                    actual_checksum = data[-1]
                    if actual_checksum == expected_checksum:
                        data = data[:-1]
                        success = True
                    else:
                        log_io_error("checksum was {} expected {}, data was {}".format(actual_checksum, expected_checksum, bytearray(data).hex()))
                else:
                    log_io_error("bad length {}".format(len(data)) if data else "no data")
            except IOError as err:
                log_io_error(err)
        return data if success else None

    def write_command(self, command, data, max_retries=MAX_RETRY_COUNT, log_level=logging.WARNING):
        """Write ``data`` (with checksum appended) for ``command``.

        Returns True on success, False after ``max_retries`` retries.
        """
        self.write_command_count += 1
        success = False
        retry_count = 0
        def log_io_error(msg):
            nonlocal retry_count
            # '<' (not '<=') so the final failure is logged at log_level,
            # consistent with read_command above.
            if retry_count < max_retries:
                logger.debug('Retry ({4}) in write_command: addr={0} cmd={1:#04x} data={2} retry={3}'.format(self.addr, command, data, retry_count, msg))
            else:
                logger.log(log_level, 'Retry limit reached ({4}) in write_command: addr={0} cmd={1:#04x} data={2} retry={3}'.format(self.addr, command, data, retry_count, msg))
            retry_count += 1
            self.write_error_count += 1
            gevent.sleep(0.025)  # brief back-off before retrying
        data_with_checksum = bytearray()
        data_with_checksum.extend(data)
        data_with_checksum.append(calculate_checksum(data_with_checksum))
        while success is False and retry_count <= max_retries:
            try:
                self.manager._write_command(command, data_with_checksum)
                success = True
            except IOError as err:
                log_io_error(err)
        return success

    def get_value_8(self, command, max_retries=MAX_RETRY_COUNT):
        """Read and unpack an unsigned 8-bit value, or None on failure."""
        data = self.read_command(command, 1, max_retries)
        return unpack_8(data) if data is not None else None

    def get_value_16(self, command, max_retries=MAX_RETRY_COUNT):
        """Read and unpack an unsigned 16-bit value, or None on failure."""
        data = self.read_command(command, 2, max_retries)
        return unpack_16(data) if data is not None else None

    def get_value_32(self, command, max_retries=MAX_RETRY_COUNT):
        """Read and unpack an unsigned 32-bit value, or None on failure.

        ``max_retries`` added for consistency with the 8/16-bit variants
        (default preserves previous behavior).
        """
        data = self.read_command(command, 4, max_retries)
        return unpack_32(data) if data is not None else None

    def set_value_8(self, command, val):
        self.write_command(command, pack_8(val))

    def set_value_16(self, command, val):
        self.write_command(command, pack_16(val))

    def set_value_32(self, command, val):
        self.write_command(command, pack_32(val))

    def set_and_validate_value(self, write_func, write_command, read_func, read_command, in_value, size, max_retries=MAX_RETRY_COUNT, not_set_handler=None):
        """Write a value and read it back until it verifies or retries run out.

        Returns the read-back value on success; on failure returns the
        requested ``in_value`` (callers treat the write as if it took effect).
        ``not_set_handler(in_value, out_value)`` may supply extra detail for
        the mismatch log message.
        """
        success = False
        retry_count = 0
        out_value = None
        while success is False and retry_count <= max_retries:
            write_func(write_command, in_value)
            gevent.sleep(0.05)  # allow time for command to be actioned
            out_value = read_func(read_command, size)
            if out_value == in_value:
                success = True
            else:
                retry_count += 1
                detailed_err_msg = not_set_handler(in_value, out_value) if not_set_handler else None
                logger.info('Value not set (retry={0}): cmd={1:#04x}, set={2}, get={3}, node={4}{5}'.
                            format(retry_count, write_command, in_value, out_value, self,
                                   " ({})".format(detailed_err_msg) if detailed_err_msg else ""))
        return out_value if success else in_value

    def set_and_validate_value_8(self, write_command, read_command, val, **kwargs):
        return self.set_and_validate_value(self.set_value_8, write_command, self.get_value_8, read_command, val, 1, **kwargs)

    def set_and_validate_value_16(self, write_command, read_command, val, **kwargs):
        return self.set_and_validate_value(self.set_value_16, write_command, self.get_value_16, read_command, val, 2, **kwargs)

    def set_and_validate_value_32(self, write_command, read_command, val, **kwargs):
        return self.set_and_validate_value(self.set_value_32, write_command, self.get_value_32, read_command, val, 4, **kwargs)
class NodeManager(CommandsWithRetry):
    """Owns a group of Node objects sharing one physical I/O channel.

    All bus traffic is funnelled through an RLock (via ``with self:``) so
    only one greenlet talks to the hardware at a time.
    """
    def __init__(self):
        super().__init__(manager=self)
        self.nodes: List[Node] = []
        self.lock = gevent.lock.RLock()

    def is_multi_node(self):
        return len(self.nodes) > 1

    def add_node(self, index):
        """Create and register a new node at the next manager-local slot."""
        new_node = self._create_node(index, len(self.nodes))
        self.nodes.append(new_node)
        return new_node

    def remove_node(self, node):
        if node.manager is not self:
            raise ValueError("Node does not belong to this manager")
        self.nodes.remove(node)

    def _create_node(self, index, multi_node_index):
        # Factory hook; subclasses may return specialized Node types.
        return Node(index, multi_node_index, self)

    def read_command(self, command, size, max_retries=MAX_RETRY_COUNT, log_level=logging.WARNING):
        """Read data for the given command and size, under the bus lock."""
        with self:  # only allow one greenlet at a time
            return super().read_command(command, size, max_retries, log_level)

    def write_command(self, command, data, max_retries=MAX_RETRY_COUNT, log_level=logging.WARNING):
        """Write data for the given command, under the bus lock."""
        with self:  # only allow one greenlet at a time
            return super().write_command(command, data, max_retries, log_level)

    def set_and_validate_value(self, write_func, write_command, read_func, read_command, val, size, **kwargs):
        """Write-and-verify a value, under the bus lock."""
        with self:  # only allow one greenlet at a time
            return super().set_and_validate_value(write_func, write_command, read_func, read_command, val, size, **kwargs)

    def select(self, node):
        # Default: no multiplexing needed; subclasses may switch channels.
        return True

    def close(self):
        # Default: no resources to release.
        pass

    def get_disabled_frequency(self):
        return 0

    def __enter__(self):
        self.lock.__enter__()

    def __exit__(self, exc_type, exc_value, traceback):
        self.lock.__exit__(exc_type, exc_value, traceback)
class Node(CommandsWithRetry):
    '''Node class represents the arduino/rx pair.

    Holds per-node tuning (frequency, enter/exit thresholds), live RSSI
    state, and per-race pass/crossing state (cleared via reset()). All I/O
    goes through the owning manager, which serializes bus access and
    selects this node before each command.
    '''
    def __init__(self, index, multi_node_index, manager):
        super().__init__(manager=manager)
        # logical node index within an interface
        self.index = index
        # logical node index within a manager
        self.multi_node_index = multi_node_index
        # physical slot position
        self.multi_node_slot_index = None
        # address string: "<manager addr>#<index within manager>"
        self.addr = "{}#{}".format(self.manager.addr, self.multi_node_index)
        self.frequency = 0
        self.bandChannel = None
        self.current_rssi = RssiSample()
        self.current_lifetime = LifetimeSample() # ph
        self.node_peak_rssi = 0
        self.node_nadir_rssi = manager.max_rssi_value
        self.current_pilot_id = 0
        self.first_cross_flag = False
        self.show_crossing_flag = False
        # crossing detection thresholds
        self.enter_at_level = 0
        self.exit_at_level = 0
        self.ai_calibrate = False
        self.calibrate = True
        self.start_thresh_lower_flag = False # True while EnterAt/ExitAt lowered at start of race
        self.start_thresh_lower_time_ms = 0  # time when EnterAt/ExitAt should be restored
        # accumulators for capturing an averaged EnterAt level
        self.cap_enter_at_flag = False
        self.cap_enter_at_total = 0
        self.cap_enter_at_count = 0
        self.cap_enter_at_end_ts_ms = 0
        # accumulators for capturing an averaged ExitAt level
        self.cap_exit_at_flag = False
        self.cap_exit_at_total = 0
        self.cap_exit_at_count = 0
        self.cap_exit_at_end_ts_ms = 0
        self.scan_enabled = False
        self.scan_data = {}
        self._init()
    def _init(self):
        # Reset all per-race state (also used by reset()).
        self.pass_history: List[RssiSample] = []
        self.history = SampleHistory() # clear race history
        self.pass_count = None
        self.pass_peak_rssi = 0
        self.pass_nadir_rssi = self.manager.max_rssi_value
        self.pass_crossing_flag = False
        self.is_crossing = False
        self.enter_at_sample: Optional[RssiSample] = None
        self.exit_at_sample: Optional[RssiSample] = None
        self.lap_stats_count = None
        self.enter_count = None
        self.exit_count = None
        self.pending_enter_count = None
        self.pending_exit_count = None
        self.under_min_lap_count = 0
        self._loop_time = 0 # microseconds
    @property
    def loop_time(self):
        # duration of the node's processing loop, in microseconds
        return self._loop_time
    @loop_time.setter
    def loop_time(self, v):
        self._loop_time = v
    def reset(self):
        '''Clear per-race state.'''
        self._init()
    def is_valid_rssi(self, value):
        # readings at/above the manager's maximum indicate an invalid sample
        return value < self.manager.max_rssi_value
    def get_read_error_report_str(self):
        # NOTE(review): divides by read_command_count — raises
        # ZeroDivisionError if no reads have occurred; confirm callers
        # only invoke this after I/O has happened.
        return "Node {0}: {1}/{2} ({3:.2%})".format(self, self.read_error_count, \
            self.read_command_count, (float(self.read_error_count) / float(self.read_command_count)))
    def read_command(self, command, size, max_retries=MAX_RETRY_COUNT, log_level=logging.WARNING):
        '''
        Read data given command, and data size.
        Returns None (implicitly) if this node cannot be selected.
        '''
        with self.manager: # only allow one greenlet at a time
            if self.manager.select(self):
                return super().read_command(command, size, max_retries, log_level)
    def write_command(self, command, data, max_retries=MAX_RETRY_COUNT, log_level=logging.WARNING):
        '''
        Write data given command, and data.
        Returns None (implicitly) if this node cannot be selected.
        '''
        with self.manager: # only allow one greenlet at a time
            if self.manager.select(self):
                return super().write_command(command, data, max_retries, log_level)
    def set_and_validate_value(self, write_func, write_command, read_func, read_command, val, size, **kwargs):
        # Write-and-verify, after selecting this node on the shared bus.
        with self.manager: # only allow one greenlet at a time
            if self.manager.select(self):
                return super().set_and_validate_value(write_func, write_command, read_func, read_command, val, size, **kwargs)
    def summary_stats(self):
        # Hook for subclasses; no stats emitted by the base node.
        pass
    def __str__(self):
        return "{}@{}".format(self.index+1, self.addr)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,053
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/apis/__init__.py
|
from collections import namedtuple
from threading import Lock
from rh.interface import RssiSample, LifetimeSample
from rh.interface.BaseHardwareInterface import BaseHardwareInterface, BaseHardwareInterfaceListener
from rh.helpers.mqtt_helper import make_topic
from typing import Optional
RESET_FREQUENCY = -1
class NodeRef(namedtuple('NodeRef', ['timer', 'address', 'index', 'node'])):
    """Reference to a timing node, identified by (timer, address, index).

    The trailing ``node`` field carries an optional local Node object and is
    deliberately excluded from equality and hashing, so refs compare equal
    whether or not a Node is attached.
    """
    def __hash__(self):
        return hash((self.timer, self.address, self.index))

    def __eq__(self, other):
        return (self.timer, self.address, self.index) == tuple(other[:3])

    def __str__(self):
        return make_topic('', [self.timer, self.address, str(self.index)])
class RHListener(BaseHardwareInterfaceListener):
    """Adapts hardware-interface events to RotorHazard server callbacks.

    Events for refs without a local node are either ignored or, for passes,
    routed to the split-record callback.
    """
    def __init__(self,
                 node_crossing_callback,
                 pass_record_callback,
                 split_record_callback,
                 on_set_frequency,
                 on_set_enter_at_level,
                 on_set_exit_at_level):
        self.node_crossing_callback = node_crossing_callback
        self.pass_record_callback = pass_record_callback
        self.split_record_callback = split_record_callback
        self.on_set_frequency = on_set_frequency
        self.on_set_enter_at_level = on_set_enter_at_level
        self.on_set_exit_at_level = on_set_exit_at_level

    def on_rssi_sample(self, node_ref, ts: int, rssi: int):
        # Raw RSSI samples are not forwarded to the server.
        pass

    def on_enter_triggered(self, node_ref, cross_ts: int, cross_rssi: int, cross_lifetime: Optional[int]=None):
        if node_ref.node:
            self.node_crossing_callback(node_ref.node, True, cross_ts, cross_rssi)

    def on_exit_triggered(self, node_ref, cross_ts: int, cross_rssi: int, cross_lifetime: Optional[int]=None):
        if node_ref.node:
            self.node_crossing_callback(node_ref.node, False, cross_ts, cross_rssi)

    def on_pass(self, node_ref, lap_ts: int, lap_source, pass_rssi: int):
        if not node_ref.node:
            # Pass from a remote/secondary timer: record as a split.
            self.split_record_callback(node_ref.timer, node_ref.address, node_ref.index, lap_ts)
        else:
            self.pass_record_callback(node_ref.node, lap_ts, lap_source)

    def on_frequency_changed(self, node_ref, frequency: int, band: Optional[str]=None, channel: Optional[int]=None):
        node = node_ref.node
        if not node:
            return
        if frequency >= 0:
            freq_data = {'node': node.index, 'frequency': frequency}
            # band/channel only meaningful for an active (non-zero) frequency
            if frequency > 0 and band is not None and channel is not None:
                freq_data['band'] = band
                freq_data['channel'] = channel
            self.on_set_frequency(freq_data)
        elif frequency == RESET_FREQUENCY:
            # clear band/channel assignments by re-announcing the node's frequency
            self.on_set_frequency({'node': node.index, 'frequency': node.frequency})

    def on_enter_trigger_changed(self, node_ref, level: int):
        if node_ref.node:
            self.on_set_enter_at_level({'node': node_ref.node.index, 'enter_at_level': level})

    def on_exit_trigger_changed(self, node_ref, level: int):
        if node_ref.node:
            self.on_set_exit_at_level({'node': node_ref.node.index, 'exit_at_level': level})
class RssiSampleListener(BaseHardwareInterfaceListener):
    """Accumulates recent RSSI and lifetime samples per node.

    Keeps at most MAX_SAMPLES newest samples per node; retrieval returns the
    per-node lists sorted by timestamp. All access is guarded by a lock.
    """
    MAX_SAMPLES = 20

    def __init__(self):
        self.lock = Lock()
        self.rssi_samples_by_node = {}
        self.lifetime_samples_by_node = {}

    def get_rssis(self):
        """Return the per-node RSSI sample lists, time-sorted."""
        with self.lock:
            for node_samples in self.rssi_samples_by_node.values():
                node_samples.sort(key=lambda s: s.timestamp)
            return self.rssi_samples_by_node

    def get_lifetimes(self):
        """Return the per-node lifetime sample lists, time-sorted."""
        with self.lock:
            for node_samples in self.lifetime_samples_by_node.values():
                node_samples.sort(key=lambda s: s.timestamp)
            return self.lifetime_samples_by_node

    def _get_rssi_samples(self, node_ref):
        # Create-on-demand list for this node.
        return self.rssi_samples_by_node.setdefault(node_ref, [])

    def _get_lifetime_samples(self, node_ref):
        # Create-on-demand list for this node.
        return self.lifetime_samples_by_node.setdefault(node_ref, [])

    def _truncate_samples(self, samples):
        # Keep only the newest MAX_SAMPLES entries.
        if len(samples) > RssiSampleListener.MAX_SAMPLES:
            samples.sort(key=lambda s: s.timestamp)
            del samples[:-RssiSampleListener.MAX_SAMPLES]

    def on_rssi_sample(self, node_ref, ts: int, rssi: int):
        with self.lock:
            node_samples = self._get_rssi_samples(node_ref)
            node_samples.append(RssiSample(ts, rssi))
            self._truncate_samples(node_samples)

    def on_enter_triggered(self, node_ref, cross_ts: int, cross_rssi: int, cross_lifetime: Optional[int]=None):
        with self.lock:
            node_samples = self._get_rssi_samples(node_ref)
            node_samples.append(RssiSample(cross_ts, cross_rssi))
            self._truncate_samples(node_samples)
            if cross_lifetime is not None:
                lt_samples = self._get_lifetime_samples(node_ref)
                lt_samples.append(LifetimeSample(cross_ts, cross_lifetime))
                self._truncate_samples(lt_samples)

    def on_exit_triggered(self, node_ref, cross_ts: int, cross_rssi: int, cross_lifetime: Optional[int]=None):
        with self.lock:
            node_samples = self._get_rssi_samples(node_ref)
            node_samples.append(RssiSample(cross_ts, cross_rssi))
            self._truncate_samples(node_samples)
            if cross_lifetime is not None:
                # store nadir lifetimes as negatives
                lt_samples = self._get_lifetime_samples(node_ref)
                lt_samples.append(LifetimeSample(cross_ts, -cross_lifetime))
                self._truncate_samples(lt_samples)

    def on_pass(self, node_ref, lap_ts: int, lap_source, pass_rssi: int):
        # Only realtime passes carry a meaningful pass RSSI sample.
        if lap_source == BaseHardwareInterface.LAP_SOURCE_REALTIME:
            with self.lock:
                node_samples = self._get_rssi_samples(node_ref)
                node_samples.append(RssiSample(lap_ts, pass_rssi))
                self._truncate_samples(node_samples)

    def on_lifetime_sample(self, node_ref, ts: int, lifetime: int):
        with self.lock:
            # lifetimes are negatives for nadirs
            lt_samples = self._get_lifetime_samples(node_ref)
            lt_samples.append(LifetimeSample(ts, lifetime))
            self._truncate_samples(lt_samples)

    def on_extremum_history(self, node_ref, extremum_timestamp: int, extremum_rssi: int, extremum_duration: int):
        with self.lock:
            # record the extremum as a flat segment: its start and end points
            node_samples = self._get_rssi_samples(node_ref)
            node_samples.append(RssiSample(extremum_timestamp, extremum_rssi))
            node_samples.append(RssiSample(extremum_timestamp + extremum_duration, extremum_rssi))
            self._truncate_samples(node_samples)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,054
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/tests/test_sensors.py
|
import unittest
from rh.sensors import Sensors, I2CSensor
import sys
try:
    from smbus2 import SMBus
except ImportError:
    # Not running on a Pi (or smbus2 not installed): install the fake_rpi
    # shim so downstream imports of smbus2 still resolve.
    # (Was a bare `except:`, which would also swallow unrelated errors such
    # as KeyboardInterrupt/SyntaxError from the probed module.)
    import fake_rpi
    sys.modules['smbus2'] = fake_rpi.smbus
from rh.helpers.i2c_helper import I2CBus
import tests as tests_pkg
class SensorsTest(unittest.TestCase):
    """Unit tests for sensor discovery and the I2CSensor wrapper."""
    def setUp(self):
        self.i2c_bus = I2CBus(1)
        self.sensors = Sensors()
    def tearDown(self):
        pass
    def test_update(self):
        # Discover the stub sensor provided by the tests package.
        self.sensors.discover(tests_pkg)
        self.assertEqual(len(self.sensors), 1)
        before = self.sensors[0].getReadings()
        self.sensors.update_environmental_data()
        self.sensors.update_environmental_data()
        after = self.sensors[0].getReadings()
        # NOTE(review): two update calls are expected to advance the counter
        # by exactly 1 — presumably the stub sensor rate-limits updates;
        # confirm against the stub's implementation.
        self.assertEqual(after['counter']['value'], before['counter']['value']+1)
    def test_i2c_sensor(self):
        sensor = I2CSensor('i2c test', 8, self.i2c_bus)
        # URL encodes bus id and zero-padded hex device address.
        self.assertEqual(sensor.url, 'i2c:1/0x08')
        self.assertEqual(sensor.name, 'i2c test')
        self.assertEqual(sensor.i2c_address, 8)
        self.assertEqual(sensor.i2c_bus.id, 1)
if __name__ == '__main__':
unittest.main()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,055
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/tests/test_chorus.py
|
import unittest
from rh.interface.ChorusInterface import ChorusInterface
from rh.interface.MockInterface import MockInterface
from rh.interface import RssiSample
from rh.apis.chorus_api import ChorusAPI
from . import stub_sensor
import gevent
class ChorusTest(unittest.TestCase):
    """End-to-end test wiring ChorusAPI to ChorusInterface over fake serial.

    Two DummySerial instances form a loopback: messages written by one side
    are handed to the other side's message processor, and the response is
    queued for read_until().
    """
    class DummySerial:
        # Minimal serial-port stand-in: write() dispatches each
        # newline-delimited message to `handler` and buffers the responses
        # for read_until().
        def __init__(self, handler):
            self.port = 'COM'
            self.handler = handler
            self.buffer = []
        def write(self, raw_data):
            # strip the trailing newline, then dispatch each message
            msgs = bytes.decode(raw_data)[:-1]
            for msg in msgs.split('\n'):
                response = self.handler(msg)
                self.buffer.append(response)
        def read_until(self):
            # poll until a response is available
            while not self.buffer:
                gevent.sleep(0.1)
            data = self.buffer.pop(0)
            return data
    def test(self):
        mock_intf = MockInterface()
        started = False
        race_stopped = False
        laps = 0
        def on_start():
            nonlocal started
            started = True
        def on_stop_race():
            nonlocal race_stopped
            race_stopped = True
        def on_reset_race():
            pass
        def on_pass(node, lap_ts, source, rssi):
            nonlocal laps
            laps += 1
        # Wire API <-> interface through the loopback serial pair.
        api = ChorusAPI(None, mock_intf, stub_sensor.discover(), on_start, on_stop_race, on_reset_race)
        api_io = ChorusTest.DummySerial(lambda data : api._process_message(data))
        intf = ChorusInterface(api_io)
        api.serial_io = ChorusTest.DummySerial(lambda data : intf._process_message(intf.node_managers[0], data))
        intf.listener.on_pass = on_pass
        # NOTE(review): expects the start callback to have fired during
        # interface construction — confirm against ChorusInterface.__init__.
        self.assertTrue(started)
        for sensor in intf.sensors:
            sensor.update()
        intf._update()
        self.assertGreater(len(sensor.getReadings()), 0)
        # Frequency/threshold settings should propagate to the mock nodes.
        intf.set_frequency(2, 5885)
        self.assertEqual(mock_intf.nodes[2].frequency, 5885)
        # Chorus protocol couples enter and exit levels: setting one sets both.
        intf.set_enter_at_level(4, 33)
        self.assertEqual(mock_intf.nodes[4].enter_at_level, 33)
        self.assertEqual(mock_intf.nodes[4].exit_at_level, 33)
        intf.set_exit_at_level(5, 34)
        self.assertEqual(mock_intf.nodes[4].enter_at_level, 33)
        self.assertEqual(mock_intf.nodes[4].exit_at_level, 33)
        self.assertEqual(mock_intf.nodes[5].enter_at_level, 34)
        self.assertEqual(mock_intf.nodes[5].exit_at_level, 34)
        # A pass record emitted by the API should reach our on_pass listener.
        api.emit_pass_record(mock_intf.nodes[0], 1, 98)
        gevent.sleep(0)
        self.assertEqual(laps, 1)
        intf.start()
        sample_0 = RssiSample(0, 66) # timestamp value is unused
        sample_3 = RssiSample(0, 67) # timestamp value is unused
        mock_intf.nodes[0].current_rssi = sample_0
        mock_intf.nodes[3].current_rssi = sample_3
        api.emit_rssi('*')
        gevent.sleep(0.5)
        intf.stop()
        # RSSI broadcast for all nodes ('*') should mirror into the interface.
        self.assertEqual(intf.nodes[0].current_rssi.rssi, sample_0.rssi)
        self.assertEqual(intf.nodes[3].current_rssi.rssi, sample_3.rssi)
if __name__ == '__main__':
unittest.main()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,056
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/tests/test_leds.py
|
import unittest
from rh.leds import led_handler_strip, led_handler_bitmap, led_handler_character, led_handler_graph
from rh.app.RHRace import RHRace
from rh.interface.MockInterface import MockInterface
class MockPixel:
    """Fake LED strip that records every rendered frame for inspection."""
    def __init__(self, count):
        # One slot per pixel, all initially off (0).
        self.pixels = [0] * count
        self.frames = []
    def begin(self):
        """No hardware to initialise."""
        pass
    def numPixels(self):
        """Number of pixels on the strip."""
        return len(self.pixels)
    def setPixelColor(self, i, color):
        self.pixels[i] = color
    def getPixelColor(self, i):
        return self.pixels[i]
    def show(self):
        # Snapshot the current pixel state as one rendered frame.
        self.frames.append(list(self.pixels))
class MockManager:
    """Minimal stand-in for the LED effect manager: every seat maps to colour 1."""
    def getDisplayColor(self, n, from_result=False):
        # Constant colour is enough for the effect tests.
        return 1
class LedsTest(unittest.TestCase):
    """Runs every discovered effect of each LED handler module and checks it renders frames."""

    def test_strip(self):
        self.run_effects(led_handler_strip)

    def test_bitmap(self):
        self.run_effects(led_handler_bitmap)

    def test_character(self):
        self.run_effects(led_handler_character)

    def test_graph(self):
        self.run_effects(led_handler_graph)

    def run_effects(self, module):
        """Discover all effects in `module` and assert each writes at least one frame."""
        pixels = MockPixel(36)
        race = RHRace()
        race.result_fn = lambda current_race: {'by_race_time': [{'starts':1, 'node':0, 'position':1}]}
        common_args = {
            'RACE': race,
            'iterations': 3,
            'time': 0,
            'lap': {
                'lap_number': 5,
                'lap_time': 45000,
                'lap_time_formatted': '45s'
            },
            'node_index': 0,
            'hide_stage_timer': True,
            'message': 'Test',
            'strip': pixels,
            'manager': MockManager(),
            'INTERFACE': MockInterface()
        }
        config = {'LED_ROWS': 6, 'PANEL_ROTATE': False, 'INVERTED_PANEL_ROWS': False}
        for effect in module.discover(config):
            # Effect defaults first, then the test fixtures override them.
            args = {}
            args.update(effect['defaultArgs'])
            args.update(common_args)
            pixels.frames = []
            effect['handlerFn'](args)
            self.assertGreater(len(pixels.frames), 0, effect)
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__':
    unittest.main()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,057
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/tests/stub_sensor.py
|
from rh.sensors import Sensor, Reading
class StubSensor(Sensor):
    """Deterministic sensor used by the test suite.

    Exposes a counter that advances on each update() and a fixed voltage.
    """
    def __init__(self):
        super().__init__('test:/test', 'TestSensor')
        self.description = 'Sensor for testing'
        self.value = 0
    @Reading(units='')
    def counter(self):
        """Number of times update() has been called."""
        return self.value
    @Reading(units='V')
    def voltage(self):
        """Constant dummy voltage."""
        return 12.8
    def update(self):
        self.value += 1
def discover(*args, **kwargs):
    # Sensor-module entry point: always reports a single StubSensor instance.
    return [StubSensor()]
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,058
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/tests/test_rhinterface.py
|
import sys
import logging
import unittest
from rh.interface.RHInterface import RHInterface
from rh.app.config import Config
import gevent
import platform
import subprocess
# Route all log output (DEBUG and up) on the root logger to stdout so it is
# visible in test runs. Use setLevel() rather than assigning .level directly:
# setLevel() is the documented API and also invalidates logging's level caches.
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler(sys.stdout))
@unittest.skipUnless(platform.system() == 'Linux', 'Test is only supported on Linux currently')
class RHInterfaceTest(unittest.TestCase):
    """Integration tests for RHInterface against locally built node firmware.

    Each test builds a firmware binary via scripts/build_ci.sh, runs it as a
    local socket server, connects RHInterface to it, and collects gcov
    coverage afterwards.
    """
    def test_node(self):
        """Single-node firmware: node discovery, settings, laps, frequency scan."""
        # Build and launch a 1-node firmware listening on port 7881.
        subprocess.run("./scripts/build_ci.sh 1", cwd='node', shell=True)
        self.node_proc = subprocess.Popen(["node/build_ci/rhnode1", "adcClock=COUNTER", "127.0.0.1:7881"])
        try:
            laps = 0
            # Count lap passes reported by the interface listener.
            def on_pass(node, lap_ts, source, rssi):
                nonlocal laps
                laps += 1
            config = Config()
            config.SERIAL_PORTS = []
            config.SOCKET_PORTS = [7881]
            intf = RHInterface(config=config, warn_loop_time=66000)
            try:
                intf.listener.on_pass = on_pass
                self.assertEqual(len(intf.nodes), 1)
                # Indexes should be assigned sequentially from zero.
                for i in range(len(intf.nodes)):
                    self.assertEqual(intf.nodes[i].index, i)
                    self.assertEqual(intf.nodes[i].multi_node_index, i)
                    self.assertEqual(intf.nodes[i].multi_node_slot_index, i)
                self.check_settings(intf)
                intf.start()
                # test laps
                gevent.sleep(10)
                self.assertGreater(laps, 0)
                # test scan
                node = intf.nodes[0]
                intf.set_frequency_scan(0, True)
                self.assertEqual(node.scan_enabled, True)
                gevent.sleep(10)
                self.assertGreater(len(node.scan_data), 0)
                # Disabling the scan should also clear accumulated scan data.
                intf.set_frequency_scan(0, False)
                self.assertEqual(node.scan_enabled, False)
                self.assertEqual(len(node.scan_data), 0)
                intf.send_shutdown_started_message()
            finally:
                intf.stop()
                intf.close()
        finally:
            # Always stop the firmware process and harvest coverage.
            self.node_proc.terminate()
            self.node_proc.wait(timeout=30)
            self.gcov('test_rhnode1')
    def test_multinode(self):
        """Four-node firmware: discovery, per-node settings, and lap detection."""
        subprocess.run("./scripts/build_ci.sh 4", cwd='node', shell=True)
        self.node_proc = subprocess.Popen(["node/build_ci/rhnode4", "adcClock=COUNTER", "127.0.0.1:7884"])
        try:
            laps = 0
            def on_pass(node, lap_ts, source, rssi):
                nonlocal laps
                laps += 1
            config = Config()
            config.SERIAL_PORTS = []
            config.SOCKET_PORTS = [7884]
            intf = RHInterface(config=config, warn_loop_time=66000)
            try:
                intf.listener.on_pass = on_pass
                self.assertEqual(len(intf.nodes), 4)
                for i in range(len(intf.nodes)):
                    self.assertEqual(intf.nodes[i].index, i)
                    self.assertEqual(intf.nodes[i].multi_node_index, i)
                    self.assertEqual(intf.nodes[i].multi_node_slot_index, i)
                self.check_settings(intf)
                intf.start()
                gevent.sleep(10)
                self.assertGreater(laps, 0)
                intf.send_shutdown_started_message()
            finally:
                intf.stop()
                intf.close()
        finally:
            self.node_proc.terminate()
            self.node_proc.wait(timeout=30)
            self.gcov('test_rhnode4')
    def test_no_nodes(self):
        """Zero-node firmware: the interface should start and stop cleanly."""
        subprocess.run("./scripts/build_ci.sh 0", cwd='node', shell=True)
        self.node_proc = subprocess.Popen(["node/build_ci/rhnode0", "adcClock=COUNTER", "127.0.0.1:7880"])
        try:
            config = Config()
            config.SERIAL_PORTS = []
            config.SOCKET_PORTS = [7880]
            intf = RHInterface(config=config, warn_loop_time=66000)
            try:
                self.assertEqual(len(intf.nodes), 0)
                intf.start()
                gevent.sleep(1)
                intf.send_shutdown_started_message()
            finally:
                intf.stop()
                intf.close()
        finally:
            self.node_proc.terminate()
            self.node_proc.wait(timeout=30)
            self.gcov('test_rhnode0')
    def check_settings(self, intf):
        """Round-trip frequency and enter/exit levels through every node."""
        for i in range(len(intf.nodes)):
            intf.set_frequency(i, 5885)
            self.assertEqual(intf.nodes[i].frequency, 5885)
            intf.set_enter_at_level(i, 23)
            self.assertEqual(intf.nodes[i].enter_at_level, 23)
            intf.set_exit_at_level(i, 24)
            self.assertEqual(intf.nodes[i].exit_at_level, 24)
    def gcov(self, testname):
        """Collect gcov coverage from the node build and move it into test-reports."""
        subprocess.run("gcov -b -c *.cpp", cwd='node', shell=True)
        gcov_dst_path = '../test-reports/' + testname
        subprocess.run("mkdir -p {0}; mv *.gcov {0}; rm *.gcda; rm *.gcno".format(gcov_dst_path), cwd='node', shell=True)
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__':
    unittest.main()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,059
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/interface/nodes/i2c_node.py
|
'''RotorHazard I2C interface layer.'''
import logging
from smbus2 import SMBus
from .. import RHInterface as rhi
logger = logging.getLogger(__name__)
class I2CNodeManager(rhi.RHNodeManager):
    """Node manager that talks to RotorHazard nodes over an I2C bus."""
    def __init__(self, i2c_addr, i2c_bus):
        super().__init__()
        self.i2c_addr = i2c_addr
        self.i2c_bus = i2c_bus
        self.addr = self.i2c_bus.url_of(self.i2c_addr)
    def _read_command(self, command, size):
        """Read size+1 bytes for `command`, serialised through the bus helper."""
        def _do_read():
            with SMBus(self.i2c_bus.id) as bus:
                return bus.read_i2c_block_data(self.i2c_addr, command, size + 1)
        return self.i2c_bus.with_i2c(_do_read)
    def _write_command(self, command, data):
        """Write `data` for `command`, serialised through the bus helper."""
        def _do_write():
            with SMBus(self.i2c_bus.id) as bus:
                bus.write_i2c_block_data(self.i2c_addr, command, data)
        self.i2c_bus.with_i2c(_do_write)
def discover(idxOffset, i2c_helper, i2c_addrs=(8, 10, 12, 14, 16, 18, 20, 22), *args, **kwargs):
    """Scan the I2C buses for RotorHazard nodes.

    :param idxOffset: first global node index to assign
    :param i2c_helper: iterable of I2C bus helpers
    :param i2c_addrs: candidate 7-bit device addresses to probe
        (tuple rather than list: mutable default arguments are a shared-state
        pitfall; callers passing a list still work)
    :return: list of I2CNodeManager, one per responding device
    """
    logger.info("Searching for I2C nodes...")
    node_managers = []
    # Scans provided i2c_addrs to populate nodes array
    next_index = idxOffset
    for i2c_bus in i2c_helper:
        logger.info("...scanning I2C bus {}...".format(i2c_bus.id))
        for i2c_addr in i2c_addrs:
            node_manager = I2CNodeManager(i2c_addr, i2c_bus)
            try:
                node_addr = node_manager.read_address()
                if node_addr == i2c_addr:
                    if node_manager.discover_nodes(next_index):
                        logger.info('...{} I2C node(s) with API level {} found at address {}'.format(len(node_manager.nodes), node_manager.api_level, i2c_addr))
                        next_index += len(node_manager.nodes)
                        node_managers.append(node_manager)
                elif node_addr:
                    # Device answered but with a different address than probed.
                    logger.error("Reported address {} does not match actual address {}".format(node_addr, i2c_addr))
            except IOError:
                logger.info("...No I2C nodes at address {}".format(i2c_addr))
        if len(node_managers) == 0:
            break  # if first I2C node not found then stop trying
    return node_managers
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,060
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/endpoints/rssi_endpoints.py
|
from flask.blueprints import Blueprint
from rh.util import RHTimeFns
EPOCH_SYNC = RHTimeFns.MonotonicEpochSync()
def createBlueprint(sample_listener):
    """Build the Flask blueprint that serves collected RSSI/lifetime samples."""
    APP = Blueprint('rssi', __name__)

    @APP.route('/rssi')
    def rssi_data():
        """Return {node: {'rssi': [...], 'lifetime': [...]}} as chart points."""
        rssis_by_node = sample_listener.get_rssis()
        lifetimes_by_node = sample_listener.get_lifetimes()
        # Union of every node that has either kind of sample.
        nodes = set(rssis_by_node) | set(lifetimes_by_node)
        payload = {}
        for node in sorted(nodes):
            node_samples = {}
            for key, samples_by_node in (('rssi', rssis_by_node), ('lifetime', lifetimes_by_node)):
                samples = samples_by_node.get(node)
                if samples:
                    # Each sample is (monotonic_ts, value); convert to epoch ms.
                    node_samples[key] = [{'t': EPOCH_SYNC.monotonic_to_epoch_millis(s[0]), 'y': s[1]} for s in samples]
            payload[str(node)] = node_samples
        return payload
    return APP
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,061
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/sensors/psutil_sensor.py
|
# coding=UTF-8
import psutil
from . import Sensor, Reading
def psutil_sensor_url(unit_name, sub_label):
    """Canonical sensor URL of the form ``psutil:<unit>/<label>``."""
    return f"psutil:{unit_name}/{sub_label}"
def psutil_sensor_name(unit_name, sub_label):
    """Display name: ``unit (label)``, or just the unit when the label is empty."""
    if sub_label:
        return f"{unit_name} ({sub_label})"
    return unit_name
class PsUtilSensor(Sensor):
    """Base class for sensors backed by psutil's platform sensor APIs."""
    def __init__(self, name, unit_name, sub_label):
        """
        :param name: display name for the sensor
        :param unit_name: psutil unit key identifying the hardware source
        :param sub_label: label distinguishing sub-sensors within a unit ('' if none)
        """
        super().__init__(url=psutil_sensor_url(unit_name, sub_label), name=name)
        self.unit_name = unit_name
        self.sub_label = sub_label
        # Prime the first reading; subclasses implement update().
        self.update()
class TemperatureSensor(PsUtilSensor):
    """Temperature reading sourced from psutil.sensors_temperatures()."""
    def __init__(self, name, unit_name, sub_label):
        super().__init__(name=name, unit_name=unit_name, sub_label=sub_label)
        self.description = 'Temperature'
    def update(self):
        """Refresh the cached temperature; keep the last value if the source vanished."""
        temps = psutil.sensors_temperatures()
        # The labelled entry can disappear at runtime; the original code
        # dereferenced .current on a possible None from next(..., None),
        # raising AttributeError. Guard the match instead.
        match = next((s for s in temps.get(self.unit_name, ()) if s.label == self.sub_label), None)
        if match is not None:
            self._temp = match.current
    @Reading(units='°C')
    def temperature(self):
        """Last observed temperature in °C."""
        return self._temp
class FanSensor(PsUtilSensor):
    """Fan speed sourced from psutil.sensors_fans()."""
    def __init__(self, name, unit_name, sub_label):
        super().__init__(name=name, unit_name=unit_name, sub_label=sub_label)
        self.description = 'Fan'
    def update(self):
        """Refresh the cached fan speed; keep the last value if the source vanished."""
        fans = psutil.sensors_fans()
        # Guard against a missing labelled entry: the original dereferenced
        # .current on a possible None from next(..., None).
        match = next((s for s in fans.get(self.unit_name, ()) if s.label == self.sub_label), None)
        if match is not None:
            self._rpm = match.current
    @Reading(units='rpm')
    def speed(self):
        """Last observed fan speed in rpm."""
        return self._rpm
class BatterySensor(PsUtilSensor):
    """Battery charge level from psutil.sensors_battery()."""
    def __init__(self, name, unit_name, sub_label):
        super().__init__(name=name, unit_name=unit_name, sub_label=sub_label)
        self.description = 'Battery'
    def update(self):
        """Refresh capacity; sensors_battery() returns None when no battery is present."""
        batt = psutil.sensors_battery()
        # Original read batt.percent unconditionally and would crash with
        # AttributeError if the battery disappeared (e.g. undocked).
        if batt is not None:
            self._capacity = batt.percent
    @Reading(units='%')
    def capacity(self):
        """Last observed charge percentage."""
        return self._capacity
def _discover_labeled(config, units, sensor_cls):
    """Create one sensor per labelled sub-sensor, honouring per-URL config overrides."""
    sensors = []
    for unit_name, sub_sensors in units.items():
        for sub_sensor in sub_sensors:
            sub_label = sub_sensor.label
            url = psutil_sensor_url(unit_name, sub_label)
            sensor_config = config.get(url, {})
            if sensor_config.get('enabled', True):
                name = sensor_config.get('name', psutil_sensor_name(unit_name, sub_label))
                sensors.append(sensor_cls(name, unit_name, sub_label))
    return sensors

def discover(config, *args, **kwargs):
    """Discover all psutil-backed sensors available on this platform.

    :param config: dict mapping sensor URL -> {'enabled': bool, 'name': str}
    :return: list of Sensor instances

    The temperature and fan loops were identical except for the sensor class;
    factored into _discover_labeled.
    """
    sensors = []
    # Battery: single, unlabelled; only when the platform reports one.
    if hasattr(psutil, 'sensors_battery'):
        unit_name = 'battery'
        sub_label = ''
        url = psutil_sensor_url(unit_name, sub_label)
        sensor_config = config.get(url, {})
        if sensor_config.get('enabled', True) and psutil.sensors_battery():
            name = sensor_config.get('name', 'Battery')
            sensors.append(BatterySensor(name, unit_name, sub_label))
    if hasattr(psutil, 'sensors_temperatures'):
        sensors.extend(_discover_labeled(config, psutil.sensors_temperatures(), TemperatureSensor))
    if hasattr(psutil, 'sensors_fans'):
        sensors.extend(_discover_labeled(config, psutil.sensors_fans(), FanSensor))
    return sensors
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,062
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/events/mqtt_event_manager.py
|
import logging
from .eventmanager import Evt
from rh.helpers.mqtt_helper import make_topic
import json
logger = logging.getLogger(__name__)
class MqttEventManager:
    """Publishes race and sensor announcements over MQTT."""

    def __init__(self, eventmanager, data, race, config, mqttClient):
        self.Events = eventmanager
        self.RHData = data
        self.RACE = race
        self.config = config
        self.client = mqttClient

    def install_default_messages(self):
        """Register all default MQTT publishers; no-op without a client."""
        if not self.client:
            return
        for evt, fn in (
            (Evt.RACE_START, race_start),
            (Evt.RACE_LAP_RECORDED, race_lap),
            (Evt.RACE_SPLIT_RECORDED, race_split),
            (Evt.RACE_FINISH, race_finish),
            (Evt.RACE_STOP, race_stop),
            (Evt.SENSOR_UPDATE, sensor_update),
        ):
            self.addEvent(evt, fn)

    def addEvent(self, event, msgFunc):
        self.Events.on(event, 'MQTT', self.create_handler(msgFunc))

    def create_handler(self, func):
        """Wrap msgFunc so it receives the shared MQTT/race context as kwargs."""
        def _handler(args):
            args['client'] = self.client
            args['race_topic'] = self.config['RACE_ANN_TOPIC']
            args['sensor_topic'] = self.config['SENSOR_ANN_TOPIC']
            args['raceEvent'] = self.RHData.get_option('eventName', '')
            args['RHData'] = self.RHData
            args['RACE'] = self.RACE
            func(**args)
        return _handler
def _race_segments(raceEvent, RACE, *extra):
    # Topic path segments common to every race announcement; the original
    # repeated this expression in all five publishers below.
    return [raceEvent, str(RACE.current_stage), str(RACE.current_heat), str(RACE.current_round)] + list(extra)

def race_start(client, race_topic, raceEvent, RACE, **kwargs):
    """Announce the race start time."""
    msg = {'startTime': RACE.start_time_epoch_ms}
    client.publish(make_topic(race_topic, _race_segments(raceEvent, RACE)), json.dumps(msg))

def race_lap(client, race_topic, raceEvent, RACE, node_index, lap, location_id, **kwargs):
    """Announce a recorded lap for the pilot in the given seat."""
    pilot = RACE.node_pilots[node_index]
    msg = {'lap': lap['lap_number'], 'timestamp': lap['lap_time_stamp'], 'location': location_id, 'seat': node_index}
    client.publish(make_topic(race_topic, _race_segments(raceEvent, RACE, pilot.callsign)), json.dumps(msg))

def race_split(client, race_topic, raceEvent, RACE, node_index, split, location_id, **kwargs):
    """Announce a split time for the pilot in the given seat."""
    pilot = RACE.node_pilots[node_index]
    msg = {'lap': split['lap_number'], 'timestamp': split['split_time_stamp'], 'location': location_id, 'seat': node_index}
    client.publish(make_topic(race_topic, _race_segments(raceEvent, RACE, pilot.callsign)), json.dumps(msg))

def race_finish(client, race_topic, raceEvent, RACE, **kwargs):
    """Announce the race finish time."""
    msg = {'finishTime': RACE.finish_time_epoch_ms}
    client.publish(make_topic(race_topic, _race_segments(raceEvent, RACE)), json.dumps(msg))

def race_stop(client, race_topic, raceEvent, RACE, **kwargs):
    """Announce the race stop time."""
    msg = {'stopTime': RACE.end_time_epoch_ms}
    client.publish(make_topic(race_topic, _race_segments(raceEvent, RACE)), json.dumps(msg))
def sensor_update(client, sensor_topic, sensors, **kwargs):
    """Publish every sensor reading, value plus optional units, to its own topic."""
    for sensor in sensors:
        for name, readings in sensor.items():
            for reading, value in readings.items():
                parts = [str(value['value'])]
                if 'units' in value:
                    parts.append(value['units'])
                client.publish(make_topic(sensor_topic, [name, reading]), ' '.join(parts))
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,063
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/events/audio_event_manager.py
|
import copy
import subprocess
import logging
from rh.events.eventmanager import Evt
from rh.app.RHRace import RaceMode, StagingTones, StartBehavior
from rh.util import RHUtils, secs_to_millis
logger = logging.getLogger(__name__)
class AudioEventManager:
    """Registers audio handlers (beeps and text-to-speech) for race events.

    Sounds are produced by spawning external commands configured under
    config['PLAYER'] (audio file player) and config['TTS'] (speech command).
    """
    def __init__(self, eventmanager, data, race, config):
        self.Events = eventmanager
        self.RHData = data
        self.RACE = race
        self.config = config
        # Handle of the most recently spawned player/TTS subprocess.
        self.proc = None
    def install_default_effects(self):
        """Attach the default beep and speech handlers for configured outputs."""
        if 'PLAYER' in self.config:
            self.addEvent(Evt.RACE_STAGE, play_stage_beep)
            self.addEvent(Evt.RACE_START_COUNTDOWN, play_start_countdown_beeps)
            self.addEvent(Evt.RACE_START, play_start_beep)
            self.addEvent(Evt.RACE_FIRST_PASS, play_first_pass_beep)
            self.addEvent(Evt.CROSSING_ENTER, play_crossing_enter_beep)
            self.addEvent(Evt.CROSSING_EXIT, play_crossing_exit_beep)
        if 'TTS' in self.config:
            self.addEvent(Evt.RACE_START_COUNTDOWN, say_start_countdown)
            self.addEvent(Evt.RACE_TICK, say_race_times)
            self.addEvent(Evt.RACE_LAP_RECORDED, say_lap_time)
            self.addEvent(Evt.RACE_FINISH, say_race_complete)
    def addEvent(self, event, effectFunc):
        # Register under the 'Audio' handler name on the event manager.
        self.Events.on(event, 'Audio', self.create_handler(effectFunc))
    def create_handler(self, func):
        """Wrap an effect function so it receives shared context plus play/say callbacks."""
        def _handler(args):
            args['RHData'] = self.RHData
            args['RACE'] = self.RACE
            args['play'] = self.play
            args['say'] = self.say
            func(**args)
        return _handler
    def play(self, audio_file):
        """Play an audio file via the configured PLAYER command.

        Blocks until the previously spawned subprocess (if any) finishes,
        so sounds are played one at a time.
        """
        if self.proc:
            self.proc.wait()
        if self.config['PLAYER']:
            args = copy.copy(self.config['PLAYER'])
            args.append(audio_file)
            self.proc = subprocess.Popen(args)
    def say(self, text):
        """Speak text via the configured TTS command.

        Blocks until the previously spawned subprocess (if any) finishes.
        """
        if self.proc:
            self.proc.wait()
        if self.config['TTS']:
            args = copy.copy(self.config['TTS'])
            args.append(text)
            self.proc = subprocess.Popen(args)
def play_stage_beep(RACE, play, **kwargs):
    """Sound the staging tone when the race format requests a single tone."""
    if RACE.format.staging_tones == StagingTones.TONES_ONE:
        play('server/static/audio/stage.wav')
def play_start_countdown_beeps(time_remaining, countdown_time, RACE, play, **kwargs):
    """Sound a staging tone on countdown seconds, per the format's tone mode."""
    tones = RACE.format.staging_tones
    in_final_three = tones == StagingTones.TONES_3_2_1 and time_remaining <= 3
    if in_final_three or tones == StagingTones.TONES_ALL:
        play('server/static/audio/stage.wav')
def say_start_countdown(time_remaining, countdown_time, RACE, say, **kwargs):
    """Announce the remaining staging time at the 30, 20 and 10 second marks."""
    if time_remaining in (30, 20, 10):
        say("Starting in {} seconds".format(time_remaining))
def play_start_beep(play, **kwargs):
    """Sound the race-start buzzer."""
    play('server/static/audio/buzzer.wav')
def play_first_pass_beep(play, **kwargs):
    """Sound the beep marking a pilot's first gate pass."""
    play('server/static/audio/beep.wav')
def play_crossing_enter_beep(play, **kwargs):
    """Sound the gate-crossing enter cue."""
    play('server/static/audio/enter.wav')
def play_crossing_exit_beep(play, **kwargs):
    """Sound the gate-crossing exit cue."""
    play('server/static/audio/exit.wav')
def say_race_times(timer_sec, RACE, say, **kwargs):
    """Announce remaining time in fixed-time races at 60/30/10 seconds,
    plus the lap-grace call at zero when the format allows a grace lap."""
    race_format = RACE.format
    if race_format.race_mode != RaceMode.FIXED_TIME:
        return
    remaining = race_format.race_time_sec - timer_sec
    announcements = {60: "60 seconds", 30: "30 seconds", 10: "10 seconds"}
    message = announcements.get(remaining)
    if message is not None:
        say(message)
    elif remaining == 0 and race_format.lap_grace_sec:
        say("Pilots, finish your lap")
def say_lap_time(node_index, lap, RHData, RACE, say, **kwargs):
    """Announce a pilot's recorded lap: callsign, lap number and phonetic time.

    Lap 0 (the start pass) is skipped unless the format counts the first lap.
    """
    lap_num = lap['lap_number']
    race_format = RACE.format
    if lap_num <= 0 and race_format.start_behavior != StartBehavior.FIRST_LAP:
        return
    pilot = RACE.node_pilots[node_index]
    phonetic_time = RHUtils.phonetictime_format(lap['lap_time'], RHData.get_option('timeFormatPhonetic'))
    lap_time_stamp = lap['lap_time_stamp']
    msg = "{}".format(pilot.phonetic if pilot.phonetic else pilot.callsign)
    if race_format.lap_grace_sec:
        race_end = secs_to_millis(race_format.race_time_sec)
        grace_end = secs_to_millis(race_format.race_time_sec + race_format.lap_grace_sec)
        # Announce "done" for the lap completed inside the grace window.
        if race_end < lap_time_stamp <= grace_end:
            msg += " done"
    msg += ", lap {}, {}".format(lap_num, phonetic_time)
    say(msg)
def say_race_complete(say, **kwargs):
    """Announce that the race is over."""
    say("The race has finished")
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,064
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/interface/nodes/serial_node.py
|
'''RotorHazard serial interface layer.'''
import logging
import serial # For serial comms
import gevent
import time
from .. import pack_8
from rh.helpers import serial_url
from .. import RHInterface as rhi
BOOTLOADER_CHILL_TIME = 2 # Delay for USB to switch from bootloader to serial mode
SERIAL_BAUD_RATES = [921600, 500000, 115200]  # probed in this order, fastest first
DEF_S32BPILL_SERIAL_PORT = "/dev/serial0"  # fallback port for an S32_BPill board
logger = logging.getLogger(__name__)
class SerialNodeManager(rhi.RHNodeManager):
    """Manages RotorHazard timing nodes attached over a serial connection."""
    def __init__(self, serial_obj):
        super().__init__()
        self.serial_io = serial_obj
        # Address string derived from the serial port name.
        self.addr = serial_url(self.serial_io.port)
    def _read_command(self, command, size):
        """Send a one-byte read command and return the raw response bytes.

        Reads size + 1 bytes — payload plus one trailing byte (presumably a
        checksum; confirm against the node protocol).
        """
        self.serial_io.reset_input_buffer()
        self.serial_io.write(bytearray([command]))
        return bytearray(self.serial_io.read(size + 1))
    def _write_command(self, command, data):
        """Send a one-byte command followed by its data payload."""
        data_with_cmd = bytearray()
        data_with_cmd.append(command)
        data_with_cmd.extend(data)
        self.serial_io.write(data_with_cmd)
    def close(self):
        """Close the underlying serial port."""
        self.serial_io.close()
    def jump_to_bootloader(self):
        """Ask node firmware (API level >= 32) to jump into its bootloader,
        then flush and close the port. Failures are logged, not raised."""
        try:
            if self.api_level >= 32:
                logger.info('Sending JUMP_TO_BOOTLOADER message to serial node {0}'.format(self))
                # NOTE(review): this class only defines '_write_command';
                # confirm the parent RHNodeManager provides 'write_command'.
                self.write_command(rhi.JUMP_TO_BOOTLOADER, pack_8(0))
                self.serial_io.reset_input_buffer()
                time.sleep(0.1)
                self.serial_io.reset_input_buffer()
                self.serial_io.reset_output_buffer()
                self.serial_io.close()
        except Exception as ex:
            logger.error('Error sending JUMP_TO_BOOTLOADER message to serial node {0}: {1}'.format(self, ex))
def discover(idxOffset, config, isS32BPillFlag=False, *args, **kwargs):
    """Probe configured serial ports for RotorHazard node managers.

    Each configured port is tried at every supported baud rate (fastest
    first); the first rate at which nodes respond is kept for that port.

    Args:
        idxOffset: starting node index assigned to discovered nodes.
        config: configuration object; its SERIAL_PORTS lists ports to probe.
        isS32BPillFlag: when True and no ports are configured, fall back to
            the default S32_BPill serial port.

    Returns:
        A list of SerialNodeManager instances, one per responsive port.
    """
    node_managers = []
    config_ser_ports = getattr(config, 'SERIAL_PORTS', [])
    if isS32BPillFlag and len(config_ser_ports) == 0:
        config_ser_ports.append(DEF_S32BPILL_SERIAL_PORT)
        logger.debug("Using default serial port ('{}') for S32_BPill board".format(DEF_S32BPILL_SERIAL_PORT))
    if config_ser_ports:
        next_index = idxOffset
        for comm in config_ser_ports:
            for baudrate in SERIAL_BAUD_RATES:
                logger.info("Trying {} with baud rate {}".format(comm, baudrate))
                # Construct closed (port=None) so DTR/RTS can be configured
                # before the port opens — opening with DTR asserted could
                # reset the node processor.
                serial_obj = serial.Serial(port=None, baudrate=baudrate, timeout=0.25)
                serial_obj.setDTR(0) # clear in case line is tied to node-processor reset
                serial_obj.setRTS(0)
                serial_obj.setPort(comm)
                serial_obj.open() # open port (now that DTR is configured for no change)
                gevent.sleep(BOOTLOADER_CHILL_TIME) # delay needed for Arduino USB
                node_manager = SerialNodeManager(serial_obj)
                if node_manager.discover_nodes(next_index):
                    logger.info('{} node(s) with API level {} found at baudrate {}'.format(len(node_manager.nodes), node_manager.api_level, baudrate))
                    next_index += len(node_manager.nodes)
                    node_managers.append(node_manager)
                    break
                else:
                    # No response at this baud rate; close and try the next.
                    serial_obj.close()
    return node_managers
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,065
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/tools/__init__.py
|
from rh.app.config import Config
from rh.interface import RHInterface, MockInterface
def get_interface(port):
    """Create and return a hardware interface for the given port specifier.

    Supported specifiers:
        'MOCK'              - mock interface for testing
        'COM*' or '/dev/*'  - serial port
        'i2c:...'           - I2C (bus/address parsed by parse_i2c_url)
        ':<number>'         - TCP socket port

    Exits the process with status 1 on an unrecognised specifier.
    """
    if port == 'MOCK':
        INTERFACE = MockInterface.get_hardware_interface()
    elif port.startswith('COM') or port.startswith('/dev/'):
        config = Config()
        config.SERIAL_PORTS = [port]
        INTERFACE = RHInterface.get_hardware_interface(config=config)
    elif port.startswith('i2c:'):
        # Imported lazily so non-I2C usage does not require I2C libraries.
        from rh.helpers.i2c_helper import I2CBus
        from rh.helpers import parse_i2c_url
        bus_addr = parse_i2c_url(port)
        params = {}
        params['idxOffset'] = 0
        params['i2c_helper'] = [I2CBus(bus_addr[0])]
        params['i2c_addrs'] = [bus_addr[1]]
        INTERFACE = RHInterface.get_hardware_interface(**params)
    elif port.startswith(':'):
        config = Config()
        config.SOCKET_PORTS = [int(port[1:])]
        INTERFACE = RHInterface.get_hardware_interface(config=config)
    else:
        print("Invalid port: {}".format(port))
        exit(1)
    print("Nodes detected: {}".format(len(INTERFACE.nodes)))
    return INTERFACE
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,066
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/orgs/ifpv_org.py
|
import re as regex
import requests
import json
from rh.util.RHUtils import FREQS
TIMEOUT = 5  # HTTP request timeout, seconds
# Maps IFPV band codes to RotorHazard band letters.
IFPV_BANDS = {
    'rb': 'R',
    'fs': 'F'
}
class Ifpv:
    """Adapter for the IFPV (UK) league website: recognises pilot/event URLs,
    scrapes pilot pages, and converts event JSON to RotorHazard event data.

    Regex patterns are raw strings: sequences like ``\\s`` and ``\\.`` are
    regex escapes and would trigger SyntaxWarning in plain string literals
    on modern Python.
    """
    def is_pilot_url(self, url):
        """Return the pilot id if *url* is an IFPV pilot page, else None."""
        matches = regex.match(r'https://league.ifpv.co.uk/pilots/([0-9]+)', url)
        if matches:
            return matches.group(1)
        else:
            return None
    def get_pilot_data(self, url, pilot_id):
        """Fetch a pilot page and scrape callsign, name and logo URL."""
        resp = requests.get(url, timeout=TIMEOUT)
        pilot_data = {}
        name_match = regex.search(r'<div class="row vertical-center">\s+<div class="col-md-3">\s+<h1>(.*)(?=<)</h1>\s+<p>(.*)(?=<)</p>', resp.text)
        if name_match:
            pilot_data['callsign'] = name_match.group(1)
            pilot_data['name'] = name_match.group(2)
        logo_match = regex.search(r'https://league.ifpv.co.uk/storage/images/pilots/[0-9]+\.(jpg|png|gif)', resp.text)
        if logo_match:
            pilot_data['logo'] = logo_match.group(0)
        return pilot_data
    def is_event_url(self, url):
        """Return the event id if *url* is an IFPV event-data URL, else None."""
        matches = regex.match(r'https://league.ifpv.co.uk/events/([0-9]+)/data', url)
        if matches:
            return matches.group(1)
        else:
            return None
    def get_event_data(self, url, event_id):
        """Fetch IFPV event JSON and convert it to RotorHazard event data."""
        resp = requests.get(url, timeout=TIMEOUT)
        ifpv_data = resp.json()
        event_data = self.convert_ifpv_json(ifpv_data)
        return event_data
    def convert_ifpv_freq(self, ifpv_bc):
        """Convert an IFPV band+channel code (e.g. 'rb1') to (band, channel, frequency)."""
        groups = regex.search(r"([a-z]+)([0-9]+)", ifpv_bc)
        b = IFPV_BANDS[groups.group(1)]
        c = int(groups.group(2))
        f = FREQS[b + str(c)]
        return b, c, f
    def convert_ifpv_json(self, ifpv_data):
        """Convert IFPV event JSON into the RotorHazard event-data structure.

        Builds seats from the event frequency list, pilots keyed by callsign,
        and one qualifying stage whose heats are filled from each pilot's
        heat/car assignment (both 1-based in the IFPV data).
        """
        event_name = ifpv_data['event']['name']
        event_date = ifpv_data['event']['date']
        num_heats = ifpv_data['event']['heats']
        race_class_name = 'BDRA Open'
        race_format_name = 'BDRA Qualifying'
        freqs = json.loads(ifpv_data['event']['frequencies'])
        rhfreqs = [self.convert_ifpv_freq(f) for f in freqs]
        seats = [
            {'frequency': f,
             'bandChannel': b + str(c)
             } for b, c, f in rhfreqs
        ]
        pilots = {
            pilot['callsign']: {'name': pilot['name'], 'url': pilot['pilot_url'], 'ifpvId': pilot['id']}
            for pilot in ifpv_data['pilots']
        }
        heats = [None] * num_heats
        for pilot in ifpv_data['pilots']:
            heat = pilot['heat'] - 1  # IFPV heats are 1-based
            seat = pilot['car'] - 1  # IFPV seats ("cars") are 1-based
            if heats[heat] is None:
                heats[heat] = {'name': 'Heat ' + str(heat + 1),
                               'class': race_class_name,
                               'seats': [None] * len(seats)}
            heats[heat]['seats'][seat] = pilot['callsign']
        event_data = {
            'name': event_name,
            'date': event_date,
            'classes': {race_class_name: {'format': race_format_name}},
            'seats': seats,
            'pilots': pilots,
            'stages': [
                {'name': 'Qualifying',
                 'heats': heats}
            ]
        }
        return event_data
def discover(*args, **kwargs):
    """Return the available league organisation adapters."""
    providers = [Ifpv()]
    return providers
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,067
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/interface/laprf_protocol.py
|
'''LapRF hardware interface helpers.'''
from struct import pack_into, unpack_from
from enum import IntEnum
from typing import Optional, Union, List
from time import perf_counter_ns
import logging
logger = logging.getLogger(__name__)
SOR = 0x5a  # start-of-record marker byte
EOR = 0x5b  # end-of-record marker byte
ESC = 0x5c  # escape byte for marker values occurring inside a record
ESC_OFFSET = 0x40  # added to an escaped byte's value
MAX_RECORD_LEN = 1024
MAX_SLOTS = 8
MAX_CHANNELS = 8
MAX_THRESHOLD = 3000
MAX_GAIN = 63
# Band letters in LapRF hardware order (index maps to the device band number).
LIVE_TIME_BANDS = ['F','R', 'E', 'B', 'A', 'L']
class RecordType(IntEnum):
    """16-bit record type signatures carried in a LapRF record header."""
    RSSI = 0xda01
    RF_SETUP = 0xda02
    STATE_CONTROL = 0xda04
    SETTINGS = 0xda07
    DESCRIPTOR = 0xda08
    PASSING = 0xda09
    STATUS = 0xda0a
    TIME = 0xda0c
    ERROR = 0xffff
class RFSetupField(IntEnum):
    """Field signatures found in an RF_SETUP record."""
    SLOT_INDEX = 0x01
    ENABLED = 0x20
    CHANNEL = 0x21
    BAND = 0x22
    THRESHOLD = 0x23
    GAIN = 0x24
    FREQUENCY = 0x25
class RssiField(IntEnum):
    """Field signatures found in an RSSI record."""
    SLOT_INDEX = 0x01 # uint8
    SAMPLE_COUNT = 0x07 # uint32
    MIN_RSSI = 0x20 # f32
    MAX_RSSI = 0x21 # f32
    MEAN_RSSI = 0x22 # f32
    UNKNOWN_1 = 0x23
    CUSTOM_RATE = 0x24
    PACKET_RATE = 0x25
    UNKNOWN_2 = 0x26
class PassingField(IntEnum):
    """Field signatures found in a PASSING record."""
    SLOT_INDEX = 0x01
    RTC_TIME = 0x02
    DECODER_ID = 0x20
    PASSING_NUMBER = 0x21
    PEAK_HEIGHT = 0x22
    FLAGS = 0x23
class SettingsField(IntEnum):
    """Field signatures found in a SETTINGS record."""
    STATUS_INTERVAL = 0x22
    SAVE_SETTINGS = 0x25
    MIN_LAP_TIME = 0x26
class StateControlField(IntEnum):
    """Field signatures found in a STATE_CONTROL record."""
    GATE_STATE = 0x20
class StatusField(IntEnum):
    """Field signatures found in a STATUS record."""
    SLOT_INDEX = 0x01
    FLAGS = 0x03
    BATTERY_VOLTAGE = 0x21
    LAST_RSSI = 0x22
    GATE_STATE = 0x23
    DETECTION_COUNT = 0x24
class TimeField(IntEnum):
    """Field signatures found in a TIME record."""
    RTC_TIME = 0x02
    TIME_RTC_TIME = 0x20
class LapRFEvent():
    """Base class for decoded LapRF records; stamps creation time in ms."""
    def __init__(self: "LapRFEvent", rec_type: str):
        # Millisecond-resolution timestamp taken when the event object is created.
        self.timestamp: int = round(perf_counter_ns() / 1000000)
        self.rec_type = rec_type
class RFSetupEvent(LapRFEvent):
    "A LapRF receiver radio frequency setup event"
    def __init__(self: "RFSetupEvent"):
        super().__init__("slot_config")
        self.slot_index: Optional[int] = None
        self.enabled: Optional[bool] = None
        self.band: Optional[int] = None
        self.channel: Optional[int] = None
        self.frequency: Optional[int] = None
        self.threshold: Optional[float] = None
        self.gain: Optional[int] = None
    def is_valid(self: "RFSetupEvent") -> bool:
        """True once every field has been populated with its expected type."""
        int_fields = (self.slot_index, self.band, self.channel,
                      self.frequency, self.gain)
        return (all(isinstance(v, int) for v in int_fields)
                and isinstance(self.enabled, bool)
                and isinstance(self.threshold, float))
class PassingEvent(LapRFEvent):
    "A LapRF passing event"
    def __init__(self: "PassingEvent"):
        super().__init__("passing")
        self.slot_index: Optional[int] = None
        self.rtc_time: Optional[int] = None
        self.decoder_id: Optional[int] = None
        self.passing_number: Optional[int] = None
        self.peak_height: Optional[int] = None
        self.flags: Optional[int] = None
    def is_valid(self: "PassingEvent") -> bool:
        """True once every field has been populated with an int."""
        fields = (self.slot_index, self.rtc_time, self.decoder_id,
                  self.passing_number, self.peak_height, self.flags)
        return all(isinstance(v, int) for v in fields)
# Haven't encountered this type of record.
class RSSIEvent(LapRFEvent):
    "A LapRF RSSI event"
    def __init__(self: "RSSIEvent"):
        super().__init__("rssi")
        self.slot_index: Optional[int] = None
        self.min_rssi: Optional[float] = None
        self.max_rssi: Optional[float] = None
        self.mean_rssi: Optional[float] = None
    def is_valid(self: "RSSIEvent") -> bool:
        """True once the slot index is set and all RSSI values are floats."""
        rssi_fields = (self.min_rssi, self.max_rssi, self.mean_rssi)
        return (isinstance(self.slot_index, int)
                and all(isinstance(v, float) for v in rssi_fields))
class SettingsEvent(LapRFEvent):
    "A LapRF settings event"
    def __init__(self: "SettingsEvent"):
        super().__init__("settings")
        self.status_interval: Optional[int] = None
        self.min_lap_time: Optional[int] = None
    def is_valid(self: "SettingsEvent") -> bool:
        """Validate that any populated field has the expected type.

        Both fields are optional. Uses explicit ``is not None`` checks
        (rather than truthiness) so that a falsy non-int value (e.g. an
        empty string) is still rejected by the type check.
        """
        if self.status_interval is not None and not isinstance(self.status_interval, int):
            return False
        if self.min_lap_time is not None and not isinstance(self.min_lap_time, int):
            return False
        return True
class StatusEvent(LapRFEvent):
    "A LapRF status event"
    def __init__(self: "StatusEvent"):
        super().__init__("status")
        self.battery_voltage: Optional[int] = None
        self.gate_state: Optional[int] = None
        self.detection_count: Optional[int] = None
        self.flags: Optional[int] = None
        # One entry per slot; slots never reported remain None.
        self.last_rssi: List[Optional[float]] = [None] * MAX_SLOTS
    def is_valid(self: "StatusEvent") -> bool:
        """True when all scalar fields are ints and every slot RSSI is a float."""
        if any(not isinstance(rssi, float) for rssi in self.last_rssi):
            return False
        scalar_fields = (self.battery_voltage, self.gate_state,
                         self.detection_count, self.flags)
        return all(isinstance(v, int) for v in scalar_fields)
class TimeEvent(LapRFEvent):
    "A LapRF time event"
    def __init__(self: "TimeEvent"):
        super().__init__("time")
        self.rtc_time: Optional[int] = None
        self.time_rtc_time: Optional[int] = None
    def is_valid(self: "TimeEvent") -> bool:
        """True once both time fields have been populated with ints."""
        return isinstance(self.rtc_time, int) and isinstance(self.time_rtc_time, int)
Event = Union[RFSetupEvent, PassingEvent, SettingsEvent, StatusEvent, TimeEvent]
class InvalidRecordError(Exception):
    """Raised when a LapRF record is malformed or cannot be parsed."""
    def __init__(self, message="Invalid LapRF record"):
        self.message = message
        super().__init__(message)
class CrcMismatchError(Exception):
    """Raised when a record's stored CRC does not match the computed CRC."""
    def __init__(self, message="LapRF record CRC mismatch"):
        self.message = message
        super().__init__(message)
class ByteSizeError(Exception):
    """Raised when a decoded field's byte size differs from the expected size."""
    def __init__(self, expected: int, received: int):
        self.message = f"Byte size mismatch expected: {expected}, received: {received}"
        super().__init__(self.message)
def _escape_record(input_data: bytearray) -> bytes:
    """Escape marker bytes (ESC/SOR/EOR) inside a LapRF record body.

    The first and last bytes — the SOR/EOR framing — are left untouched.
    """
    escaped = bytearray()
    end = len(input_data) - 1
    for idx, value in enumerate(input_data):
        if value in (ESC, SOR, EOR) and 0 < idx < end:
            escaped.append(ESC)
            escaped.append(value + ESC_OFFSET)
        else:
            escaped.append(value)
    return bytes(escaped)
def _unescape_record(input_data: bytes) -> bytearray:
    """Reverse _escape_record: resolve ESC sequences, stopping at EOR.

    Raises:
        InvalidRecordError: no EOR terminator was found in the input.
    """
    decoded = bytearray()
    pending_escape = False
    for value in input_data:
        if pending_escape:
            pending_escape = False
            decoded.append(value - ESC_OFFSET)
        elif value == EOR:
            decoded.append(value)
            return decoded
        elif value == ESC:
            pending_escape = True
        else:
            decoded.append(value)
    raise InvalidRecordError("Record unescape failed")
def _split_records(input_data: bytes) -> List[bytearray]:
    """Split a LapRF packet into individual unescaped SOR..EOR records."""
    records = []
    cursor = 0
    while True:
        start = input_data.find(SOR, cursor)
        if start < 0:
            break
        cursor = input_data.find(EOR, start)
        if cursor < 0:
            # Trailing partial record with no terminator; discard it.
            break
        records.append(_unescape_record(input_data[start:cursor + 1]))
    return records
def _gen_crc_16_table():
table = []
remainder = 0
for x in range(256):
remainder = (x << 8) & 0xff00
for _ in range(8):
if remainder & 0x8000 == 0x8000:
remainder = ((remainder << 1) & 0xffff) ^ 0x8005
else:
remainder = (remainder << 1) & 0xffff
table.append(remainder)
return table
def _reflect(input_data: int, nbits: int) -> int:
shift = input_data
output = 0
for x in range(nbits):
if (shift & 0x01) == 0x01:
output |= 1 << (nbits - 1 - x)
shift >>= 1
return output
def _compute(buffer: bytes) -> int:
    """Compute the bit-reflected CRC-16 (polynomial 0x8005) of *buffer*."""
    crc = 0
    for value in buffer:
        index = (_reflect(value, 8) & 0xff) ^ ((crc >> 8) & 0xff)
        crc = crc_16_table[index] ^ ((crc << 8) & 0xffff)
    return _reflect(crc, 16)
def _verify_crc(buffer: bytes) -> None:
    """Verify a LapRF record by performing a cyclic redundancy check (CRC).

    The record's CRC field (2 bytes at offset 3) is zeroed in a local copy
    before recomputing, because the CRC was originally computed with that
    field set to zero. The caller's buffer is not modified.

    Args:
        buffer: A LapRF record to verify.

    Returns:
        None

    Raises:
        CrcMismatchError: The stored CRC does not match the computed CRC.
    """
    crc_record, = unpack_from("<H", buffer, 3)
    buffer_no_crc = bytearray(buffer)  # work on a copy
    pack_into("<H", buffer_no_crc, 3, 0)  # zero the CRC field before recomputing
    crc_computed = _compute(buffer_no_crc)
    if (crc_record != crc_computed):
        raise CrcMismatchError()
crc_16_table = _gen_crc_16_table()
class Encoder:
    """A LapRF record encoder.

    Writes the record header (SOR, length, CRC, type), appends fields, and
    on finish() back-fills the length and CRC and escapes marker bytes.

    Attributes:
        pos: An integer cursor position in the buffer.
        rec_type: An integer representing the type of record.
    """
    def __init__(self, rec_type: int):
        self.pos = 7  # the 7-byte header below occupies offsets 0-6
        self.rec_type = rec_type
        self._buffer = bytearray(MAX_RECORD_LEN)
        # Start LapRF Record
        pack_into(
            "<BHHH", # 7 bytes
            self._buffer,
            0,
            SOR,
            0, # byte length
            0, # CRC
            rec_type
        )
    def finish(self: "Encoder") -> bytes:
        """Finish LapRF Record

        Appends the EOR terminator, back-fills the total byte length and
        the CRC (computed with the CRC field zeroed), then escapes marker
        bytes inside the record body.

        Returns:
            The complete and escaped LapRF record.
        """
        pack_into("<B", self._buffer, self.pos, EOR)
        self._advance(1)
        record = self._buffer[0:self.pos]
        pack_into("<H", record, 1, self.pos)
        pack_into("<H", record, 3, _compute(record))
        escaped = _escape_record(record)
        return escaped
    def write_u8(self: "Encoder", value: int) -> "Encoder":
        """Write a single byte to the internal buffer
        """
        if value < 0 or value > 255:
            raise Exception('Invalid argument, value must be a 8 bit unsigned integer')
        pack_into("<B", self._buffer, self.pos, value)
        self._advance(1)
        return self
    def encode_u8_field(self: "Encoder", signature: int, value: int) -> "Encoder":
        """Encode an unsigned 8 bit integer field.
        """
        if value < 0 or value > 255:
            raise Exception('Invalid argument, value must be a 8 bit unsigned integer')
        pack_into("<BBB", self._buffer, self.pos, signature, 1, value)
        self._advance(3) # u8, u8, u8
        return self
    def encode_u16_field(self: "Encoder", signature: int, value: int) -> "Encoder":
        """Encode an unsigned 16 bit integer field.
        """
        if value < 0 or value > 65_535:
            raise Exception('Invalid argument, value must be a 16 bit unsigned integer')
        pack_into("<BBH", self._buffer, self.pos, signature, 2, value)
        self._advance(4) # u8, u8, u16
        return self
    def encode_u32_field(self: "Encoder", signature: int, value: int) -> "Encoder":
        """Encode an unsigned 32 bit integer field.
        """
        if value < 0 or value > 4_294_967_295:
            raise Exception('Invalid argument, value must be a 32 bit unsigned integer')
        pack_into("<BBI", self._buffer, self.pos, signature, 4, value)
        self._advance(6) # u8, u8, u32
        return self
    def encode_u64_field(self: "Encoder", signature: int, value: int) -> "Encoder":
        """Encode an unsigned 64 bit integer field.
        """
        pack_into("<BBQ", self._buffer, self.pos, signature, 8, value)
        self._advance(10) # u8, u8, u64
        return self
    def encode_f32_field(self: "Encoder", signature: int, value: float) -> "Encoder":
        """Encode a 32 bit float field.
        """
        pack_into("<BBf", self._buffer, self.pos, signature, 4, value)
        self._advance(6) # u8, u8, f32
        return self
    def encode_f64_field(self: "Encoder", signature: int, value: float) -> "Encoder":
        """Encode a 64 bit float field.
        """
        pack_into("<BBd", self._buffer, self.pos, signature, 8, value)
        self._advance(10) # u8, u8, f64
        return self
    def _advance(self: "Encoder", byte_length: int):
        # Move the write cursor past the bytes just packed.
        self.pos += byte_length
class Decoder:
    """A LapRF record decoder.

    Validates the record length and CRC on construction, then exposes
    cursor-based field decoding.

    Attributes:
        pos: An integer cursor position in the buffer.
        rec_type: An integer representing the type of record.
        length: An integer byte length of the record.
    """
    def __init__(self, buffer: bytes):
        # Header layout after SOR: u16 length, u16 CRC, u16 record type.
        length, _, rec_type = unpack_from("<HHH", buffer, 1)
        if len(buffer) != length:
            raise InvalidRecordError(f"Invalid record length of {len(buffer)}, expected {length}")
        _verify_crc(buffer)
        self.pos = 7  # first field starts after the 7-byte header
        self.rec_type: int = rec_type
        self.length: int = length
        self._buffer = buffer
    def decode_field_signature(self: "Decoder") -> int:
        """Decode record field signature.
        """
        signature, = unpack_from("<B", self._buffer, self.pos)
        self.pos += 1
        return signature
    def decode_u8_field(self: "Decoder") -> int:
        """Decode an unsigned byte field.
        """
        size, data = unpack_from("<BB", self._buffer, self.pos)
        self._advance(1, size)
        return data
    def decode_u16_field(self: "Decoder") -> int:
        """Decode an unsigned 16 bit integer field.
        """
        size, data = unpack_from("<BH", self._buffer, self.pos)
        self._advance(2, size)
        return data
    def decode_u32_field(self: "Decoder") -> int:
        """Decode an unsigned 32 bit integer field.
        """
        size, data = unpack_from("<BI", self._buffer, self.pos)
        self._advance(4, size)
        return data
    def decode_u64_field(self: "Decoder") -> int:
        """Decode an unsigned 64 bit integer field.
        """
        size, data = unpack_from("<BQ", self._buffer, self.pos)
        self._advance(8, size)
        return data
    def decode_f32_field(self: "Decoder") -> float:
        """Decode a 32 bit float field.
        """
        size, data = unpack_from("<Bf", self._buffer, self.pos)
        self._advance(4, size)
        return data
    def decode_f64_field(self: "Decoder") -> float:
        """Decode a 64 bit float field.
        """
        size, data = unpack_from("<Bd", self._buffer, self.pos)
        self._advance(8, size)
        return data
    def skip_unknown_field(self: "Decoder", signature: int) -> None:
        """Skip a LapRF record field
        """
        t = hex(self.rec_type)
        s = hex(signature)
        logger.warning(f"Unknown field signature {s} found in record type record type {t}")
        size, = unpack_from("<B", self._buffer, self.pos)
        self.pos += size + 1 # Also skip over the size byte that was read above.
    def _advance(self: "Decoder", expected: int, received: int) -> None:
        # Advance past the field only when the size byte matched the
        # expected width; otherwise signal a malformed field.
        if expected == received:
            self.pos += expected + 1 # Also skip over the size byte that was read above.
        else:
            raise ByteSizeError(expected, received)
def _decode_rf_setup_record(record: Decoder) -> RFSetupEvent:
    """Decode an RF_SETUP record into an RFSetupEvent.

    Unknown field signatures are skipped; decoding stops at EOR.
    (A duplicate, unreachable BAND branch in the original chain has been
    removed — the first BAND case already handled that signature.)
    """
    event = RFSetupEvent()
    while record.pos < record.length:
        signature = record.decode_field_signature()
        if signature == EOR:
            break
        elif signature == RFSetupField.SLOT_INDEX:
            event.slot_index = record.decode_u8_field()
        elif signature == RFSetupField.ENABLED:
            event.enabled = record.decode_u16_field() == 1
        elif signature == RFSetupField.BAND:
            event.band = record.decode_u16_field()
        elif signature == RFSetupField.CHANNEL:
            event.channel = record.decode_u16_field()
        elif signature == RFSetupField.FREQUENCY:
            event.frequency = record.decode_u16_field()
        elif signature == RFSetupField.THRESHOLD:
            event.threshold = record.decode_f32_field()
        elif signature == RFSetupField.GAIN:
            event.gain = record.decode_u16_field()
        else:
            record.skip_unknown_field(signature)
    return event
# def _decode_rssi_record(record: Decoder) -> RSSIEvent:
def _decode_settings_record(record: Decoder) -> SettingsEvent:
    """Decode a settings record into a SettingsEvent."""
    event = SettingsEvent()
    while record.pos < record.length:
        sig = record.decode_field_signature()
        if sig == EOR:
            break
        if sig == SettingsField.STATUS_INTERVAL:
            event.status_interval = record.decode_u16_field()
        elif sig == SettingsField.SAVE_SETTINGS:
            # Only meaningful on a request; consume and discard the value.
            record.decode_u8_field()
        elif sig == SettingsField.MIN_LAP_TIME:
            event.min_lap_time = record.decode_u32_field()
        else:
            record.skip_unknown_field(sig)
    return event
def _decode_passing_record(record: Decoder) -> PassingEvent:
    """Decode a lap-passing record into a PassingEvent."""
    event = PassingEvent()
    # Map each known field signature to (attribute name, decode method).
    field_map = {
        PassingField.SLOT_INDEX: ('slot_index', record.decode_u8_field),
        PassingField.RTC_TIME: ('rtc_time', record.decode_u64_field),
        PassingField.DECODER_ID: ('decoder_id', record.decode_u32_field),
        PassingField.PASSING_NUMBER: ('passing_number', record.decode_u32_field),
        PassingField.PEAK_HEIGHT: ('peak_height', record.decode_u16_field),
        PassingField.FLAGS: ('flags', record.decode_u16_field),
    }
    while record.pos < record.length:
        sig = record.decode_field_signature()
        if sig == EOR:
            break
        entry = field_map.get(sig)
        if entry is not None:
            attr, decode_fn = entry
            setattr(event, attr, decode_fn())
        else:
            record.skip_unknown_field(sig)
    return event
def _decode_status_record(record: Decoder) -> StatusEvent:
    """Decode a status record into a StatusEvent.

    Each LAST_RSSI field is expected to be preceded by a SLOT_INDEX field
    identifying which receiver slot the reading belongs to.
    """
    slot_index: Optional[int] = None
    event = StatusEvent()
    while record.pos < record.length:
        signature = record.decode_field_signature()
        if signature == EOR:
            break
        elif signature == StatusField.SLOT_INDEX:
            slot_index = record.decode_u8_field()
        elif signature == StatusField.FLAGS:
            event.flags = record.decode_u16_field()
        elif signature == StatusField.BATTERY_VOLTAGE:
            event.battery_voltage = record.decode_u16_field()
        elif signature == StatusField.LAST_RSSI:
            if slot_index and slot_index > 0:
                slot_index = slot_index - 1 # convert from 1-based to 0-based index (previous comment said "to 1-based", contradicting the subtraction)
                if (slot_index < MAX_SLOTS):
                    event.last_rssi[slot_index] = record.decode_f32_field()
                slot_index = None # reset for next loop
            # NOTE(review): when no SLOT_INDEX preceded this field, the RSSI
            # payload is never consumed — verify the device always sends
            # SLOT_INDEX first, otherwise decoding desynchronises.
        elif signature == StatusField.GATE_STATE:
            event.gate_state = record.decode_u8_field()
        elif signature == StatusField.DETECTION_COUNT:
            event.detection_count = record.decode_u32_field()
        else:
            record.skip_unknown_field(signature)
    return event
def _decode_time_record(record: Decoder) -> TimeEvent:
    """Decode a time record into a TimeEvent."""
    event = TimeEvent()
    while record.pos < record.length:
        sig = record.decode_field_signature()
        if sig == EOR:
            break
        if sig == TimeField.RTC_TIME:
            event.rtc_time = record.decode_u64_field()
        elif sig == TimeField.TIME_RTC_TIME:
            # Need to research difference from rtc_time.
            event.time_rtc_time = record.decode_u64_field()
        else:
            record.skip_unknown_field(sig)
    return event
def _decode_record(buffer: bytes):
    """Decode one raw record buffer into its event object.

    Returns None for RSSI records (decoder not yet implemented) and for
    unrecognised record types (which are logged).
    """
    record = Decoder(buffer)
    rec_type = record.rec_type
    if rec_type == RecordType.RSSI:
        # _decode_rssi_record(record)
        return None
    dispatch = {
        RecordType.RF_SETUP: _decode_rf_setup_record,
        RecordType.PASSING: _decode_passing_record,
        RecordType.SETTINGS: _decode_settings_record,
        RecordType.STATUS: _decode_status_record,
        RecordType.TIME: _decode_time_record,
    }
    # elif record.rec_type == RecordType.Descriptor:
    # Record Type: 0xda08, Unknown Signature: 0x20, Size: 4
    # Record Type: 0xda08, Unknown Signature: 0x21, Size: 1
    handler = dispatch.get(rec_type)
    if handler is not None:
        return handler(record)
    logger.warning("Unrecognised record type: {:#04x}".format(rec_type))
# Module Public Functions
def decode(packet: bytes):
    """Deserialize a LapRF packet into a list of decoded event records.

    Records that fail to decode are logged and skipped so one corrupt
    record does not abort the whole packet.
    """
    records: List[Event] = []
    for buffer in _split_records(packet):
        try:
            record = _decode_record(buffer)
        except Exception:
            # Fix: was a bare `except: pass`, which silently swallowed
            # everything (including SystemExit/KeyboardInterrupt) and left
            # the "TODO - Log errors" unfulfilled.
            logger.exception("Failed to decode LapRF record")
            continue
        if record:
            records.append(record)
    return records
def encode_get_rtc_time_record() -> bytes:
    """Encode a LapRF record requesting the RTC time.

    Requesting the RTC time requires an irregular packet: the field
    signature is followed by a literal zero byte rather than a sized value.
    """
    request = (Encoder(RecordType.TIME)
               .write_u8(TimeField.RTC_TIME)
               .write_u8(0x00))
    return request.finish()
def encode_get_min_lap_time_record() -> bytes:
    """Encode a LapRF record requesting the minimum lap time setting."""
    request = Encoder(RecordType.SETTINGS).encode_u32_field(SettingsField.MIN_LAP_TIME, 0x00)
    return request.finish()
def encode_set_min_lap_time_record(milliseconds: int) -> bytes:
    """Encode a LapRF record setting the minimum lap time.

    Raises:
        ValueError: when milliseconds is zero (or otherwise falsy).
    """
    if not milliseconds:
        raise ValueError("Minimum lap-time must be greater than zero")
    record = Encoder(RecordType.SETTINGS).encode_u32_field(SettingsField.MIN_LAP_TIME, milliseconds)
    return record.finish()
def encode_set_status_interval_record(milliseconds: int) -> bytes:
    """Encode a LapRF record setting the status reporting interval.

    Raises:
        ValueError: when milliseconds is zero (or otherwise falsy).
    """
    if not milliseconds:
        raise ValueError("Status interval must be greater than zero")
    record = Encoder(RecordType.SETTINGS).encode_u16_field(SettingsField.STATUS_INTERVAL, milliseconds)
    return record.finish()
def encode_get_rf_setup_record(slot_index: Optional[int] = None):
    """Encode a LapRF record requesting receiver configuration.

    Requests a single slot when a valid 1-based slot_index is given,
    otherwise requests every slot.
    """
    record = Encoder(RecordType.RF_SETUP)
    if slot_index and 1 <= slot_index <= MAX_SLOTS:
        record.encode_u8_field(RFSetupField.SLOT_INDEX, slot_index)
    else:
        for index in range(1, MAX_SLOTS + 1):
            record.encode_u8_field(RFSetupField.SLOT_INDEX, index)
    return record.finish()
def encode_set_rf_setup_record(slot_index: int, enabled: bool, band: int, channel: int, frequency: int, gain: int, threshold: float) -> bytes:
    """Encode a LapRF record configuring one receiver slot.

    *NOTE* slot_index, band, and channel all use 1-based indexing.

    Attributes:
        slot_index: integer - The slot index to configure.
        band: integer - Radio band. Band order = FREBAL
        channel: integer - Radio channel.
        frequency: integer - Radio frequency.
        gain: integer - The receiver gain.
        threshold: float - The passing threshold.
        enabled: boolean
    """
    # Field order matters on the wire; keep it exactly as below.
    record = Encoder(RecordType.RF_SETUP)
    record = record.encode_u8_field(RFSetupField.SLOT_INDEX, slot_index)
    record = record.encode_u16_field(RFSetupField.ENABLED, 1 if enabled else 0)
    record = record.encode_u16_field(RFSetupField.CHANNEL, channel)
    record = record.encode_u16_field(RFSetupField.BAND, band)
    record = record.encode_f32_field(RFSetupField.THRESHOLD, threshold)
    record = record.encode_u16_field(RFSetupField.GAIN, gain)
    record = record.encode_u16_field(RFSetupField.FREQUENCY, frequency)
    return record.finish()
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,068
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/data_export/data_export_csv.py
|
'''CSV data exporter'''
import logging
logger = logging.getLogger(__name__)
from rh.util import RHUtils
import io
import csv
from . import DataExporter
def write_csv(data):
    '''Serialize rows to CSV text, quoting all non-numeric values.'''
    buffer = io.StringIO()
    csv.writer(buffer, quoting=csv.QUOTE_NONNUMERIC).writerows(data)
    return {
        'data': buffer.getvalue(),
        'encoding': 'text/csv',
        'ext': 'csv'
    }
def assemble_all(RHData, PageCache, Language):
    '''Assemble every export section into a single flat list of rows.

    Each section is emitted as a title row, its data rows, then an empty
    string separator (matching the original output exactly).
    '''
    sections = (
        ('Pilots', assemble_pilots),
        ('Heats', assemble_heats),
        ('Classes', assemble_classes),
        ('Formats', assemble_formats),
        ('Results', assemble_results),
    )
    output = []
    for title, assembler in sections:
        output.append([title])
        output.extend(assembler(RHData, PageCache, Language))
        output.append('')
    return output
def assemble_pilots(RHData, PageCache, Language):
    '''Build rows of pilot callsign/name/team under a localized header.'''
    header = [Language.__('Callsign'), Language.__('Name'), Language.__('Team')]
    rows = [[pilot.callsign, pilot.name, pilot.team] for pilot in RHData.get_pilots()]
    return [header] + rows
def assemble_heats(RHData, PageCache, Language):
    '''Build rows of heat name, class name, and assigned pilot callsigns.

    Unassigned seats are rendered as '-'; unclassified heats get None for
    the class column.
    '''
    payload = [[Language.__('Name'), Language.__('Class'), Language.__('Pilots')]]
    for heat in RHData.get_heats():
        if heat.class_id != RHUtils.CLASS_ID_NONE:
            class_name = RHData.get_raceClass(heat.class_id).name
        else:
            class_name = None
        row = [heat.note, class_name]
        for heatnode in RHData.get_heatNodes_by_heat(heat.id):
            if heatnode.pilot_id != RHUtils.PILOT_ID_NONE:
                row.append(RHData.get_pilot(heatnode.pilot_id).callsign)
            else:
                row.append('-')
        payload.append(row)
    return payload
def assemble_classes(RHData, PageCache, Language):
    '''Build rows of race class name, description and race format name.'''
    payload = [[Language.__('Name'), Language.__('Description'), Language.__('Race Format')]]
    for race_class in RHData.get_raceClasses():
        # Expand the format id to its display name; '-' when unset/unknown.
        race_format = RHData.get_raceFormat(race_class.format_id)
        format_string = race_format.name if race_format else '-'
        payload.append([race_class.name, race_class.description, format_string])
    return payload
def assemble_formats(RHData, PageCache, Language):
    '''Build rows describing each race format's settings.

    Numeric mode/tone/condition/behavior codes are expanded to localized
    display strings via index lookup.
    '''
    timer_modes = [Language.__('Fixed Time'), Language.__('No Time Limit')]
    tones = [Language.__('None'), Language.__('One'), Language.__('Each Second')]
    win_conditions = [
        Language.__('None'),
        Language.__('Most Laps in Fastest Time'),
        Language.__('First to X Laps'),
        Language.__('Fastest Lap'),
        Language.__('Fastest 3 Consecutive Laps'),
        Language.__('Most Laps Only'),
        Language.__('Most Laps Only with Overtime'),
    ]
    start_behaviors = [
        Language.__('Hole Shot'),
        Language.__('First Lap'),
        Language.__('Staggered Start'),
    ]
    payload = [[
        Language.__('Name'),
        Language.__('Race Clock Mode'),
        Language.__('Timer Duration (seconds)'),
        Language.__('Minimum Start Delay'),
        Language.__('Maximum Start Delay'),
        Language.__('Staging Tones'),
        Language.__('First Crossing'),
        Language.__('Win Condition'),
        Language.__('Number of Laps to Win'),
        Language.__('Team Racing Mode'),
    ]]
    for fmt in RHData.get_raceFormats():
        payload.append([
            fmt.name,
            timer_modes[fmt.race_mode],
            fmt.race_time_sec,
            fmt.start_delay_min,
            fmt.start_delay_max,
            tones[fmt.staging_tones],
            start_behaviors[fmt.start_behavior],
            win_conditions[fmt.win_condition],
            fmt.number_laps_win,
            fmt.team_racing_mode,
        ])
    return payload
def build_leaderboard(leaderboard, Language, **kwargs):
    '''Render a leaderboard dict as rows (header row + one row per entry).

    kwargs['primary_leaderboard'] may select an alternate ranking when
    present in the leaderboard dict; otherwise the meta default is used.
    The "Total" column switches to "Laps Total" for staggered starts
    (start_behavior == 2).
    '''
    meta = leaderboard['meta']
    if 'primary_leaderboard' in kwargs and kwargs['primary_leaderboard'] in leaderboard:
        entries = leaderboard[kwargs['primary_leaderboard']]
    else:
        entries = leaderboard[meta['primary_leaderboard']]
    if meta['start_behavior'] == 2:
        total_label, total_source = Language.__('Laps Total'), 'total_time_laps'
    else:
        total_label, total_source = Language.__('Total'), 'total_time'
    output = [[
        Language.__('Seat'),
        Language.__('Rank'),
        Language.__('Pilot'),
        Language.__('Laps'),
        Language.__(total_label),
        Language.__('Avg.'),
        Language.__('Fastest'),
        Language.__('3 Consecutive'),
        Language.__('Team'),
    ]]
    for entry in entries:
        output.append([
            entry['node'],
            entry['position'],
            entry['callsign'],
            entry['laps'],
            entry[total_source],
            entry['average_lap'],
            entry['fastest_lap'],
            entry['consecutives'],
            entry['team_name'],
        ])
    return output
def assemble_results(RHData, PageCache, Language):
    '''Build the full results export from the cached results page data.

    Emits the three event leaderboards, then per-class sections with class
    summaries, heat summaries, per-round leaderboards and lap times.
    Leading "Seat" column is stripped (row[1:]) from leaderboard rows.
    '''
    results = PageCache.get_cache()
    payload = []
    payload.append([Language.__('Event Leaderboards') + ': ' + Language.__('Race Totals')])
    for row in build_leaderboard(results['event_leaderboard'], Language, primary_leaderboard='by_race_time'):
        payload.append(row[1:])
    payload.append([''])
    payload.append([Language.__('Event Leaderboards') + ': ' + Language.__('Fastest Laps')])
    for row in build_leaderboard(results['event_leaderboard'], Language, primary_leaderboard='by_fastest_lap'):
        payload.append(row[1:])
    payload.append([''])
    payload.append([Language.__('Event Leaderboards') + ': ' + Language.__('Fastest 3 Consecutive Laps')])
    for row in build_leaderboard(results['event_leaderboard'], Language, primary_leaderboard='by_consecutives'):
        payload.append(row[1:])
    payload.append([''])
    payload.append([Language.__('Class Leaderboards')])
    # move unclassified heats to end
    all_classes = sorted(list(results['heats_by_class'].keys()))
    all_classes.append(all_classes.pop(all_classes.index(0)))
    for class_id in all_classes:
        valid_heats = False;
        if len(results['heats_by_class'][class_id]):
            # NOTE(review): this iterates heats_by_class.keys() (class ids),
            # not results['heats_by_class'][class_id] (this class's heats) —
            # it looks like it compares class ids against heat ids. Verify
            # intent; it may incorrectly mark classes as having valid heats.
            for heat in results['heats_by_class'].keys():
                if heat in results['heats']:
                    valid_heats = True
                    break
        if valid_heats:
            if class_id in results['classes']:
                race_class = results['classes'][class_id]
            else:
                # False marks an unclassified (or unknown) class.
                race_class = False
            payload.append([])
            if race_class:
                payload.append([Language.__('Class') + ': ' + race_class['name']])
                payload.append([])
                payload.append([Language.__('Class Summary')])
                for row in build_leaderboard(race_class['leaderboard'], Language):
                    payload.append(row[1:])
            else:
                if len(results['classes']):
                    payload.append([Language.__('Unclassified')])
                else:
                    payload.append([Language.__('Heats')])
            for heat_id in results['heats_by_class'][class_id]:
                if heat_id in results['heats']:
                    heat = results['heats'][heat_id]
                    payload.append([])
                    if heat['note']:
                        payload.append([Language.__('Heat') + ': ' + heat['note']])
                    else:
                        payload.append([Language.__('Heat') + ' ' + str(heat_id)])
                    # Only emit a heat summary when there is more than one round.
                    if len(heat['rounds']) > 1:
                        payload.append([])
                        payload.append([Language.__('Heat Summary')])
                        for row in build_leaderboard(heat['leaderboard'], Language):
                            payload.append(row[1:])
                    for heat_round in heat['rounds']:
                        payload.append([])
                        payload.append([Language.__('Round {0}').format(heat_round['id'])])
                        laptimes = []
                        for row in build_leaderboard(heat_round['leaderboard'], Language):
                            # Match the leaderboard row to its node by seat index
                            # (row[0] is the 'Seat'/node column) to collect lap times.
                            for node in heat_round['nodes']:
                                if row[0] == node['node_index']:
                                    laplist = []
                                    laplist.append(node['callsign'])
                                    for lap in node['laps']:
                                        if not lap['deleted']:
                                            laplist.append(lap['lap_time_formatted'])
                                    laptimes.append(laplist)
                            payload.append(row[1:])
                        payload.append([])
                        payload.append([Language.__('Round {0} Times').format(str(heat_round['id']))])
                        for row in laptimes:
                            payload.append(row)
    return payload
def discover(*args, **kwargs):
    '''Return the list of CSV exporters with their default assemblers.'''
    exporter_specs = [
        ('csv_pilots', 'CSV (Friendly) / Pilots', assemble_pilots),
        ('csv_heats', 'CSV (Friendly) / Heats', assemble_heats),
        ('csv_classes', 'CSV (Friendly) / Classes', assemble_classes),
        ('csv_formats', 'CSV (Friendly) / Formats', assemble_formats),
        ('csv_results', 'CSV (Friendly) / Results', assemble_results),
        ('csv_all', 'CSV (Friendly) / All', assemble_all),
    ]
    return [DataExporter(name, label, write_csv, assembler)
            for name, label, assembler in exporter_specs]
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,069
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/interface/BaseHardwareInterface.py
|
import os
import gevent
import inspect
import logging
from collections import UserList
import rh.util.persistent_homology as ph
from rh.util.RHUtils import FREQUENCY_ID_NONE
from rh.util import ms_counter
from .Node import Node, NodeManager
from rh.sensors import Sensor
from . import RssiSample, LifetimeSample
import bisect
from typing import Any, Dict, List, Tuple, Optional
ENTER_AT_PEAK_MARGIN = 5 # closest that captured enter-at level can be to node peak RSSI
CAP_ENTER_EXIT_AT_MS = 3000 # number of milliseconds for capture of enter/exit-at levels
logger = logging.getLogger(__name__)
class BaseHardwareInterfaceListener:
    """Callback interface for hardware interface events.

    All hooks are no-ops; subclass and override the events of interest.
    Timestamps (ts/cross_ts/lap_ts) are integer milliseconds.
    """
    def on_rssi_sample(self, node, ts: int, rssi: int):
        pass
    def on_lifetime_sample(self, node, ts: int, lifetime: int):
        pass
    def on_enter_triggered(self, node, cross_ts: int, cross_rssi: int, cross_lifetime: Optional[int]=None):
        pass
    def on_exit_triggered(self, node, cross_ts: int, cross_rssi: int, cross_lifetime: Optional[int]=None):
        pass
    def on_pass(self, node, lap_ts: int, lap_source, pass_rssi: int):
        pass
    def on_extremum_history(self, node, extremum_timestamp: int, extremum_rssi: int, extremum_duration: int):
        pass
    def on_frequency_changed(self, node, frequency: int, band: Optional[str]=None, channel: Optional[int]=None):
        pass
    def on_enter_trigger_changed(self, node, level: int):
        pass
    def on_exit_trigger_changed(self, node, level: int):
        pass
class BaseHardwareInterfaceEventBroadcaster(UserList,BaseHardwareInterfaceListener):
    """Listener that fans each event out to every listener it contains.

    Behaves as a list (UserList) of listeners; the on_* methods are
    installed below by the metaprogramming loop.
    """
    pass
def _broadcast_wrap(attr):
    # Build a method that forwards the named on_* call to every listener
    # in self.data.
    def _broadcast(self: BaseHardwareInterfaceEventBroadcaster, *args):
        for l in self.data:
            getattr(l, attr)(*args)
    return _broadcast
# Install a broadcasting override for every on_* hook of the listener API.
for attr, value in inspect.getmembers(BaseHardwareInterfaceListener, callable):
    if attr.startswith('on_'):
        setattr(BaseHardwareInterfaceEventBroadcaster, attr, _broadcast_wrap(attr))
class BaseHardwareInterface:
LAP_SOURCE_REALTIME = 0
LAP_SOURCE_MANUAL = 1
LAP_SOURCE_RECALC = 2
RACE_STATUS_READY = 0
RACE_STATUS_RACING = 1
RACE_STATUS_DONE = 2
    def __init__(self, listener=None, update_sleep=0.1):
        """Initialise shared hardware interface state.

        Args:
            listener: event sink; defaults to a no-op BaseHardwareInterfaceListener.
            update_sleep: main loop delay in seconds; the RH_UPDATE_INTERVAL
                environment variable takes precedence when set.
        """
        self.node_managers: List[NodeManager] = []
        self.nodes: List[Node] = []
        self.sensors: List[Sensor] = []
        # Main update loop delay
        self.update_sleep = float(os.environ.get('RH_UPDATE_INTERVAL', update_sleep))
        self.update_thread = None # Thread for running the main update loop
        self.environmental_data_update_tracker = 0
        # Epoch used to convert absolute timestamps to race-relative times
        self.race_start_time_ms: int = 0
        self.is_racing = False
        self.listener = listener if listener is not None else BaseHardwareInterfaceListener()
        # Mask applied to pass-count deltas to handle unsigned counter roll-over
        self.pass_count_mask = 0xFF
        self.intf_error_report_limit = 0.0 # log if ratio of comm errors is larger
def start(self):
if self.update_thread is None:
logger.info('Starting {} background thread'.format(type(self).__name__))
self.update_thread = gevent.spawn(self._update_loop)
def stop(self):
if self.update_thread:
logger.info('Stopping {} background thread'.format(type(self).__name__))
self.update_thread.kill(block=True, timeout=0.5)
self.update_thread = None
def close(self):
for node in self.nodes:
node.summary_stats()
for manager in self.node_managers:
manager.close()
    def _notify_rssi_sample(self, node, ts: int, rssi: int):
        """Forward a raw RSSI sample to the listener."""
        self.listener.on_rssi_sample(node, ts, rssi)
    def _notify_lifetime_sample(self, node, ts: int, lifetime: int):
        """Record a lifetime sample in the node history, then notify the listener."""
        self.append_lifetime_history(node, ts, lifetime)
        self.listener.on_lifetime_sample(node, ts, lifetime)
    def _notify_enter_triggered(self, node, trigger_ts: int, trigger_rssi: int, trigger_lifetime: int):
        """Forward an enter-crossing trigger to the listener."""
        self.listener.on_enter_triggered(node, trigger_ts, trigger_rssi, trigger_lifetime)
    def _notify_exit_triggered(self, node, trigger_ts: int, trigger_rssi: int, trigger_lifetime: int):
        """Forward an exit-crossing trigger to the listener."""
        self.listener.on_exit_triggered(node, trigger_ts, trigger_rssi, trigger_lifetime)
    def _notify_pass(self, node, lap_ts_ms: int, lap_source, pass_rssi: Optional[int]):
        """Forward a completed lap pass to the listener."""
        self.listener.on_pass(node, lap_ts_ms, lap_source, pass_rssi)
    def _notify_extremum_history(self, node, extremum_timestamp, extremum_rssi, extremum_duration):
        """Record an RSSI extremum in the node history, then notify the listener."""
        self.append_rssi_history(node, extremum_timestamp, extremum_rssi, extremum_duration)
        self.listener.on_extremum_history(node, extremum_timestamp, extremum_rssi, extremum_duration)
    def _notify_frequency_changed(self, node):
        """Notify the listener of a node frequency change.

        Includes band/channel when the node has a bandChannel set
        (band letter followed by channel digit, e.g. "R1").
        """
        if node.bandChannel:
            self.listener.on_frequency_changed(node, node.frequency, band=node.bandChannel[0], channel=int(node.bandChannel[1]))
        else:
            self.listener.on_frequency_changed(node, node.frequency)
    def _notify_enter_trigger_changed(self, node):
        """Notify the listener of a new enter-at trigger level."""
        self.listener.on_enter_trigger_changed(node, node.enter_at_level)
    def _notify_exit_trigger_changed(self, node):
        """Notify the listener of a new exit-at trigger level."""
        self.listener.on_exit_trigger_changed(node, node.exit_at_level)
    def _update_loop(self):
        """Run self._update() forever, containing unexpected errors.

        KeyboardInterrupt, OSError and SystemExit are re-raised so the
        greenlet/process can terminate; any other exception is logged and
        the loop continues.
        _update is assumed to be provided by subclasses (not visible here).
        """
        while True:
            try:
                self._update()
            except KeyboardInterrupt:
                logger.info("Update thread terminated by keyboard interrupt")
                raise
            except OSError:
                raise
            except SystemExit:
                raise
            except Exception:
                logger.exception('Exception in {} _update_loop():'.format(type(self).__name__))
def lap_count_change(self, new_count, old_count):
delta = new_count - old_count
# handle unsigned roll-over
if self.pass_count_mask is not None:
delta = delta & self.pass_count_mask
return delta
    def is_new_lap(self, node, timestamp: int, rssi: int, pass_count, is_crossing):
        '''Process a node status sample; return (has_new_lap, has_entered, has_exited).

        Parameter order must match order in packet.
        '''
        node.current_rssi = RssiSample(timestamp, rssi)
        prev_pass_count = node.pass_count
        if prev_pass_count is None:
            # if None then initialize
            node.pass_count = pass_count
            node.lap_stats_count = pass_count
            node.enter_count = pass_count
            node.exit_count = pass_count
            node.pending_enter_count = pass_count
            node.pending_exit_count = pass_count
        elif pass_count != prev_pass_count:
            if pass_count > prev_pass_count:
                # Jump of more than one pass means at least one was lost.
                if self.lap_count_change(pass_count, node.pass_count) > 1:
                    logger.warning("Missed pass on node {}!!! (count was {}, now is {})".format(node, node.pass_count, pass_count))
                node.pending_enter_count = pass_count
                node.pending_exit_count = pass_count
            else:
                # Counter went backwards: the node was reset; resync all counters.
                logger.warning("Resyncing lap counter for node {}!!! (count was {}, now is {})".format(node, node.pass_count, pass_count))
                node.pass_count = pass_count
                node.lap_stats_count = pass_count
                node.enter_count = pass_count
                node.exit_count = pass_count
                node.pending_enter_count = pass_count
                node.pending_exit_count = pass_count
        # if 'crossing' status changed
        if is_crossing != node.is_crossing:
            node.is_crossing = is_crossing
            if pass_count == prev_pass_count:
                # In-progress crossing: anticipate the next enter/exit report.
                if is_crossing:
                    node.pending_enter_count = pass_count + 1
                else:
                    node.pending_exit_count = pass_count + 1
        self._notify_rssi_sample(node, timestamp, rssi)
        has_new_lap = node.lap_stats_count < pass_count
        has_entered = node.enter_count < node.pending_enter_count
        has_exited = node.exit_count < node.pending_exit_count
        return has_new_lap, has_entered, has_exited
    def process_enter_trigger(self, node, trigger_count, trigger_timestamp: int, trigger_rssi: int, trigger_lifetime: int):
        '''Handle an enter-crossing trigger report.

        Parameter order must match order in packet.
        '''
        logger.debug("ENTER: node={}, trigger_count={}, trigger_timestamp={}, trigger_rssi={}, trigger_lifetime={}".format(node, trigger_count, trigger_timestamp, trigger_rssi, trigger_lifetime))
        # A count jump of more than one means at least one enter event was lost.
        if node.enter_count is not None and self.lap_count_change(trigger_count, node.enter_count) > 1:
            logger.warning("Missed enter on node {}!!! (count was {}, now is {})".format(node, node.enter_count, trigger_count))
        node.enter_count = trigger_count
        # NB: crossing race times are relative to the race start time
        crossing_race_time = trigger_timestamp - self.race_start_time_ms
        if crossing_race_time < 0:
            logger.warning("Node {}: Enter crossing before race start: {} < {}".format(node, trigger_timestamp, self.race_start_time_ms))
        node.pass_crossing_flag = True # will be cleared when lap-pass is processed
        node.enter_at_sample = RssiSample(crossing_race_time, trigger_rssi)
        self._notify_enter_triggered(node, crossing_race_time, trigger_rssi, trigger_lifetime)
    def process_exit_trigger(self, node, trigger_count, trigger_timestamp: int, trigger_rssi: int, trigger_lifetime: int):
        '''Handle an exit-crossing trigger report.

        Parameter order must match order in packet.
        '''
        logger.debug("EXIT: node={}, trigger_count={}, trigger_timestamp={}, trigger_rssi={}, trigger_lifetime={}".format(node, trigger_count, trigger_timestamp, trigger_rssi, trigger_lifetime))
        # A count jump of more than one means at least one exit event was lost.
        if node.exit_count is not None and self.lap_count_change(trigger_count, node.exit_count) > 1:
            logger.warning("Missed exit on node {}!!! (count was {}, now is {})".format(node, node.exit_count, trigger_count))
        node.exit_count = trigger_count
        # NB: crossing race times are relative to the race start time
        crossing_race_time = trigger_timestamp - self.race_start_time_ms
        if crossing_race_time < 0:
            logger.warning("Node {}: Exit crossing before race start: {} < {}".format(node, trigger_timestamp, self.race_start_time_ms))
        node.exit_at_sample = RssiSample(crossing_race_time, trigger_rssi)
        self._notify_exit_triggered(node, crossing_race_time, trigger_rssi, trigger_lifetime)
    def process_lap_stats(self, node, pass_count, pass_timestamp: int, pass_peak_rssi: int, pass_nadir_rssi: int):
        '''Handle a completed lap-pass report.

        Parameter order must match order in packet.
        '''
        logger.debug("PASS: node={}, pass_count={}, pass_timestamp={}, pass_peak_rssi={}, pass_nadir_rssi={}".format(node, pass_count, pass_timestamp, pass_peak_rssi, pass_nadir_rssi))
        # Exactly one new pass is expected per report.
        if self.lap_count_change(pass_count, node.pass_count) != 1:
            logger.warning("Missed pass on node {}!!! (count was {}, now is {})".format(node, node.pass_count, pass_count))
        node.pass_count = pass_count
        node.lap_stats_count = pass_count
        if pass_peak_rssi is not None:
            node.pass_peak_rssi = pass_peak_rssi
        if pass_nadir_rssi is not None:
            node.pass_nadir_rssi = pass_nadir_rssi
        # Sanity-check the crossing samples against the pass timestamp.
        if node.enter_count == node.pass_count and node.enter_at_sample is not None and node.enter_at_sample.timestamp > pass_timestamp:
            logger.warning("Node {}: Enter timestamp {} is after pass timestamp {}!!! ".format(node, node.enter_at_sample.timestamp, pass_timestamp))
        if node.exit_count == node.pass_count and node.exit_at_sample is not None and node.exit_at_sample.timestamp < pass_timestamp:
            logger.warning("Node {}: Exit timestamp {} is before pass timestamp {}!!! ".format(node, node.exit_at_sample.timestamp, pass_timestamp))
        node.is_crossing = False
        node.enter_at_sample = None
        node.exit_at_sample = None
        # NB: lap race times are relative to the race start time
        lap_race_time_ms = pass_timestamp - self.race_start_time_ms
        if lap_race_time_ms < 0:
            logger.warning("Node {}: Lap before race start: {} < {}".format(node, pass_timestamp, self.race_start_time_ms))
        if self.is_racing and pass_peak_rssi:
            node.pass_history.append(RssiSample(pass_timestamp, pass_peak_rssi))
        self._notify_pass(node, lap_race_time_ms, BaseHardwareInterface.LAP_SOURCE_REALTIME, pass_peak_rssi)
def process_rssi_stats(self, node, peak_rssi: int, nadir_rssi: int):
'''Parameter order must match order in packet'''
if peak_rssi is not None:
node.node_peak_rssi = peak_rssi
if nadir_rssi is not None:
node.node_nadir_rssi = nadir_rssi
def process_analytics(self, node, timestamp: int, lifetime: int, loop_time: int, extremum_rssi: int, extremum_timestamp: int, extremum_duration: int):
'''Parameter order must match order in packet'''
node.current_lifetime = LifetimeSample(timestamp, lifetime)
self._notify_lifetime_sample(node, timestamp, lifetime)
node.loop_time = loop_time
if extremum_rssi is not None and extremum_timestamp is not None and extremum_duration is not None:
self._notify_extremum_history(node, extremum_timestamp, extremum_rssi, extremum_duration)
def append_rssi_history(self, node, timestamp: int, rssi: int, duration=0):
# append history data (except when race is over)
if self.is_racing:
node.history.append(timestamp, rssi)
if duration > 0:
node.history.append(timestamp + duration, rssi)
def append_lifetime_history(self, node, timestamp: int, lifetime: int):
if self.is_racing:
node.lifetime_history.append(timestamp, lifetime)
    def process_capturing(self, node):
        '''Accumulate RSSI samples while capturing enter/exit-at levels.

        While a cap_*_at_flag is set, current RSSI readings are averaged;
        when the capture window (cap_*_at_end_ts_ms) elapses, the averaged
        level is stored and listeners are notified.
        '''
        # check if capturing enter-at level for node
        if node.cap_enter_at_flag:
            node.cap_enter_at_total += node.current_rssi.rssi
            node.cap_enter_at_count += 1
            if ms_counter() >= node.cap_enter_at_end_ts_ms:
                node.enter_at_level = int(round(node.cap_enter_at_total / node.cap_enter_at_count))
                node.cap_enter_at_flag = False
                # if too close node peak then set a bit below node-peak RSSI value:
                if node.node_peak_rssi > 0 and node.node_peak_rssi - node.enter_at_level < ENTER_AT_PEAK_MARGIN:
                    node.enter_at_level = node.node_peak_rssi - ENTER_AT_PEAK_MARGIN
                logger.info('Finished capture of enter-at level for node {0}, level={1}, count={2}'.format(node, node.enter_at_level, node.cap_enter_at_count))
                self._notify_enter_trigger_changed(node)
        # check if capturing exit-at level for node
        if node.cap_exit_at_flag:
            node.cap_exit_at_total += node.current_rssi.rssi
            node.cap_exit_at_count += 1
            if ms_counter() >= node.cap_exit_at_end_ts_ms:
                node.exit_at_level = int(round(node.cap_exit_at_total / node.cap_exit_at_count))
                node.cap_exit_at_flag = False
                logger.info('Finished capture of exit-at level for node {0}, level={1}, count={2}'.format(node, node.exit_at_level, node.cap_exit_at_count))
                self._notify_exit_trigger_changed(node)
def _restore_lowered_thresholds(self, node):
    """Re-apply the configured EnterAt/ExitAt once the temporary start-lowering period ends."""
    if not node.start_thresh_lower_flag:
        return
    if ms_counter() < node.start_thresh_lower_time_ms:
        return
    logger.info("For node {0} restoring EnterAt to {1} and ExitAt to {2}".format(
        node.index+1, node.enter_at_level, node.exit_at_level))
    self.transmit_enter_at_level(node, node.enter_at_level)
    self.transmit_exit_at_level(node, node.exit_at_level)
    node.start_thresh_lower_flag = False
    node.start_thresh_lower_time_ms = 0
def ai_calibrate_nodes(self):
    """Auto-tune enter/exit trigger levels from each node's RSSI history.

    Only nodes with ai_calibrate enabled that have seen at least one gate
    crossing (first_cross_flag) and have history data are adjusted.
    """
    for node in self.nodes:
        history_times, history_values = node.history.get()
        assert len(history_times) == len(history_values)
        if node.ai_calibrate and node.first_cross_flag and history_values:
            # peaks of the RSSI trace via persistent homology
            ccs = ph.calculatePeakPersistentHomology(history_values)
            # lo/hi bracket the largest lifetime gap (presumably separating
            # noise peaks from real passes — see ph.findBreak)
            lo, hi = ph.findBreak(ccs)
            diff = hi - lo
            if diff > 1:
                # cap changes to 50%
                learning_rate = 0.5
                # move enter level part-way toward the middle of the break
                new_enter_level = int((lo + diff/2 - node.enter_at_level)*learning_rate + node.enter_at_level)
                # set exit a bit lower to register a pass sooner
                new_exit_level = int((lo + diff/4 - node.exit_at_level)*learning_rate + node.exit_at_level)
                logger.info('AI calibrating node {}: break {}-{}, adjusting ({}, {}) to ({}, {})'.format(node.index, lo, hi, node.enter_at_level, node.exit_at_level, new_enter_level, new_exit_level))
                node.enter_at_level = new_enter_level
                node.exit_at_level = new_exit_level
                self._notify_enter_trigger_changed(node)
                self._notify_exit_trigger_changed(node)
            else:
                logger.info('AI calibrating node {}: break {}-{} too narrow'.format(node.index, lo, hi))
def calibrate_nodes(self, start_time_ms: int, race_laps_history: Dict[int,Tuple[List[Dict[str,Any]],List[int],List[int]]]):
    """Tune enter/exit trigger levels from a finished race's laps and RSSI history.

    race_laps_history maps node index -> (lap records, history timestamps,
    history values). Only nodes with calibrate enabled and non-empty
    history are adjusted.
    """
    for node_idx, node_laps_history in race_laps_history.items():
        node = self.nodes[node_idx]
        node_laps, history_times, history_values = node_laps_history
        assert len(history_times) == len(history_values)
        if node.calibrate and history_values:
            # absolute timestamps (ms) of the laps that were not deleted
            lap_ts_ms = [start_time_ms + lap['lap_time_stamp'] for lap in node_laps if not lap['deleted']]
            if lap_ts_ms:
                ccs = ph.calculatePeakPersistentHomology(history_values)
                # order peaks by birth time so they can be matched to lap timestamps
                ccs.sort(key=lambda cc: history_times[cc.birth[0]])
                birth_ts = [history_times[cc.birth[0]] for cc in ccs]
                pass_idxs = []
                for lap_timestamp in lap_ts_ms:
                    # locate the peak whose birth time is nearest the lap
                    # timestamp, preferring the neighbor with the larger lifetime
                    idx = bisect.bisect_left(birth_ts, lap_timestamp)
                    if idx == len(birth_ts):
                        pass_idxs.append(idx-1)
                    elif idx == 0 or birth_ts[idx] == lap_timestamp:
                        pass_idxs.append(idx)
                    elif ccs[idx].lifetime() > ccs[idx-1].lifetime():
                        pass_idxs.append(idx)
                    else:
                        pass_idxs.append(idx-1)
                # hi: weakest peak that corresponds to an actual pass;
                # lo: strongest remaining peak below that (0 if none)
                hi = min([ccs[j].lifetime() for j in pass_idxs])
                lo = max([cc.lifetime() for cc in ccs if cc.lifetime() < hi]+[0])
                diff = hi - lo
                if diff > 1:
                    new_enter_level = lo + diff//2
                    new_exit_level = lo + diff//4
                    logger.info('Calibrating node {}: break {}-{}, adjusting ({}, {}) to ({}, {})'.format(node.index, lo, hi, node.enter_at_level, node.exit_at_level, new_enter_level, new_exit_level))
                    node.enter_at_level = new_enter_level
                    node.exit_at_level = new_exit_level
                    self._notify_enter_trigger_changed(node)
                    self._notify_exit_trigger_changed(node)
                else:
                    logger.info('Calibrating node {}: break {}-{} too narrow'.format(node.index, lo, hi))
def transmit_frequency(self, node, frequency):
    # base implementation is a no-op echo; hardware subclasses override
    # (e.g. ChorusInterface sends the value to the device)
    return frequency
def transmit_enter_at_level(self, node, level):
    # base implementation is a no-op echo; hardware subclasses override
    return level
def transmit_exit_at_level(self, node, level):
    # base implementation is a no-op echo; hardware subclasses override
    return level
#
# External functions for setting data
#
def simulate_lap(self, node_index):
    """Manually record a lap pass for the given node."""
    node = self.nodes[node_index]
    # lap timestamp is relative to the race start
    elapsed_ms = ms_counter() - self.race_start_time_ms
    node.enter_at_sample = None
    node.exit_at_sample = None
    self._notify_pass(node, elapsed_ms, BaseHardwareInterface.LAP_SOURCE_MANUAL, None)
def force_end_crossing(self, node_index):
    # no-op in the base class; subclasses with hardware crossing state may override
    pass
def on_race_start(self, race_start_time_ms: int):
    """Reset every node and mark the race as running."""
    for n in self.nodes:
        n.reset()
    self.race_start_time_ms = race_start_time_ms
    self.is_racing = True
def on_race_stop(self):
    """Mark the race finished, fold pass history into the main history, then calibrate."""
    self.is_racing = False
    for n in self.nodes:
        n.history.merge(n.pass_history)
        n.pass_history = []
    # run AI calibration asynchronously so this call doesn't block
    gevent.spawn(self.ai_calibrate_nodes)
    for n in self.nodes:
        n.summary_stats()
def set_frequency(self, node_index, frequency, band=None, channel=None):
    """Set a node's frequency (0 disables it) and optional band/channel label.

    Listeners are notified only when the effective frequency or the
    band/channel label actually changed.
    """
    logger.debug("set_frequency: node {} frequency {} band {} channel {}".format(node_index, frequency, band, channel))
    node = self.nodes[node_index]
    old_frequency = node.frequency
    old_bandChannel = node.bandChannel
    if frequency != old_frequency:
        disabled_freq = node.manager.get_disabled_frequency()
        # if frequency == 0 (node disabled) then write frequency value to power down rx module, but save 0 value
        _freq = frequency if frequency else disabled_freq
        new_freq = self.transmit_frequency(node, _freq)
        if frequency or new_freq != disabled_freq:
            node.frequency = new_freq
        else:
            # disable requested and hardware accepted the "disabled" frequency
            node.frequency = 0
        # if node enabled and successfully changed frequency and have an associated band/channel
        if frequency and new_freq == _freq and band and channel:
            node.bandChannel = band + str(channel)
        else:
            node.bandChannel = None
    else:
        # just changing band/channel values
        if band and channel:
            node.bandChannel = band + str(channel)
    if node.frequency != old_frequency or node.bandChannel != old_bandChannel:
        self._notify_frequency_changed(node)
def set_enter_at_level(self, node_index, level):
    """Set a node's enter-at trigger level, notifying listeners on change."""
    node = self.nodes[node_index]
    if not node.is_valid_rssi(level):
        return
    previous = node.enter_at_level
    node.enter_at_level = self.transmit_enter_at_level(node, level)
    if node.enter_at_level != previous:
        self._notify_enter_trigger_changed(node)
def set_exit_at_level(self, node_index, level):
    """Set a node's exit-at trigger level, notifying listeners on change."""
    node = self.nodes[node_index]
    if not node.is_valid_rssi(level):
        return
    previous = node.exit_at_level
    node.exit_at_level = self.transmit_exit_at_level(node, level)
    if node.exit_at_level != previous:
        self._notify_exit_trigger_changed(node)
def start_capture_enter_at_level(self, node_index):
    """Begin averaging RSSI to capture a new enter-at level.

    Returns True if a capture was started, False if one is already running.
    """
    node = self.nodes[node_index]
    if node.cap_enter_at_flag:
        return False
    node.cap_enter_at_total = 0
    node.cap_enter_at_count = 0
    # schedule the end of the RSSI-averaging window
    node.cap_enter_at_end_ts_ms = ms_counter() + CAP_ENTER_EXIT_AT_MS
    node.cap_enter_at_flag = True
    return True
def start_capture_exit_at_level(self, node_index):
    """Begin averaging RSSI to capture a new exit-at level.

    Returns True if a capture was started, False if one is already running.
    """
    node = self.nodes[node_index]
    if node.cap_exit_at_flag:
        return False
    node.cap_exit_at_total = 0
    node.cap_exit_at_count = 0
    # schedule the end of the RSSI-averaging window
    node.cap_exit_at_end_ts_ms = ms_counter() + CAP_ENTER_EXIT_AT_MS
    node.cap_exit_at_flag = True
    return True
def get_node_frequencies(self):
    """Per-node frequencies; nodes in scan mode report FREQUENCY_ID_NONE."""
    freqs = []
    for node in self.nodes:
        freqs.append(FREQUENCY_ID_NONE if node.scan_enabled else node.frequency)
    return freqs
#
# Get Json Node Data Functions
#
def get_heartbeat_json(self):
    """Snapshot of per-node live data for the heartbeat message.

    Nodes in scan mode report zeroed values (crossing False).
    """
    rssis = []
    lifetimes = []
    loop_times = []
    crossings = []
    for node in self.nodes:
        if node.scan_enabled:
            rssis.append(0)
            lifetimes.append(0)
            loop_times.append(0)
            crossings.append(False)
        else:
            rssis.append(node.current_rssi.rssi)
            lifetimes.append(node.current_lifetime.lifetime)
            loop_times.append(node.loop_time)
            crossings.append(node.is_crossing)
    return {
        'current_rssi': rssis,
        'current_lifetime': lifetimes,
        'frequency': self.get_node_frequencies(),
        'loop_time': loop_times,
        'crossing_flag': crossings
    }
def get_frequency_json(self, node_index):
    """Node index and its current frequency, ready for JSON serialization."""
    node = self.nodes[node_index]
    payload = {'node': node.index, 'frequency': node.frequency}
    return payload
@property
def intf_read_command_count(self):
    """Total read commands issued across all node managers and nodes."""
    return (sum(m.read_command_count for m in self.node_managers)
            + sum(n.read_command_count for n in self.nodes))
@property
def intf_read_error_count(self):
    """Total read errors across all node managers and nodes."""
    return (sum(m.read_error_count for m in self.node_managers)
            + sum(n.read_error_count for n in self.nodes))
@property
def intf_write_command_count(self):
    """Total write commands issued across all node managers and nodes."""
    return (sum(m.write_command_count for m in self.node_managers)
            + sum(n.write_command_count for n in self.nodes))
@property
def intf_write_error_count(self):
    """Total write errors across all node managers and nodes."""
    return (sum(m.write_error_count for m in self.node_managers)
            + sum(n.write_error_count for n in self.nodes))
def get_intf_total_error_count(self):
    """Total communication errors (reads plus writes) for the whole interface."""
    return self.intf_read_error_count + self.intf_write_error_count
# log comm errors if error percentage is >= this value
def set_intf_error_report_percent_limit(self, percentVal):
    """Set the comm-error reporting threshold, given as a percentage (0-100)."""
    self.intf_error_report_limit = percentVal / 100
def get_intf_error_report_str(self, forceFlag=False):
    """Build a comm-error summary string, or None if below the report threshold.

    With forceFlag=True a summary is always produced (provided any reads
    have occurred).
    """
    try:
        # no reads yet -> nothing meaningful to report
        if self.intf_read_command_count <= 0:
            return None
        r_err_ratio = float(self.intf_read_error_count) / float(self.intf_read_command_count) \
                      if self.intf_read_error_count > 0 else 0
        w_err_ratio = float(self.intf_write_error_count) / float(self.intf_write_command_count) \
                      if self.intf_write_command_count > 0 and self.intf_write_error_count > 0 else 0
        if forceFlag or r_err_ratio > self.intf_error_report_limit or \
                        w_err_ratio > self.intf_error_report_limit:
            retStr = "CommErrors:"
            # write stats only included when forced or when errors occurred
            if forceFlag or self.intf_write_error_count > 0:
                retStr += "Write:{0}/{1}({2:.2%}),".format(self.intf_write_error_count,
                                self.intf_write_command_count, w_err_ratio)
            retStr += "Read:{0}/{1}({2:.2%})".format(self.intf_read_error_count,
                                self.intf_read_command_count, r_err_ratio)
            # append per-node read-error details
            for node in self.nodes:
                retStr += ", " + node.get_read_error_report_str()
            return retStr
    except Exception as ex:
        logger.info("Error in 'get_intf_error_report_str()': " + str(ex))
        return None
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,070
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/interface/ChorusInterface.py
|
'''Chorus hardware interface layer.'''
import logging
import gevent
import serial
from .BaseHardwareInterface import BaseHardwareInterface
from .Node import Node, NodeManager
from rh.sensors import Sensor, Reading
from rh.util import ms_counter
from . import ExtremumFilter, ensure_iter, RssiSample
from rh.helpers import serial_url
RETRY_COUNT=5
logger = logging.getLogger(__name__)
class ChorusNodeManager(NodeManager):
    """Node manager speaking the Chorus plain-text serial protocol."""
    TYPE = "Chorus"

    def __init__(self, serial_io):
        super().__init__()
        self.serial_io = serial_io
        # raw RSSI readings from Chorus hardware top out at this value
        self.max_rssi_value = 2700
        self.addr = serial_url(self.serial_io.port)
        self.voltage = None

    def _create_node(self, index, multi_node_index):
        return ChorusNode(index, multi_node_index, self)

    def write(self, data):
        # the wire protocol is plain text
        self.serial_io.write(data.encode('UTF-8'))

    def read(self):
        raw = self.serial_io.read_until()
        # strip the trailing line terminator
        return raw[:-1]

    def close(self):
        self.serial_io.close()
class ChorusSensor(Sensor):
    """Exposes the Chorus hardware's voltage reading as a sensor."""

    def __init__(self, node_manager):
        super().__init__(node_manager.addr, "Chorus")
        self.description = "Chorus"
        self.node_manager = node_manager

    def update(self):
        # request a fresh voltage reading
        self.node_manager.write('R*v\n')

    @Reading(units='V')
    def voltage(self):
        raw = self.node_manager.voltage
        if raw is None:
            return None
        # raw ADC value scaled to volts
        return raw * 55.0 / 1024.0
class ChorusNode(Node):
    """A single Chorus receiver node."""

    def __init__(self, index, multi_node_index, manager):
        super().__init__(index=index, multi_node_index=multi_node_index, manager=manager)
        self.history_filter = ExtremumFilter()

    def reset(self):
        super().reset()
        # discard filter state along with the node state
        self.history_filter = ExtremumFilter()

    def send_command(self, command, in_value):
        """Send a 4-hex-digit command for this node and return the echoed value."""
        with self.manager:
            self.manager.write('R{0}{1}{2:04x}\n'.format(self.index, command, in_value))
            response = self.manager.read()
            return int(response[3:7], 16)

    def set_and_validate_value_4x(self, command, in_value):
        """Send a command, retrying until the device echoes the requested value.

        Returns the last echoed value, or the requested value if nothing
        was ever echoed.
        """
        echoed = None
        for attempt in range(1, RETRY_COUNT + 1):
            echoed = self.send_command(command, in_value)
            if echoed == in_value:
                break
            logger.warning('Value Not Set (retry={0}): cmd={1}, val={2}, node={3}'.\
                format(attempt, command, in_value, self.index+1))
        return echoed if echoed is not None else in_value
class ChorusInterface(BaseHardwareInterface):
    """Hardware interface for Chorus laptimer devices connected over serial."""

    def __init__(self, serial_ios):
        super().__init__()
        serial_ios = ensure_iter(serial_ios)
        for serial_io in serial_ios:
            node_manager = ChorusNodeManager(serial_io)
            self.node_managers.append(node_manager)
        for node_manager in self.node_managers:
            with node_manager:
                # 'N0' queries the device; the second character of the
                # response indicates the node count
                node_manager.write('N0\n')
                resp = node_manager.read()
                if resp:
                    last_node = resp[1]
                else:
                    # bug fix: 'last_node' was previously left undefined on an
                    # empty response, raising NameError below; treat as no nodes
                    logger.warning("Invalid response received")
                    last_node = 0
                for index in range(int(last_node)):
                    node = node_manager.add_node(index)
                    self.nodes.append(node)
            self.sensors.append(ChorusSensor(node_manager))
        for node in self.nodes:
            # set minimum lap time to zero - let the server handle it
            node.set_and_validate_value_4x('M', 0)

    #
    # Update Loop
    #
    def _update(self):
        """Poll each node manager once per cycle, dispatching any message read."""
        nm_sleep_interval = self.update_sleep/max(len(self.node_managers), 1)
        if self.node_managers:
            for node_manager in self.node_managers:
                with node_manager:
                    data = node_manager.read()
                    if data:
                        self._process_message(node_manager, data)
                gevent.sleep(nm_sleep_interval)
        else:
            gevent.sleep(nm_sleep_interval)

    def _process_message(self, node_manager, data):
        """Dispatch one 'S...' node message: 'L' lap, 'r' RSSI, 'v' voltage."""
        if data[0] == 'S':
            multi_node_idx = int(data[1])
            node = node_manager.nodes[multi_node_idx]
            cmd = data[2]
            if cmd == 'L':
                node.pass_count = int(data[3:5], 16)  # lap count
                lap_ts_ms = int(data[5:13], 16)  # relative to start time
                self._notify_pass(node, lap_ts_ms, BaseHardwareInterface.LAP_SOURCE_REALTIME, None)
            elif cmd == 'r':
                rssi = int(data[3:7], 16)
                ts_ms = ms_counter()
                node.current_rssi = RssiSample(ts_ms, rssi)
                node.node_peak_rssi = max(rssi, node.node_peak_rssi)
                node.node_nadir_rssi = min(rssi, node.node_nadir_rssi)
                # extremum-filter the samples before recording history
                filtered_ts, filtered_rssi = node.history_filter.filter(ts_ms, rssi)
                self.append_rssi_history(node, filtered_ts, filtered_rssi)
            elif cmd == 'v':
                node.manager.voltage = int(data[3:7], 16)

    def on_race_start(self, race_start_time_ms):
        super().on_race_start(race_start_time_ms)
        # reset timers to zero
        for node_manager in self.node_managers:
            with node_manager:
                # mode = lap times relative to start time
                node_manager.write('R*R2\n')
                node_manager.read()

    def on_race_stop(self):
        for node_manager in self.node_managers:
            with node_manager:
                node_manager.write('R*R0\n')
                node_manager.read()
        super().on_race_stop()

    def transmit_frequency(self, node, frequency):
        return node.set_and_validate_value_4x('F', frequency)

    def transmit_enter_at_level(self, node, level):
        return node.set_and_validate_value_4x('T', level)

    def transmit_exit_at_level(self, node, level):
        # NOTE(review): same 'T' command as enter-at — Chorus appears to use a
        # single threshold; confirm this is intentional
        return node.set_and_validate_value_4x('T', level)
def get_hardware_interface(config, *args, **kwargs):
    '''Returns the interface object.'''
    ports = ensure_iter(config.CHORUS['HARDWARE_PORT'])
    serial_ios = []
    for port in ports:
        serial_ios.append(serial.Serial(port=port, baudrate=115200, timeout=0.1))
    return ChorusInterface(serial_ios)
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
34,619,071
|
pulquero/RotorHazard
|
refs/heads/BetaHazard
|
/src/rh/app/web.py
|
import functools
import requests
import logging
from rh.app import SOCKET_IO
from flask import current_app
import rh.app.race_explorer_core as racex
from rh.util.Plugins import Plugins
import rh.orgs as org_pkg
logger = logging.getLogger(__name__)
TIMEOUT = 5
ORGS = Plugins(suffix='org')
def init(rhconfig):
    """Discover org plugins in the rh.orgs package and hand them the config."""
    ORGS.discover(org_pkg, config=rhconfig)
@functools.lru_cache(maxsize=128)
def get_pilot_data(url):
    """Fetch pilot data from the first org plugin recognizing the URL.

    Results are memoized per URL (including {} failure results, so a
    transient error sticks until the cache entry is evicted).
    Returns {} when no plugin matches or the fetch fails.
    """
    for org in ORGS:
        pilot_id = org.is_pilot_url(url)
        if pilot_id:
            try:
                return org.get_pilot_data(url, pilot_id)
            except Exception as err:
                # fix: was `except BaseException`, which also swallowed
                # KeyboardInterrupt/SystemExit; narrowed to Exception
                logger.warning("Error connecting to '{}'".format(url), exc_info=err)
            return {}
    return {}
def get_event_data(url):
    """Fetch event data for a URL, preferring a matching org plugin.

    If no plugin matches, falls back to a plain HTTP GET returning the
    parsed JSON body. Returns {} when a matching plugin's fetch fails.
    """
    for org in ORGS:
        event_id = org.is_event_url(url)
        if event_id:
            try:
                return org.get_event_data(url, event_id)
            except Exception as err:
                # fix: narrowed from BaseException so Ctrl-C/SystemExit propagate
                logger.warning("Error connecting to '{}'".format(url), exc_info=err)
            return {}
    resp = requests.get(url, timeout=TIMEOUT)
    return resp.json()
@SOCKET_IO.on('sync_event')
def on_sync_event():
    """Socket.IO handler: trigger an event sync for the current app's server."""
    sync_event(current_app.rhserver)
def sync_event(rhserver):
    """Pull event data from the event's configured URL and import it.

    No-op when the event has no URL configured.
    """
    rhdata = rhserver['RHData']
    event_info = racex.export_event_basic(rhdata)
    url = event_info['url']
    if not url:
        return
    # fix: use the module logger (was the root logger via logging.info),
    # consistent with the rest of this module
    logger.info("Syncing event...")
    event_data = get_event_data(url)
    if event_data:
        racex.import_event(event_data, rhserver)
        logger.info("Syncing completed")
    else:
        logger.info("Nothing to sync")
def upload_results(rhserver):
    """Upload the current leaderboard to the first org plugin matching the event URL.

    No-op when the event has no URL configured.
    """
    rhdata = rhserver['RHData']
    event_info = racex.export_event_basic(rhdata)
    url = event_info['url']
    if not url:
        return
    # fix: use the module logger (was the root logger via logging.info)
    logger.info("Uploading results...")
    leaderboard = racex.export_leaderboard(rhdata)
    for org in ORGS:
        event_id = org.is_event_url(url)
        if event_id:
            try:
                org.upload_results(event_id, leaderboard)
                logger.info("Upload completed")
            except Exception as err:
                # fix: narrowed from BaseException so Ctrl-C/SystemExit propagate
                logger.warning("Error connecting to '{}'".format(url), exc_info=err)
            return {}
|
{"/src/rh/sensors/linux_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/LapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/apis/mqtt_api.py": ["/src/rh/apis/__init__.py"], "/src/rh/helpers/i2c_helper.py": ["/src/rh/helpers/__init__.py"], "/src/rh/tools/rssi_dump.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/RHInterface.py": ["/src/rh/interface/__init__.py", "/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py"], "/src/rh/interface/nodes/socket_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_character.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MqttInterface.py": ["/src/rh/interface/BaseHardwareInterface.py"], "/src/rh/tools/adc_test.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/Node.py": ["/src/rh/interface/__init__.py"], "/src/rh/interface/nodes/i2c_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/sensors/psutil_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/interface/nodes/serial_node.py": ["/src/rh/interface/__init__.py"], "/src/rh/data_export/data_export_csv.py": ["/src/rh/data_export/__init__.py"], "/src/rh/interface/BaseHardwareInterface.py": ["/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/interface/ChorusInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/Node.py", "/src/rh/interface/__init__.py"], "/src/rh/leds/led_handler_bitmap.py": ["/src/rh/leds/__init__.py"], "/src/rh/interface/MockInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/RHInterface.py"], "/src/rh/tools/scanner.py": ["/src/rh/tools/__init__.py"], "/src/rh/interface/MqttLapRFInterface.py": ["/src/rh/interface/BaseHardwareInterface.py", "/src/rh/interface/MqttInterface.py", "/src/rh/interface/LapRFInterface.py"], "/src/rh/sensors/bme280_sensor.py": ["/src/rh/sensors/__init__.py"], "/src/rh/app/Results.py": 
["/src/rh/app/RHRace.py"], "/src/rh/leds/led_handler_graph.py": ["/src/rh/leds/__init__.py"], "/src/rh/util/RHTimeFns.py": ["/src/rh/util/__init__.py"], "/src/rh/data_export/data_export_json.py": ["/src/rh/data_export/__init__.py"], "/src/rh/app/RHData.py": ["/src/rh/app/RHRace.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.